pax_global_header00006660000000000000000000000064147663411340014523gustar00rootroot0000000000000052 comment=50e2665603decb5a4507509b4b406427ba23c647 es_client-8.17.4/000077500000000000000000000000001476634113400135715ustar00rootroot00000000000000es_client-8.17.4/.coveragerc000066400000000000000000000004141476634113400157110ustar00rootroot00000000000000[report] exclude_also = def __repr__ if self.debug: if settings.DEBUG raise AssertionError raise NotImplementedError if 0: if __name__ == .__main__.: if TYPE_CHECKING: class .*\bProtocol\): @(abc\.)?abstractmethod \s{4}from es_client-8.17.4/.flake8000066400000000000000000000000361476634113400147430ustar00rootroot00000000000000[flake8] max-line-length = 88 es_client-8.17.4/.gitignore000066400000000000000000000024031476634113400155600ustar00rootroot00000000000000# Project Specific Files docker_test/createrepo.json docker_test/.kurl http_ca.crt # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Flake8 .flake8 # Distribution / packaging .Python env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. 
*.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ cover/ .coverage .coverage.* .cache coverage.xml *.cover .hypothesis/ cov_html/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # dotenv .env # virtualenv .venv venv/ ENV/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ # VS Code .vscode es_client-8.17.4/.readthedocs.yaml000066400000000000000000000006411476634113400170210ustar00rootroot00000000000000# .readthedocs.yaml # Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details # Required version: 2 # Set the version of Python and other tools you might need build: os: ubuntu-22.04 tools: python: "3.12" # Build documentation in the docs/ directory with Sphinx sphinx: configuration: docs/conf.py python: install: - method: pip path: . es_client-8.17.4/LICENSE000066400000000000000000000261301476634113400146000ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright 2022-2025 Aaron Mildenstein Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. es_client-8.17.4/README.rst000066400000000000000000000044321476634113400152630ustar00rootroot00000000000000.. _readme: es_client ========= https://es-client.readthedocs.io/ You may wonder why this even exists, as at first glance it doesn't seem to make anything any easier than just using the elasticsearch8 Python module to build a client connection. I needed to be able to reuse the more complex schema validation bits I was employing, namely: * ``master_only`` detection * Elasticsearch version checking and validation, and the option to skip this. * Configuration value validation, including file paths for SSL certificates, meaning: * No unknown keys or unacceptable parameter values are accepted * Acceptable values and ranges are established (where known)--and easy to amend, if necessary. So, if you don't need these, then this library probably isn't what you're looking for. If you want these features, then you've come to the right place. 
Example Usage ------------- :: from es_client import Builder config = { 'elasticsearch': { 'client': { 'hosts': 'https://10.0.0.123:9200', 'ca_certs': '/etc/elasticsearch/certs/ca.crt', 'request_timeout': 60, }, 'other_settings': { 'master_only': false, 'username': 'joe_user', 'password': 'password', } }, 'logging': { 'loglevel': 'INFO', 'logfile': '/path/to/file.log', 'logformat': 'default', } } builder = Builder(configdict=config) try: builder.connect() except: # Do exception handling here... client = builder.client Additionally, you can read from a YAML configuration file: :: --- elasticsearch: client: hosts: https://10.0.0.123:9200 ca_certs: /etc/elasticsearch/certs/ca.crt request_timeout: 60 other_settings: master_only: false username: joe_user password: password logging: loglevel: INFO logfile: /path/to/file.log logformat: default :: from es_client import Builder builder = Builder(configfile='/path/to/es_client.yml') try: builder.connect() except: # Do exception handling here... client = builder.client The same schema validations apply here as well. 
es_client-8.17.4/cli.py000066400000000000000000000005611476634113400147140ustar00rootroot00000000000000"""CLI Wrapper used by cli.py""" from click import echo from es_client.cli_example import run if __name__ == '__main__': try: # This is because click uses decorators, and pylint doesn't catch that # pylint: disable=no-value-for-parameter run() except RuntimeError as err: import sys echo(f'{err}') sys.exit(1) es_client-8.17.4/docker_test/000077500000000000000000000000001476634113400160775ustar00rootroot00000000000000es_client-8.17.4/docker_test/VERSION000066400000000000000000000001701476634113400171450ustar00rootroot00000000000000Version: 1.0.2 Released: 23 August 2024 # License and Changelog at https://github.com/untergeek/es-docker-test-scripts es_client-8.17.4/docker_test/ansi_clean.bash000066400000000000000000000003021476634113400210250ustar00rootroot00000000000000#!/bin/bash ansi_clean () { # This function is separate so nobody touches the control-M sequence # in the second sed stream filter echo ${1} | sed -e 's/\x1b\[[0-9;]*m//g' -e 's/ //g' } es_client-8.17.4/docker_test/common.bash000066400000000000000000000157501476634113400202360ustar00rootroot00000000000000# Common variables and functions # Source the common.bash file from the same path as the script source $(dirname "$0")/ansi_clean.bash #MANUAL_PROJECT_NAME=project_name DOCKER_PORT=9200 LOCAL_PORT=9200 URL_HOST=127.0.0.1 ESUSR=elastic ENVFILE=.env CURLFILE=.kurl REPODOCKER=/media REPOJSON=createrepo.json REPONAME=testing LIMIT=30 # How many seconds to wait to obtain the credentials IMAGE=docker.elastic.co/elasticsearch/elasticsearch MEMORY=1GB # The heap will be half of this ############################# ### Function declarations ### ############################# docker_logline () { # Return the line number that contains "${1}" echo $(docker logs ${NAME} | grep -n "${1}" | awk -F\: '{print $1}') } get_espw () { # Start with an empty value linenum='' # Make a pretty spinner spin='-\|/' # spin modulo 
tracker s=0 # tenths incrementer (of a second) tenths=0 # tenths modulo tracker t=0 # seconds incrementer seconds=0 # Loop until we get a valid line number, or LIMIT tries while [ "x${linenum}" == "x" ] && [ $seconds -lt $LIMIT ]; do # increment $s and modulo 4 s=$(( (s+1) %4 )) # increment $tenths ((++tenths)) # increment $t and modulo 10 t=$(( (t+1) %10 )) # if $t is 0 (it was evenly divisible by 10) if [ $t -eq 0 ]; then # we increment seconds, because 1 second has elapsed ((++seconds)) # Get the docker log line associated with elasticsearch-reset-password linenum=$(docker_logline "elasticsearch-reset-password") fi # Print the spinner to stderr (so it shows up) printf "\r${spin:$s:1} ${seconds}s elapsed (typically 15s - 25s)..." >&2 # wait 1/10th of a second before looping again sleep 0.1 done # end while loop # Error out if we didn't get it if [ "x${linenum}" == "x" ] || [ $seconds -ge $LIMIT ]; then echo "ERROR: Unable to get password for user ${ESUSR}. Unable to continue. Exiting..." exit 1 fi # Increment the linenum (because we want the next line) ((++linenum)) # Get the (next) line, i.e. incremented and tailed to isolate retval=$(docker logs ${NAME} | head -n ${linenum} | tail -1 | awk '{print $1}') # Strip the ANSI color/bold here. External function because of the control-M sequence ESPWD=$(ansi_clean "${retval}") } change_espw () { # To shorten the command-line, we put this as a variable exec_cmd=/usr/share/elasticsearch/bin/elasticsearch-reset-password ################################################# # The change password command: # # docker exec -it ${1} ${exec_cmd} -b -u $ESUSR # ################################################# ############################################################################# # Output 1: Not ready response: # # ERROR: Failed to determine the health of the cluster. 
, with exit code 69 # ############################################################################# ####################################################### # Output 2: Successful response: # # Password for the [elastic] user successfully reset. # # New value: NEW_PASSWORD # ####################################################### # awk '{print $3}' of the "Not ready response" is "to" # So we start with retval='to' retval='to' # We're only going to try this to the $LIMIT count=0 # Loop until we get the expected response, or LIMIT tries while [ "x$retval" == "xto" ] && [ $count -lt $LIMIT ]; do retval=$(docker exec -it ${NAME} $exec_cmd -b -u ${ESUSR} | tail -1 | awk '{print $3}') ((++count)) sleep 1 done # If we still don't have a value, send an empty reponse back, rather than "to" if [ "x${retval}" == "xto" ]; then echo '' else echo ${retval} fi } xpack_fork () { echo echo "Getting Elasticsearch credentials from container \"${NAME}\"..." echo # Get the password from the change_espw function. It sets ESPWD get_espw # If we have an empty value, that's a problem if [ "x${ESPWD}" == "x" ]; then echo "ERROR: Unable to get password for user ${ESUSR}. Unable to continue. Exiting..." exit 1 fi # Put envvars in ${ENVCFG} echo "export ESCLIENT_USERNAME=${ESUSR}" >> ${ENVCFG} echo "export TEST_USER=${ESUSR}" >> ${ENVCFG} # We escape the quotes so we can include them in case of special characters echo "export ESCLIENT_PASSWORD=\"${ESPWD}\"" >> ${ENVCFG} echo "export TEST_PASS=\"${ESPWD}\"" >> ${ENVCFG} # Get the CA certificate and copy it to the PROJECT_ROOT docker cp -q ${NAME}:/usr/share/elasticsearch/config/certs/http_ca.crt ${PROJECT_ROOT} # Put the credentials into ${CURLCFG} echo "-u ${ESUSR}:${ESPWD}" >> ${CURLCFG} echo "--cacert ${CACRT}" >> ${CURLCFG} # Complete echo "Credentials captured!" 
} # Save original execution path EXECPATH=$(pwd) # Extract the path for the script SCRIPTPATH="$(cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P)" # Ensure we are in the script path cd ${SCRIPTPATH} # Get the directory name SCRIPTPATH_NAME=$(pwd | awk -F\/ '{print $NF}') # Go up a level cd ../ # Find out what the last part of this directory is called PROJECT_NAME=$(pwd | awk -F\/ '{print $NF}') # Manually override the project name, if specified if [ "x${MANUAL_PROJECT_NAME}" != "x" ]; then PROJECT_NAME=${MANUAL_PROJECT_NAME} fi # We should be at the project root dir now PROJECT_ROOT=$(pwd) if [ "${SCRIPTPATH_NAME}" != "docker_test" ]; then echo "$0 is not in parent directory 'docker_test'" echo "This could cause issues as that is expected." echo "PROJECT_ROOT is now set to ${SCRIPTPATH}" echo "You may want to set MANUAL_PROJECT_NAME in common.bash" PROJECT_ROOT=${SCRIPTPATH} fi # If we have a tests/integration path, then we'll use that if [ -d "tests/integration" ]; then TESTPATH=${PROJECT_ROOT}/tests/integration else # Otherwise we will just dump it into the $SCRIPTPATH TESTPATH=${SCRIPTPATH} fi # Set the CACRT var CACRT=${PROJECT_ROOT}/http_ca.crt # Set the .env file ENVCFG=${PROJECT_ROOT}/${ENVFILE} rm -rf ${ENVCFG} # Set the curl config file and ensure we're not reusing an old one CURLCFG=${SCRIPTPATH}/${CURLFILE} rm -rf ${CURLCFG} # Determine local IPs OS=$(uname -a | awk '{print $1}') if [[ "$OS" = "Linux" ]]; then IPLIST=$(ip -4 -o addr show scope global | grep -v docker |awk '{gsub(/\/.*/,"",$4); print $4}') elif [[ "$OS" = "Darwin" ]]; then IPLIST=$(ifconfig | awk -F "[: ]+" '/inet / { if ($2 != "127.0.0.1") print $2 }') else echo "Could not determine local IPs for assigning environment variables..." echo "Please manually determine your local non-loopback IP address and assign it," echo "e.g. 
TEST_ES_SERVER=https://A.B.C.D:${LOCAL_PORT}" exit 0 fi ####################### ### Set Docker vars ### ####################### # Set the Docker container name NAME=${PROJECT_NAME}-test # Set the bind mount path for the snapshot repository REPOLOCAL=${SCRIPTPATH}/repo # Navigate back to the script path cd ${SCRIPTPATH} ################### ### END COMMON ### ################### es_client-8.17.4/docker_test/create.sh000077500000000000000000000113471476634113400177070ustar00rootroot00000000000000#!/bin/bash # Source the common.bash file from the same path as the script source $(dirname "$0")/common.bash echo # Test to see if we were passed a VERSION if [ "x${1}" == "x" ]; then echo "Error! No Elasticsearch version provided." echo "VERSION must be in Semver format, e.g. X.Y.Z, 8.6.0" echo "USAGE: ${0} VERSION" exit 1 fi # Set the version VERSION=${1} ###################################### ### Setup snapshot repository path ### ###################################### # Nuke it from orbit, just to be sure rm -rf ${REPOLOCAL} mkdir -p ${REPOLOCAL} ##################### ### Run Container ### ##################### docker network rm -f ${NAME}-net > /dev/null 2>&1 docker network create ${NAME}-net > /dev/null 2>&1 # Start the container echo "Starting container \"${NAME}\" from ${IMAGE}:${VERSION}" echo -en "Container ID: " docker run -q -d -it --name ${NAME} --network ${NAME}-net -m ${MEMORY} \ -p ${LOCAL_PORT}:${DOCKER_PORT} \ -v ${REPOLOCAL}:${REPODOCKER} \ -e "discovery.type=single-node" \ -e "cluster.name=local-cluster" \ -e "node.name=local-node" \ -e "xpack.monitoring.templates.enabled=false" \ -e "path.repo=${REPODOCKER}" \ ${IMAGE}:${VERSION} # Set the URL URL=https://${URL_HOST}:${LOCAL_PORT} # Add TESTPATH to ${ENVCFG}, creating it or overwriting it echo "export CA_CRT=${PROJECT_ROOT}/http_ca.crt" >> ${ENVCFG} echo "export TEST_PATH=${TESTPATH}" >> ${ENVCFG} echo "export TEST_ES_SERVER=${URL}" >> ${ENVCFG} echo "export TEST_ES_REPO=${REPONAME}" >> ${ENVCFG} # Write 
some ESCLIENT_ environment variables to the .env file echo "export ESCLIENT_CA_CERTS=${CACRT}" >> ${ENVCFG} echo "export ESCLIENT_HOSTS=${URL}" >> ${ENVCFG} # Set up the curl config file, first line creates a new file, all others append echo "-o /dev/null" > ${CURLCFG} echo "-s" >> ${CURLCFG} echo '-w "%{http_code}\n"' >> ${CURLCFG} # Do the xpack_fork function, passing the container name and the .env file path xpack_fork "${NAME}" "${ENVCFG}" # Did we get a bad return code? if [ $? -eq 1 ]; then # That's an error, and we need to exit echo "ERROR! Unable to get/reset elastic user password. Unable to continue. Exiting..." exit 1 fi # We expect a 200 HTTP rsponse EXPECTED=200 # Set the NODE var NODE="${NAME} instance" # Start with an empty value ACTUAL=0 # Initialize loop counter COUNTER=0 # Loop until we get our 200 code echo while [ "${ACTUAL}" != "${EXPECTED}" ] && [ ${COUNTER} -lt ${LIMIT} ]; do # Get our actual response ACTUAL=$(curl -K ${CURLCFG} ${URL}) # Report what we received echo -en "\rHTTP status code for ${NODE} is: ${ACTUAL}" # If we got what we expected, we're great! if [ "${ACTUAL}" == "${EXPECTED}" ]; then echo " --- ${NODE} is ready!" else # Otherwise sleep and try again sleep 1 ((++COUNTER)) fi done # End while loop # If we still don't have what we expected, we hit the LIMIT if [ "${ACTUAL}" != "${EXPECTED}" ]; then echo "Unable to connect to ${URL} in ${LIMIT} seconds. Unable to continue. Exiting..." exit 1 fi # Initialize trial license echo response=$(curl -s \ --cacert ${CACRT} -u "${ESUSR}:${ESPWD}" \ -XPOST "${URL}/_license/start_trial?acknowledge=true") expected='{"acknowledged":true,"trial_was_started":true,"type":"trial"}' if [ "$response" != "$expected" ]; then echo "ERROR! Unable to start trial license!" else echo -n "Trial license started and acknowledged. " fi # Set up snapshot repository. 
The following will create a JSON file suitable for use with # curl -d @filename rm -f ${REPOJSON} # Build a pretty JSON object defining the repository settings echo '{' >> $REPOJSON echo ' "type": "fs",' >> $REPOJSON echo ' "settings": {' >> $REPOJSON echo -n ' "location": "' >> $REPOJSON echo -n "${REPODOCKER}" >> $REPOJSON echo '"' >> $REPOJSON echo ' }' >> $REPOJSON echo '}' >> $REPOJSON # Create snapshot repository response=$(curl -s \ --cacert ${CACRT} -u "${ESUSR}:${ESPWD}" \ -H 'Content-Type: application/json' \ -XPOST "${URL}/_snapshot/${REPONAME}?verify=false" \ --json \@${REPOJSON}) expected='{"acknowledged":true}' if [ "$response" != "$expected" ]; then echo "ERROR! Unable to create snapshot repository" else echo "Snapshot repository \"${REPONAME}\" created." rm -f ${REPOJSON} fi ################## ### Wrap it up ### ################## echo echo "${NAME} container is up using image elasticsearch:${VERSION}" echo "Ready to test!" echo if [ "$EXECPATH" == "$PROJECT_ROOT" ]; then echo "Environment variables are in .env" elif [ "$EXECPATH" == "$SCRIPTPATH" ]; then echo "\$PWD is $SCRIPTPATH." echo "Environment variables are in ../.env" else echo "Environment variables are in ${PROJECT_ROOT}/.env" fi es_client-8.17.4/docker_test/destroy.sh000077500000000000000000000014701476634113400201310ustar00rootroot00000000000000#!/bin/bash # Source the common.bash file from the same path as the script source $(dirname "$0")/common.bash echo # Stop and remove the docker container RUNNING=$(docker ps -f name=${NAME} | grep -v NAMES | awk '{print $NF}') EXISTS=$(docker ps -af name=${NAME} | grep -v NAMES | awk '{print $NF}') if [ "${RUNNING}" == "${NAME}" ]; then echo "Stopping container ${NAME}..." echo "$(docker stop ${NAME}) stopped." fi if [ "${EXISTS}" == "${NAME}" ]; then echo "Removing container ${NAME}..." echo "$(docker rm -f ${NAME}) deleted." 
fi # Delete Docker network docker network rm -f ${NAME}-net > /dev/null 2>&1 # Delete .env file and curl config file echo "Deleting remaining files and directories" rm -rf ${REPOLOCAL} rm -f ${ENVCFG} rm -f ${CURLCFG} rm -f ${PROJECT_ROOT}/http_ca.crt echo "Cleanup complete." es_client-8.17.4/docs/000077500000000000000000000000001476634113400145215ustar00rootroot00000000000000es_client-8.17.4/docs/Changelog.rst000066400000000000000000000763031476634113400171530ustar00rootroot00000000000000.. _changelog: Changelog ========= 8.17.4 (18 March 2025) ---------------------- **Bugfix** * Fixed a logging configuration bug to only assign a file handler if a log file is specified. Also fixed to ensure configuration goes to the root logger. **Changes** * Dependency version bumps in this release: * ``elasticsearch8==8.17.2`` * ``certifi>=2025.3.31`` 8.17.3 (6 March 2025) --------------------- **Announcement** Logging changes *************** If you specify a log file in your configuration, it will now be used, and nothing should appear different for you. If, however, you do not specify a log file, the default behavior is to log to both STDOUT `and` STDERR, with the streams split. This is the new behavior. If you do not want this, you must specify a log file in your configuration. .. code-block:: shell $ python run_script.py --loglevel DEBUG test-stderr 1>stdout.log 2>stderr.log This will log all output to ``stdout.log`` and all errors to ``stderr.log``. .. code-block:: shell $ cat stdout.log ─╯ DEBUG: Overriding configuration file setting loglevel=INFO with command-line option loglevel=DEBUG 2025-03-06 17:53:01,038 DEBUG es_client.commands test_stderr:131 This is a debug message 2025-03-06 17:53:01,038 INFO es_client.commands test_stderr:132 This is an info message Logging test complete. 
$ cat stderr.log 2025-03-06 17:53:01,038 WARNING es_client.commands test_stderr:133 This is a warning message 2025-03-06 17:53:01,038 ERROR es_client.commands test_stderr:134 This is an error message 2025-03-06 17:53:01,038 CRITICAL es_client.commands test_stderr:135 This is a critical message **Changes** * Changes in the ``logging.py`` file to handle the new logging behavior. Also added ``test-stderr`` to ``commands.py`` and ``cli_example.py`` to demonstrate the new behavior. * Updated ``defaults.py`` to have a default ``LOGFORMAT`` of ``default``. 8.17.2 (26 February 2025) ------------------------- **Announcement** * Attempting to allow the 8.x client to work with 7.x Elasticsearch servers by making ``min_version`` and ``max_version`` configurable at the time of ``Builder`` instantiation. The default values are still limited to 8.x versions, but preliminary testing shows that the 8.x client works just fine for Curator against 7.14.x through 7.17.x servers with these changes. **Changes** * The ``Builder`` class can now override the default minimum and/or maximum version: ``Builder(config, min_version=7.0.0, max_version=8.99.99)``. * The ``helpers.config.get_client()`` function can also take these arguments: ``helpers.config.get_client(config, min_version=7.0.0, max_version=8.99.99)``. * Updated the date and copyright holder in ``LICENSE``. 8.17.1 (24 Janary 2025) ----------------------- **Announcements** * Python 3.13 support...but with a caveat. * HUGE (potential) caveat, though. The Python 3.13 SSL implementation now has ``X509_V_FLAG_X509_STRICT`` set by default. This unfortunately means that self-signed certificates created by Elasticsearch's ``certutil`` will not work with Python 3.13 as they do not yet include the key usage extension. If you are using ``es_client`` in any way with one of these certificates, I highly recommend that you not use Python 3.13 until this is resolved. * 3.13 is excluded from the Hatch test matrix for this reason. 
* 3.13 will still be tested manually with each release. **Changes** * Python module version bumps: * ``elasticsearch8==8.17.1`` * ``click==8.1.8`` * ``certifi>=2024.12.14`` * Refactored ``master_only`` functions and tests. I discovered some loopholes in my code when I was testing Python 3.13 against an Elastic Cloud instance, so I fixed them. This also necessitated a change in the integration tests. 8.15.2 (30 September 2024) -------------------------- **Changes** * Python module version bumps: * ``elasticsearch8==8.15.1`` * ``pyyaml==6.0.2`` * ``certifi>=2024.8.30`` 8.15.1 (23 August 2024) ----------------------- **Changes** * Added ``commands.py`` as both a cleaner location for the ``show_all_options`` function, as well as a place it could be imported and re-used. * Updated ``docs/example.rst`` and ``docs/tutorial.rst`` to reflect these location changes. * Updated ``pytest.ini`` to automatically look for and use ``.env`` for environment variables for testing. * Using versioned ``docker_test`` scripts now from https://github.com/untergeek/es-docker-test-scripts 8.15.0 (13 August 2024) ----------------------- **Changes** * Python module version bumps: * ``elasticsearch8==8.15.0`` * Make execution scripts more consistent and PEP compliant. 8.14.2 (6 August 2024) ---------------------- **Changes** * Missed one instance of ``six`` module. 8.14.1 (6 August 2024) ---------------------- **Changes** * ``six`` module removed. * Rolled back ``voluptuous`` to be ``>=0.14.2`` to work with Python 3.8 8.14.0 (3 July 2024) -------------------- **Changes** * Python module version bumps: * ``elasticsearch8==8.14.0`` * ``ecs-logging==2.2.0`` * ``voluptuous>=0.15.2`` * ``certifi>=2024.6.2`` * Updated remaining tests to Pytest-style formatting. * Updated ``docker_test`` scripts to most recent updates. 
**Bugfix** * Fixed an error reported at https://github.com/elastic/curator/issues/1713 where providing an empty API ``token`` key would still result in the Builder class method ``_check_api_key`` trying to extract data. Locally tracked at https://github.com/untergeek/es_client/issues/66 8.13.5 (7 May 2024) ------------------- **Changes** * Version bump for ``elasticsearch8==8.13.1`` * Code formatting changes (cleanup of lines over 88 chars, mostly). * Added ``.coveragerc`` * Improved ``docker_test`` scripts and env var importing in tests. **Bugfix** * Discovered an instance where passwords were being logged. This has been corrected. 8.13.4 (30 April 2024) ---------------------- **Changes** * Updated ``docker_test`` scripts to enable TLS testing and better integration with pytest. TEST_USER and TEST_PASS and TEST_ES_SERVER, etc. are all populated and put into ``.env`` Even the CA certificate is copied to TEST_PATH, so it's easy for the tests to pick it up. Not incidentally, the scripts were moved from ``docker_test/scripts`` to just ``docker_test``. The tutorial in the documentation has been updated to reflect these changes. * Added ``pytest-dotenv`` as a test dependency to take advantage of the ``.env`` * Minor code formatting in most files as I've switched to using ``black`` with VS Code, and flake8, and mypy. **Bugfix** * Found 1 stray instance of ``update_settings`` from before the DotMap switch. Fixed. 8.13.3 (26 April 2024) ---------------------- **Changes** * After all that work to ensure proper typing, I forgot to include the ``py.typed`` marker file. 8.13.2 (25 April 2024) ---------------------- **Changes** * Added typing hints, everywhere. Trying to make the module play nicer with others. * Moved all code under ``src/es_client`` to be more package compliant. * Moved ``__version__`` to ``__init__.py`` * Updated the ``pyproject.toml`` file to reflect these changes. * Updated tests and documentation as needed. 
**Potentially Breaking Changes** * Migrated away from custom ``dict``-to-attribute class ``Args`` to ``DotMap``. It's the best of both worlds as it gives full dotted notation access to a dictionary, making it appear like class attributes. But it also still affords you the ability to treat each nested field just like a dictionary, still. ``Builder.client_args`` and ``Builder.other_args`` should look and feel the exact same as before, with one noted difference, and that is the ``.asdict()`` method has been replaced by the ``.toDict()`` method. This is the one change that might mess you up. If you are using that anywhere, please replace those calls. Also, if you were manually building these objects before, rather than supplying a config file or dict, you can create these now as follows: .. code-block:: python from es_client import Builder from dotmap import DotMap client_settings = {} # Filled with your client settings client_args = DotMap(client_settings) builder = Builder() builder.client_args = client_args # Or directly assign: builder.client_args = DotMap(client_settings) Updating a single key is simple: .. code-block:: python other_args = DotMap(other_settings) other_args.username = 'nobody' other_args['password'] = 'The Spanish Inquisition' As noted, both dotted and dict formats are acceptable, as demonstrated above. Updating with a dictionary of root level keys is simple: .. code-block:: python other_settings = { 'master_only': False, 'username': 'original', 'password': 'oldpasswd', } other_args = DotMap(other_settings) # DotMap(master_only=False, username='original', password='oldpasswd') changes = { 'master_only': True, 'username': 'newuser', 'password': 'newpasswd', } other_args.update(changes) # DotMap(master_only=True, username='newuser', password='newpasswd') If putting a nested dictionary in place, you should convert it to a DotMap first: .. 
code-block:: python d = {'a':'A', 'b':{'c':'C', 'd':{'e':'E'}}} dm = DotMap(d) # DotMap(a='A', b=DotMap(c='C', d=DotMap(e='E'))) b = {'b':{'g':'G', 'h':{'i':'I'}}} dm.update(b) # DotMap(a='A', b={'g': 'G', 'h': {'i': 'I'}}) # ^^^ # Not a DotMap dm.update(DotMap(b)) DotMap(a='A', b=DotMap(g='G', h=DotMap(i='I'))) It's always safest to update with a DotMap rather than a bare dict. That's about it. 8.13.1 (10 April 2024) ---------------------- **Bugfix** * Reported in #60. Newer code changes do not work properly with Python versions < 3.10 due to changes to dictionary annotations. The offending code has been patched to work around this. **Announcement** * Added infrastructure to test multiple versions of Python against the code base. This requires you to run ``pip install -U hatch hatchling``, and then ``hatch run test:test``. integration tests will fail if you do not have a local Elasticsearch running (see the ``docker_test/scripts`` directory for some help with that). 8.13.0 (2 April 2024) --------------------- **Changes** * Version bump: ``elasticsearch8==8.13.0`` 8.12.9 (26 March 2024) ---------------------- **Bugfix** * Reported in #1708. Default values (rather than None values) were overriding what was in config files. As a result, these default values from command-line settings were overriding important settings which were set properly in the configuration file. Hat tip to @rgaduput for reporting this. **Changes** * Updated cli_example.py to make the ``show_all_options`` sub-command show the proper environment variables. This entailed resetting the context_settings. A note explaining the why is now in the comments above that function. * Updates to reflect the default values in the command-line were made in the tutorial and example documentation pages. * A new documentation page was created specific to environment variables. * Version bump ``voluptuous==0.14.2`` from ``0.14.1`` 8.12.8 (20 March 2024) ---------------------- **Bugfix** * Really batting 1000 today. 
Missed some version bumps. 8.12.7 (20 March 2024) ---------------------- **Bugfix** * Erroneously removed ``six`` dependency. It's back at ``1.16.0``. 8.12.6 (20 March 2024) ---------------------- **Changes** * After reading and re-reading through the tutorial, I made a few doc changes. * ``ctx.obj`` is instantiated in ``helpers.config.context_settings`` now, saving yet another line of code from being needed in a functional command-line script. * Decided it was actually time to programmatically approach the huge list of decorators necessary to make ``es_client`` work in the example. Now there's a single decorator, ``@options_from_dict()`` in ``helpers.config``, and it takes a dictionary as an argument. The form of this dictionary should be: .. code-block:: python { "option1": {"onoff": {}, "override": {}, "settings": {}}, "option2": {"onoff": {}, "override": {}, "settings": {}}, # ... "optionN": {"onoff": {}, "override": {}, "settings": {}}, } The defaults are provided in ``helpers.defaults`` as constants ``OPTION_DEFAULTS`` and ``SHOW_EVERYTHING``. These can be overridden programmatically or very tediously manually. * Dependency version bumps: .. code-block:: python elasticsearch8==8.12.1 certifi==2024.2.2 8.12.5 (4 February 2024) ------------------------ **Changes** After some usage, it seems wise to remove redundancy in calling params and config in the functions in ``helpers.config``. This is especially true since ``ctx`` already has all of the params, and ``ctx.params['config']`` has the config file (if specified). It necessitated a more irritating revamp of the tests to make it work (why, Click? Why can't a Context be provided and just work?), but it does work cleanly now, with those clean looking function calls. New standards include: * ENVIRONMENT VARIABLE SUPPORT. Very big. 
Suffice to say that all command-line options can now be set by an environment variable by putting the prefix ``ESCLIENT_`` in front of the uppercase option name, and replace any hyphens with underscores. ``--http-compress True`` is settable by having ``ESCLIENT_HTTP_COMPRESS=1``. Boolean values are 1, 0, True, or False (case-insensitive). Options like ``hosts`` which can have multiple values just need to have whitespace between the values: .. code-block:: shell ESCLIENT_HOSTS='http://127.0.0.1:9200 http://localhost:9200' It splits perfectly. This is big news for the containerization/k8s community. You won't have to have all of the options spilled out any more. Just have the environment variables assigned. * ``ctx.obj['default_config']`` will be the place to insert a default configuration file _before_ calling ``helpers.config.get_config()``. * ``helpers.config.get_arg_objects()`` will now set ``ctx.obj['client_args'] = ClientArgs()`` and ``ctx.obj['other_args'] = OtherArgs()``, where they become part of ``ctx.obj`` and are accessible thereby. * ``helpers.config.generate_configdict`` will now populate ``ctx.obj['configdict']`` * ``Builder(configdict=ctx.obj['configdict'])`` will work, as will ``helpers.config.get_client(configdict=ctx.obj['configdict'])`` In fact, this has been so simplified now that the flow of a command-line app is as simple as: .. code-block:: python def myapp(ctx, *args): ctx.obj = {} ctx.obj['default_config'] = '/path/to/cfg.yaml' get_config(ctx) configure_logging(ctx) generate_configdict(ctx) es_client = get_client(configdict=ctx.obj['configdict']) # Your other code... Additionally, the log blacklist functionality has been added to the command-line, the default settings, the ``helpers.logging`` module, and the ``cli_example``, which should be welcome news to the containerized world. Major work to standardize the documentation has also been undertaken. In fact, there is now a tutorial on how to make a command-line app in the documentation. 
8.12.4 (1 February 2024) ------------------------ **Fixes** The try/except block for Docker logging needed to be out one level farther. This should fix the permissions error issues at last. 8.12.3 (31 January 2024) ------------------------ **Change** Since I'm doing Schema validation here now, I think it appropriate to have a dedicated exception for SchemaCheck failures. This will be FailedValidation. 8.12.2 (31 January 2024) ------------------------ **Fixes** In trying to make ``SchemaCheck`` reusable, I discovered that it was _always_ unconditionally attempting to apply the ``password_filter`` on every ``config`` coming through. An empty filter shows up as ``None``, causing an AttributeError exception. Going to only do ``password_filter`` when ``config`` is a ``dict``. 8.12.1 (31 January 2024) ------------------------ **Announcement** **TL;DR —** I got sick of coding the same lines over and over again, and copy/pasting between projects. I put that code here to make it easier to reuse. You can now make CLI/Click-related functionality more portable for your apps using ``es_client``. There is not really any change to the base ``Builder`` class, nor the ``ClientArgs`` or ``OtherArgs`` classes, so this is more a function of support tools and tooling for handling the overriding of config file options with those supplied at a command-line. The improvements are visible in ``cli_example.py``. Some of these changes include: * Functions that simplify overriding configuration file options with ones from the command-line. Reduces dozens of lines of code to a single function call: ``get_args(ctx.params, config)``, which overrides the values from ``config`` with the command-line parameters from Click. * Re-usable ``cli_opts`` Click option wrapper function, complete with overrides. This is demonstrated with the hidden options vs. ``show-all-options`` in ``cli_example.py``. 
* Support basic logging configuration with ``default``, ``json``, and ``ecs`` * New modules in ``es_client.helpers``: * ``config`` * ``logging`` * Lots and lots of tests, both unit and integration. * Updated all documentation for modules, functions, and classes accordingly. 8.12.0 (29 January 2024) ------------------------ **Changes** * Dependency version bumps in this release: * ``elasticsearch8==8.12.0`` * ``voluptuous>=0.14.1`` * ``certifi>=2023.11.17`` 8.11.0 (15 November 2023) ------------------------- **Changes** * Dependency version bumps in this version: * ``elasticsearch8==8.11.0`` * Replace ``Mock`` with ``unittest.Mock`` in unit tests. * Add Python 3.12 as a supported version (tested). 8.10.3 (2 October 2023) ----------------------- **Fixes** Missed a few of the hidden options, and found a way to force the help output to show for ``show-all-options`` without needing to add ``--help`` afterwards. 8.10.2 (2 October 2023) ----------------------- **Announcement** Again, no change in functionality. Changing some of the CLI options to be hidden by default (but still usable). These options include: * ``bearer_auth`` * ``opaque_id`` * ``http_compress`` * ``ssl_assert_hostname`` * ``ssl_assert_fingerprint`` * ``ssl_version`` * ``master-only`` * ``skip_version_test`` This will hopefully not surprise anyone too badly. I haven't heard of anyone using these options yet. The CLI example has been configured with a ``show-all-options`` command that will show all of the hidden options. 8.10.1 (29 September 2023) -------------------------- **Announcement** No change in functionality. Adding some ways to make CLI building via Click easier for end users by making the basic arguments part of the ``es_client`` code. This is shown in the Example in the docs and in the code in file ``example_cli.py``. 
8.10.0 (25 September 2023) -------------------------- **Announcement** The only changes in this release are dependency version bumps: * ``elasticsearch8==8.10.0`` * ``click==8.1.7`` 8.9.0 (31 July 2023) -------------------- **Announcement** The only changes in this release are dependency version bumps: * ``elasticsearch8==8.9.0`` * ``click==8.1.6`` * ``certifi==2023.7.22`` 8.8.2.post1 (18 July 2023) -------------------------- **Breakfix** * PyYAML 6.0.1 was released to address Cython 3 compile issues. 8.8.2 (12 July 2023) -------------------- **Announcement** Apologies for another delayed release. Weddings and funerals and graduations have kept me from releasing anything in the interim. **Changes** * Bring up to date with Elasticsearch 8.8.2 Python client * Other updated Python modules: * ``certifi>=2023.5.7`` * ``click==8.1.4`` 8.7.0 (12 April 2023) --------------------- **Announcement** Apologies for the delayed release. I have had some personal matters that had me out of office for several weeks. **Changes** * Bring up to date with Elasticsearch 8.7.0 Python client. * Add ``mock`` to the list of modules for testing 8.6.2.post1 (23 March 2023) --------------------------- **Announcement** Late 8.6.2 post-release. **Changes** * Fix certificate detection. See #33. * Add one-line API Key support (the Base64 encoded one). * Update docs to reflect base64 token API Key functionality. 8.6.2 (19 February 2023) ------------------------ **Announcement** Version sync with released Elasticsearch Python module. **Changes** * Fix ``cloud_id`` and ``hosts`` collision detection and add test to cover this case. * Code readability improvements (primarily for documentation). * Documentation readability improvements, and improved cross-linking. * Add example cli script to docs. 8.6.1.post1 (30 January 2023) ----------------------------- **Announcement** Even though I had a test in place for catching and fixing the absence of a port with ``https``, it didn't work in the field. 
Fix included. **Changes** * Fixed unverified URL schema issue. * Found and corrected another place where passwords were being logged inappropriately. 8.6.1 (30 January 2023) ----------------------- **Announcement** With all of these changes, I kept this in-house and did local builds and ``pip`` imports until I worked it all out. **Changes** * Circular imports between ``es_client.helpers.utils`` and ``es_client.helpers.schemacheck`` broke things. Since ``password_filter`` is not presently being used by anything else, I moved it to ``schemacheck.py``. * Use ``hatch`` and ``hatchling`` for package building instead of ``flit``. * Update ``elasticsearch8`` dependency to ``8.6.1`` * Removed the ``requirements.txt`` file as this is now handled by ``pyproject.toml`` and doing ``pip install .`` to grab dependencies and install them. YAY! Only one place to track dependencies now!!! * Removed the ``MANIFEST.in`` file as this is now handled by ``pyproject.toml`` as well. * Update the docs build settings to use Python 3.11 and ``elasticsearch8==8.6.1`` 8.6.0.post6 (26 January 2023) ----------------------------- **Announcement** I'm just cranking these out today! The truth is, I'm catching more things with the increased scrutiny of heavy Curator testing. This is good, right? **Changes** * Discovered that passwords were being logged. Added a function to replace any value from a key (from ``KEYS_TO_REDACT`` in ``defaults.py``) with ``REDACTED``. Keys are ``['password', 'basic_auth', 'bearer_auth', 'api_key', 'id', 'opaque_id']`` 8.6.0.post5 (26 January 2023) ----------------------------- **Changes** * Python 3.11 was unofficially supported in 8.6.0.post4. It is now officially listed in ``pyproject.toml`` as a supported version. * Discovered that Builder was not validating Elasticsearch host URLs, and not catching those lead to an invisible failure in Curator. 
8.6.0.post4 (26 January 2023) ----------------------------- **Changes** * Fix an example in ``README.rst`` that showed the old and no longer viable way to get the client. New example reflects the current way. * Purge older setuptools files ``setup.py`` and ``setup.cfg`` in favor of building with ``flit``, using ``pyproject.toml``. Testing and dependencies here should install properly with ``pip install -U '.[test]'``. After this, testing works with ``pytest``, or ``pytest --cov=es_client --cov-report html:cov_html`` (``cov_html`` was added to ``.gitignore``). These changes appear to be necessary to build functional packages for Python 3.11. * Building now works with ``flit``. First ``pip install flit``, then ``flit build``. 8.6.0.post3 (19 January 2023) ----------------------------- **Changes** * Improve ``helpers.utils`` function ``verify_url_schema`` ability to catch malformed URLs. Added tests to verify functionality. * Improve Docker test scripts. Now there's only one set of scripts in ``docker_test/scripts``. ``create.sh`` requires a semver version of Elasticsearch at the command-line, and it will build and launch a docker image based on that version. For example, ``./create.sh 8.6.0`` will create a test image. Likewise, ``destroy.sh`` will clean it up afterwards, and also remove the ``Dockerfile`` which is created from the ``Dockerfile.tmpl`` template. 8.6.0.post2 (18 January 2023) ----------------------------- **Changes** * Move the ``get_version`` method to its own function so other programs can also use it. * Pylint cleanup of most files 8.6.0.post1 (17 January 2023) ----------------------------- **Changes** * Python prefers its own version to SemVer, so there are no changes but one of nomenclature. 8.6.0+build.2 (17 January 2023) ------------------------------- **Changes** * Improve the client configuration parsing behavior. 
If absolutely no config is given, then set ``hosts`` to ``http://127.0.0.1:9200``, which mirrors the ``elasticsearch8`` client default behavior. 8.6.0 (11 January 2023) ---------------------- **Changes** * Version bump ``elasticsearch8==8.6.0`` * Add Docker test environment for Elasticsearch 8.6.0 **Fixes** * Docker test environment for 8.5.3 was still running Elasticsearch version 8.4.3. This has been corrected. 8.5.0 (11 January 2023) ----------------------- **Changes** * Version bump ``elasticsearch8==8.5.3`` * Version bump ``certifi>=2022.12.7`` * Add Docker test env for Elasticsearch 8.5.3 8.1.0 (3 November 2022) ----------------------- **Breaking Changes** Yeah. I know. It's not semver, but I don't care. This is a needed improvement, and I'm the only one using this so far as I know, so it shouldn't affect anyone in a big way. * ``Builder`` now will not work unless you provide either a ``configdict`` or ``configfile``. It will read and verify a YAML ``configfile`` if provided without needing to do any other steps now. * ``Builder.client_args`` is not a dictionary any more, but a subclass with regular attributes. Yes, you can get and set attributes however you like now: .. code-block:: python b = Builder(configdict=mydict, autoconnect=False) print('Provided hosts = %s' % b.client_args.hosts) b.client_args.hosts = ['https://sub.domain.tld:3456'] print('Updated hosts = %s' % b.client_args.hosts) b.connect() Yes, this will effectively change the entry for ``hosts`` and connect to it instead of whatever was provided. You can still get a full ``dict`` of the client args with ``Builder.client_args.asdict()`` * ``Builder.other_args`` (reading in ``other_settings`` from the config) now works the same as ``Builder.client_args``. See the above for more info. **Changes** * Add new classes ``ClientArgs`` and ``OtherArgs``. Using classes like these make setting defaults, updates, and changes super simple. Now everything is an attribute! 
And it's still super simple to get a ``dict`` of settings back using ``ClientArgs.asdict()`` or ``OtherArgs.asdict()``. This change makes it super simple to create this kind of object, override settings from a default or command-line options, and then export a ``configdict`` based on these objects to ``Builder``, as you can see in the new sample script ``cli_example.py`` for overriding a config file with command-line settings. * Added *sample* CLI override capacity using ``click``. This will make Curator and other projects easier. It's not even required, but a working example helps show the possibilities. You can run whatever you like with ``click``, or stick with config files, or whatever floats your boat. * The above change also means pulling in ``click`` as a dependency. * Moved some methods out of ``Builder`` to be functions in ``es_client.helpers.utils`` instead. * Updated tests to work with all of these changes, and added new ones for new functions. 8.0.5 (28 October 2022) ----------------------- **Changes** * Version bumped `elasticsearch8` module to 8.4.3 * Version bumped `certifi` module to 2022.9.24 * Added Docker tests for Elasticsearch 8.4.3 8.0.4 (23 August 2022) ---------------------- **Changes** * Hopefully the last niggling detail. Removed erroneous reference to AWS ES and ``boto3`` compatibility from the description sent to PyPi. 8.0.3 (23 August 2022) ---------------------- **Changes** * Added ``setup_requires`` section to ``setup.cfg``. ``es_client`` doesn't _need_ to have ``setuptools`` to install. 
* Unpinned from top-level version of ``setuptools`` to allow anything greater than ``setuptools>=59.0.1`` to fit with Curator's need for ``cx_Freeze``, which can't currently use ``setuptools>60.10.0`` 8.0.2 (23 August 2022) ---------------------- **Changes** * Several more doc fixes to make things work on ReadTheDocs.io 8.0.1 (23 August 2022) ---------------------- **Changes** * Update test platform from ancient ``nose`` and ``UnitTest`` framework to use ``pytest``. This also allows the client to run on Python 3.10. * Update ``README.rst`` so both GitHub and PyPi reflects what's in the documentation. 8.0.0 (22 August 2022) ---------------------- **New Features** * Use ``elasticsearch8==8.3.3`` library with this release. * Updated all APIs to reflect updated library usage patterns as many APIs have changed. * Native support for API keys * Native support for Cloud ID URL types * Updated tests for better coverage * Removed all AWS authentication as the ``elasticsearch8`` library no longer connects to AWS ES instances. 1.1.1 (19 April 2018) --------------------- **Changes** * Disregard root-level keys other than ``elasticsearch`` in the supplied configuration dictionary. This makes it much easier to pass in a complete configuration and only extract the `elasticsearch` part. * Validate that a dictionary was passed, as opposed to other types. 1.1.0 (19 April 2018) --------------------- **New Features** * Add YAML configuration file reading capability so that part is included here, rather than having to be bolted on by the user later on. **Changes** * Moved some of the utility functions to the ``Builder`` class as they were not needed outside the class. While this would be a semver breaking change, the library is young enough that I think it will be okay, and it doesn't break anything else. 
* Put the default Elasticsearch version min and max values in ``default.py`` 1.0.1 (12 April 2018) --------------------- **Bug Fixes** * It was late, and I forgot to update ``MANIFEST.in`` to include subdirectories of ``es_client``. This has been addressed in this release. 1.0.0 (11 April 2018) --------------------- **Initial Release** es_client-8.17.4/docs/Makefile000066400000000000000000000011361476634113400161620ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SPHINXPROJ = es_client SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)es_client-8.17.4/docs/advanced.rst000066400000000000000000000164511476634113400170270ustar00rootroot00000000000000.. _tutorial2: #################### Tutorial 2: Advanced #################### ********************** It's the little things ********************** If you haven't gone through the regular :ref:`tutorial` yet, you should definitely look there first. The following are little things that will help with making that app more complete. .. _setting_version: ******************************* Setting the application version ******************************* You probably noticed that there's a version output flag in the help/usage output: .. code-block:: console -v, --version Show the version and exit. If you leave this as-is, it will only ever show the version of ``es_client``, so let's see how to change this to be our own version. =================== Where's my version? 
=================== Most PEP compliant releases of a project will have a ``__version__`` defined somewhere. By default, Click will attempt to guess the version from that value. It does so successfully with ``es_client`` in our example script. .. code-block:: python @click.version_option(None, '-v', '--version', prog_name="cli_example") If Click guesses wrong, you can try to tell it which package to check: .. code-block:: python @click.version_option(None, '-v', '--version', package_name='es_client', prog_name="cli_example") And if that still doesn't work, you can manually specify a version: .. code-block:: python @click.version_option('X.Y.Z', '-v', '--version', prog_name="cli_example") or directly reference your ``__version__``: .. code-block:: python from es_client import __version__ # ... @click.version_option(__version__, '-v', '--version', prog_name="cli_example") With regard to ``prog_name``, the documentation says, "The name of the CLI to show in the message. If not provided, it will be detected from the command." If I leave ``prog_name`` unset and run the version output, I would see: .. code-block:: console run_script.py, version X.Y.Z But with it set, I see: .. code-block:: console cli_example, version X.Y.Z But you can also format the output of this using ``message``. According to the documentation, "The message to show. The values ``%(prog)s``, ``%(package)s``, and ``%(version)s`` are available. Defaults to ``"%(prog)s, version %(version)s"``." So if I set: .. code-block:: python @click.version_option( None, '-v', '--version', prog_name="cli_example", message='%(prog)s from %(package)s, version %(version)s') I would see: .. code-block:: console python run_script.py -v ─╯ cli_example from es_client, version X.Y.Z .. 
_importing: ***************************************** Importing es_client into your own project ***************************************** It's all well and good to test against the es_client code, but wouldn't you rather make use of it in your own code? ================================= Include es_client as a dependency ================================= If you're following PEP conventions, your project probably has a ``pyproject.toml`` file. Inside that file will be a header labeled ``[project]``, and under that section will be a subsection titled ``dependencies`` followed by a list of modules your project depends on. This is where you need to list ``es_client`` as a dependency: .. code-block:: dependencies = [ ... "es_client==X.Y.Z" ... ] You will probably need to do something to make sure it's imported into your virtualenv while you are coding and testing. Having it installed allows IDEs and similar coding environments to help with documentation and code completion. Installing dependencies can be accomplished by running: .. code-block:: console pip install -U . If run from the root of your project, this will install all dependencies in ``pyproject.toml``. ===================== Import into your code ===================== Once ``es_client`` is available to your code, you can import it or any of its classes, submodules, functions and constants. This pattern is visible in the example script at the top of the page: .. code-block:: python from es_client.helpers.config import ( context_settings, generate_configdict, get_client, get_config, options_from_dict) from es_client.defaults import OPTION_DEFAULTS, SHOW_EVERYTHING from es_client.helpers.logging import configure_logging ================== "Secret Borrowing" ================== "Good artists borrow. Great artists steal." (Attributed to Pablo Picasso) It's completely acceptable and appropriate to copy the :ref:`example script ` and use it as the basis for your own application. 
Why re-invent the wheel when you have a working wheel that you only need to tweak a bit? ----------------------------- Add your bits or link to them ----------------------------- If your code is ready to go and just needs es_client, then you should know what to do now. First, import the dependencies: .. code-block:: python import click from es_client.helpers.config import ( context_settings, generate_configdict, get_client, get_config, options_from_dict) from es_client.defaults import OPTION_DEFAULTS, SHOW_EVERYTHING from es_client.helpers.logging import configure_logging Then, create a Click command that will allow you to collect all of the settings needed to create a client connection: .. code-block:: python @click.group(context_settings=context_settings()) @options_from_dict(OPTION_DEFAULTS) @click.version_option(None, '-v', '--version', prog_name="cli_example") @click.pass_context def run(ctx, config, hosts, cloud_id, api_token, id, api_key, username, password, bearer_auth, opaque_id, request_timeout, http_compress, verify_certs, ca_certs, client_cert, client_key, ssl_assert_hostname, ssl_assert_fingerprint, ssl_version, master_only, skip_version_test, loglevel, logfile, logformat, blacklist ): """ CLI Example Any text added to a docstring will show up in the --help/usage output. Set short_help='' in @func.command() definitions for each command for terse descriptions in the main help/usage output, as with show_all_options() in this example. """ ctx.obj['default_config'] = None get_config(ctx, quiet=False) configure_logging(ctx) generate_configdict(ctx) @run.command() @click.pass_context def my_command(ctx): client = get_client(configdict=ctx.obj['configdict']) # your code goes here This will follow the pattern where you get the credentials and settings in the root-level command, and then tell it you want to run ``my_command`` where a client connection will be established and then your code uses it however you like! 
Note that we use the name of our root-level command as the name of the decorator: ``@run.command()``. This guarantees that ``my_command`` will be a sub-command of ``run``. To run this automatically when this file is called, put this at the end of the file: .. code-block:: python if __name__ == '__main__': run() Calling your script like ``python my_script.py`` will now automatically call your ``run`` function, and you're on your way! .. _more_advanced: **************** Watch This Space **************** More advanced tutorials will follow!es_client-8.17.4/docs/api.rst000066400000000000000000000046261476634113400160340ustar00rootroot00000000000000.. _api: ES Client API reference ####################### .. _builder: Builder Class ============= .. autoclass:: es_client.builder.Builder :members: Builder Attribute Errata ------------------------ :client: The :py:class:`~.elasticsearch.Elasticsearch` object is only created after passing all other tests, and if ``autoconnect`` is ``True``, or :py:meth:`~.es_client.builder.Builder.connect` has been called. :is_master: Initially set to ``None``, this value is set automatically if ``autoconnect`` is ``True``. It can otherwise be set by calling :py:meth:`~.es_client.builder.Builder._find_master` after :py:meth:`~.es_client.builder.Builder._get_client` has been called first. Class Instantiation Flow ------------------------ #. Check to see if ``elasticsearch`` key is in the supplied ``raw_config`` dictionary. Log a warning about using defaults if it is not. #. Run :py:meth:`~.es_client.builder.Builder._check_config` on ``raw_config`` #. Set instance attributes ``version_max`` and ``version_min`` with the provided values. #. Set instance attribute ``master_only`` to the value from ``raw_config`` #. Initialize instance attribute ``is_master`` with a ``None`` #. Set instance attribute ``skip_version_test`` to the value from ``raw_config`` #. 
Execute :py:meth:`~.es_client.builder.Builder._check_ssl` to ensure we have at least the `certifi <https://github.com/certifi/python-certifi>`_ signing certificates.
See the License for the # specific language governing permissions and limitations # under the License. # pylint: disable=redefined-builtin, invalid-name import sys import os from datetime import datetime from es_client import __version__ as ver COPYRIGHT_YEARS = f"2022-{datetime.now().year}" # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("../")) extensions = ["sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.intersphinx"] autoclass_content = "both" # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix of source filenames. source_suffix = ".rst" # The master toctree document. master_doc = "index" # General information about the project. project = "es_client" author = "Aaron Mildenstein" copyright = f"{COPYRIGHT_YEARS}, Aaron Mildenstein" release = ver version = ".".join(release.split(".")[:2]) exclude_patterns = ["_build"] pygments_style = "sphinx" on_rtd = os.environ.get("READTHEDOCS", None) == "True" if not on_rtd: # only import and set the theme if we're building docs locally import sphinx_rtd_theme html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] intersphinx_mapping = { "python": ("https://docs.python.org/3.12", None), "elasticsearch8": ("https://elasticsearch-py.readthedocs.io/en/v8.17.2", None), "elastic-transport": ( "https://elastic-transport-python.readthedocs.io/en/stable", None, ), "voluptuous": ("http://alecthomas.github.io/voluptuous/docs/_build/html", None), "click": ("https://click.palletsprojects.com/en/8.1.x", None), } es_client-8.17.4/docs/defaults.rst000066400000000000000000000206131476634113400170640ustar00rootroot00000000000000.. _defaults: Default Values -------------- .. 
The :py:class:`~.es_client.builder.Builder` class expects either a ``dict`` (`configdict`) or a YAML file (`configfile`) of configuration settings.
:username: :py:class:`str`: `(Optional)` If both ``username`` and ``password`` are provided, they will be used to create the necessary ``tuple`` for ``basic_auth``. An exception will be thrown if only one is provided.

:password: :py:class:`str`: `(Optional)` If both ``username`` and ``password`` are provided, they will be used to create the necessary ``tuple`` for ``basic_auth``. An exception will be thrown if only one is provided.
:ca_certs: :py:class:`str`: `(Optional)` optional path to CA bundle. If using https scheme and ``ca_certs`` is not configured, ``es_client`` will automatically use ``certifi`` provided certificates.

:client_cert: :py:class:`str`: `(Optional)` path to the file containing the private key and the certificate, or cert only if using ``client_key``

:client_key: :py:class:`str`: `(Optional)` path to the file containing the private key if using separate cert and key files (``client_cert`` will contain only the cert)

:ssl_assert_hostname: :py:class:`str`: `(Optional)` Hostname or IP address to verify on the node's certificate. This is useful if the certificate contains a different value than the one supplied in ``host``. An example of this situation is connecting to an IP address instead of a hostname. Set to ``False`` to disable certificate hostname verification.

:ssl_assert_fingerprint: :py:class:`str`: SHA-256 fingerprint of the node's certificate. If this value is given then root-of-trust verification isn't done and only the node's certificate fingerprint is verified. On CPython 3.10+ this also verifies if any certificate in the chain including the Root CA matches this fingerprint.
:ssl_context: :py:class:`ssl.SSLContext`: Pre-configured :py:class:`ssl.SSLContext` object. If this value is given then no other TLS options (besides ``ssl_assert_fingerprint``) can be set on the :py:class:`~.elastic_transport.NodeConfig`.
This should prove exceptionally useful in containerized applications like Kubernetes or Docker. Usage ----- A configuration file example: .. code-block:: yaml elasticsearch: client: hosts: http://127.0.0.1:9200 other_settings: username: user password: pass Which would be run as follows: .. code-block:: shell myapp.py --config /path/to/config.yml or a command-line example: .. code-block:: shell myapp.py --hosts http://127.0.0.1:9200 --username user --password pass Can *both* be executed with *no* configuration file and *no* command-line options as follows: .. code-block:: shell ESCLIENT_HOSTS=http://127.0.0.1:9200 ESCLIENT_USERNAME=user ESCLIENT_PASSWORD=pass myapp.py In Kubernetes or Docker based applications, these environment variables can be set in advance, making the program call exceptionally clean. Of course, you're still welcome to use a configuration file, but identify it with an environment variable: .. code-block:: shell ESCLIENT_CONFIG=/path/to/config.yml myapp.py List of Environment Variables ----------------------------- .. 
list-table:: Commonly Used Environment Variables :widths: 33 33 34 :header-rows: 1 * - Configuration File - Command-Line - Environment Variable * - - ``--config`` - ESCLIENT_CONFIG * - hosts - ``--hosts`` - :ref:`ESCLIENT_HOSTS ` * - cloud_id - ``--cloud_id`` - ESCLIENT_CLOUD_ID * - token - ``--api_token`` - ESCLIENT_API_TOKEN * - id - ``--id`` - ESCLIENT_ID * - api_key - ``--api_key`` - ESCLIENT_API_KEY * - username - ``--username`` - ESCLIENT_USERNAME * - password - ``--password`` - ESCLIENT_PASSWORD * - request_timeout - ``--request_timeout`` - ESCLIENT_REQUEST_TIMEOUT * - verify_certs - ``--verify_certs`` - :ref:`ESCLIENT_VERIFY_CERTS ` * - ca_certs - ``--ca_certs`` - ESCLIENT_CA_CERTS * - client_cert - ``--client_cert`` - ESCLIENT_CLIENT_CERT * - client_key - ``--client_key`` - ESCLIENT_CLIENT_KEY * - loglevel - ``--loglevel`` - ESCLIENT_LOGLEVEL * - logfile - ``--logfile`` - ESCLIENT_LOGFILE * - logformat - ``--logformat`` - ESCLIENT_LOGFORMAT .. list-table:: Uncommon Environment Variables :widths: 33 33 34 :header-rows: 1 * - Configuration File - Command-Line - Environment Variable * - blacklist - ``--blacklist`` - :ref:`ESCLIENT_BLACKLIST ` * - master_only - ``--master-only`` - :ref:`ESCLIENT_MASTER_ONLY ` * - skip_version_test - ``--skip_version_test`` - :ref:`ESCLIENT_SKIP_VERSION_TEST ` * - bearer_auth - ``--bearer_auth`` - ESCLIENT_BEARER_AUTH * - opaque_id - ``--opaque_id`` - ESCLIENT_OPAQUE_ID * - http_compress - ``--http_compress`` - :ref:`ESCLIENT_HTTP_COMPRESS ` * - ssl_version - ``--ssl_version`` - ESCLIENT_SSL_VERSION * - ssl_assert_hostname - ``--ssl_assert_hostname`` - ESCLIENT_SSL_ASSERT_HOSTNAME * - ssl_assert_fingerprint - ``--ssl_assert_fingerprint`` - ESCLIENT_SSL_ASSERT_FINGERPRINT .. _envvars_multiple: Settings With Multiple Values ----------------------------- .. 
Where boolean values are accepted, as with the verify_certs setting, this is done with any acceptable boolean-esque value, e.g. 0, F, False for false values, or 1, T, True for true values:
_example: Example Script ############## This example command-line script file is part of the es_client source code and is at ``./es_client/cli_example.py``. The wrapper script ``run_script.py`` is at the root-level of the code at ``./run_script.py`` and will automatically target the ``cli_example.py`` script. ``es_client`` in Action ======================= Whether you have a running version of Elasticsearch or not, you can execute this script as outlined so long as the Python dependencies are installed. If you've cloned the github repository, this can be done by running the following command: Install Prerequisites --------------------- .. warning:: I highly recommend setting up a Python virtualenv of some kind before running ``pip`` .. code-block:: shell pip install -U '.[doc,test]' Run the Script with ``--help`` or ``-h`` ---------------------------------------- With the dependencies installed, the script should just run: .. code-block:: shell python run_script.py --help Running the command will show the command-line help/usage output: Output ^^^^^^ .. code-block:: shell-session Usage: run_script.py [OPTIONS] COMMAND [ARGS]... CLI Example Any text added to a docstring will show up in the --help/usage output. Set short_help='' in @func.command() definitions for each command for terse descriptions in the main help/usage output, as with show_all_options() in this example. Options: --config PATH Path to configuration file. --hosts TEXT Elasticsearch URL to connect to. 
--cloud_id TEXT Elastic Cloud instance id --api_token TEXT The base64 encoded API Key token --id TEXT API Key "id" value --api_key TEXT API Key "api_key" value --username TEXT Elasticsearch username --password TEXT Elasticsearch password --request_timeout FLOAT Request timeout in seconds --verify_certs / --no-verify_certs Verify SSL/TLS certificate(s) [default: verify_certs] --ca_certs TEXT Path to CA certificate file or directory --client_cert TEXT Path to client certificate file --client_key TEXT Path to client key file --loglevel [DEBUG|INFO|WARNING|ERROR|CRITICAL] Log level --logfile TEXT Log file --logformat [default|json|ecs] Log output format -v, --version Show the version and exit. -h, --help Show this message and exit. Commands: show-all-options Show all configuration options test-connection Test connection to Elasticsearch Run the Script with a Command ----------------------------- At the bottom of the usage/help output, you should see ``show-all-options`` and ``test-connection``. Let's re-run the script with ``show-all-options``: .. code-block:: shell python run_script.py show-all-options Perhaps you're confused to see another help/usage output. But there's a difference: Output ^^^^^^ .. code-block:: shell-session Usage: run_script.py show-all-options [OPTIONS] ALL OPTIONS SHOWN The full list of options available for configuring a connection at the command-line. Options: --config PATH Path to configuration file. [env var: ESCLIENT_CONFIG] --hosts TEXT Elasticsearch URL to connect to. 
[env var: ESCLIENT_HOSTS] --cloud_id TEXT Elastic Cloud instance id [env var: ESCLIENT_CLOUD_ID] --api_token TEXT The base64 encoded API Key token [env var: ESCLIENT_API_TOKEN] --id TEXT API Key "id" value [env var: ESCLIENT_ID] --api_key TEXT API Key "api_key" value [env var: ESCLIENT_API_KEY] --username TEXT Elasticsearch username [env var: ESCLIENT_USERNAME] --password TEXT Elasticsearch password [env var: ESCLIENT_PASSWORD] --bearer_auth TEXT Bearer authentication token [env var: ESCLIENT_BEARER_AUTH] --opaque_id TEXT X-Opaque-Id HTTP header value [env var: ESCLIENT_OPAQUE_ID] --request_timeout FLOAT Request timeout in seconds [env var: ESCLIENT_REQUEST_TIMEOUT] --http_compress / --no-http_compress Enable HTTP compression [env var: ESCLIENT_HTTP_COMPRESS] --verify_certs / --no-verify_certs Verify SSL/TLS certificate(s) [env var: ESCLIENT_VERIFY_CERTS] --ca_certs TEXT Path to CA certificate file or directory [env var: ESCLIENT_CA_CERTS] --client_cert TEXT Path to client certificate file [env var: ESCLIENT_CLIENT_CERT] --client_key TEXT Path to client key file [env var: ESCLIENT_CLIENT_KEY] --ssl_assert_hostname TEXT Hostname or IP address to verify on the node's certificate. [env var: ESCLIENT_SSL_ASSERT_HOSTNAME] --ssl_assert_fingerprint TEXT SHA-256 fingerprint of the node's certificate. If this value is given then root-of-trust verification isn't done and only the node's certificate fingerprint is verified. 
[env var: ESCLIENT_SSL_ASSERT_FINGERPRINT] --ssl_version TEXT Minimum acceptable TLS/SSL version [env var: ESCLIENT_SSL_VERSION] --master-only / --no-master-only Only run if the single host provided is the elected master [env var: ESCLIENT_MASTER_ONLY] --skip_version_test / --no-skip_version_test Elasticsearch version compatibility check [env var: ESCLIENT_SKIP_VERSION_TEST] --loglevel [DEBUG|INFO|WARNING|ERROR|CRITICAL] Log level [env var: ESCLIENT_LOGLEVEL] --logfile TEXT Log file [env var: ESCLIENT_LOGFILE] --logformat [default|json|ecs] Log output format [env var: ESCLIENT_LOGFORMAT] --blacklist TEXT Named entities will not be logged [env var: ESCLIENT_BLACKLIST] -v, --version Show the version and exit. -h, --help Show this message and exit. Run the Script with a Command (continued) ----------------------------------------- A closer look will show that this help output is slightly different, and shows options that the first run did not. This is on purpose. This is to show how you can use Click to show or hide options at the command line. This can be done for multiple reasons, including hiding sensitive information. In this case, however, it's mostly to keep things clean and as terse as possible by showing only the most frequently used options. Run the Script with a live host ------------------------------- Now that we've come this far, it's time to run against a live instance of Elasticsearch! Let's re-run the script with the command ``test-connection``. This time, unless we're using a local instance of Elasticsearch running on the default URL of http://127.0.0.1:9200, we will need to specify a few options. Your options may vary, but let's assume you have an Elasticsearch instance in `Elastic Cloud `_ and you have a cloud_id and an API key to use: If my cloud_id were ``example:REDACTED``, and my API key was also ``apikey:REDACTED``, I could run: .. 
code-block:: shell python run_script.py --cloud_id example:REDACTED --api_token apikey:REDACTED test-connection If your API key came in two pieces rather than the base64 encoded single token, that's okay! You can make that work, too: .. code-block:: shell python run_script.py --cloud_id example:REDACTED --api_key KEYVALUE --id IDVALUE test-connection Or maybe you don't have a cloud_id, but you have a URL, and a username and a password: .. code-block:: shell python run_script.py --hosts URL --username USER --password PASS test-connection Maybe you have a YAML configuration file with all the options you need to use: .. code-block:: shell python run_script.py --config /path/to/config.yaml test-connection There are so many ways you can slice and dice this! Output ^^^^^^ If all went well, you should see something like this: .. code-block:: shell-session Connection result: {'name': 'NODENAME', 'cluster_name': 'CLUSTERNAME', 'cluster_uuid': 'UUID', 'version': {'number': '8.12.0', 'build_flavor': 'default', 'build_type': 'docker', 'build_hash': 'HASH', 'build_date': '2024-01-11T10:05:27.953830042Z', 'build_snapshot': False, 'lucene_version': '9.9.1', 'minimum_wire_compatibility_version': '7.17.0', 'minimum_index_compatibility_version': '7.0.0'}, 'tagline': 'You Know, for Search'} Option Errata ============= Most of the options should be straightforward, but a few should be explained. Multiples --------- The command-line options ``--hosts`` and ``--blacklist`` can be used multiple times in the same command-line, e.g. .. code-block:: shell python run_script.py --hosts http://127.0.0.1:9200 --hosts http://127.0.0.2:9200 ... This is especially nice for reducing log volume with log blacklisting! See one you don't want or need? Run it again with another ``--blacklist`` entry! Configuration File Override --------------------------- You can use a YAML configuration file for all options. But you can also mix configuration file settings with command-line options. 
Does this mean you have to set ``ESCLIENT_HTTP_COMPRESS`` to ``http_compress`` or ``no-http_compress``? No. In fact, don't do that.
Click is very smart and can interpret most boolean-esque settings. True values: 1, True, true, TRUE (pretty sure it's case-insensitive) False values: 0, False, false, FALSE So here's the real-world example: .. code-block:: shell ESCLIENT_HTTP_COMPRESS=1 python run_script.py test-connection And in the debug log output (redacted for brevity): .. code-block:: shell "Elasticsearch Configuration" config: {'client': {'hosts': ..., 'http_compress': True, You can take my word for it, or you can test it for yourself. It works. .. _my_own_app: Next Step: Make Your Own App Using ``es_client`` ================================================ Visit the :ref:`tutorial` for the next step! .. _example_file: File Source Code ================ This file is part of the source code and is at ``./es_client/cli_example.py``. .. literalinclude:: ../src/es_client/cli_example.py :language: python .. _included_commands: Included Commands ================= This module is referenced by ``./es_client/cli_example.py`` and includes the ``show-all-options`` and ``test-connection`` functions/commands available when running from the CLI. .. literalinclude:: ../src/es_client/commands.py :language: python es_client-8.17.4/docs/exceptions.rst000066400000000000000000000006051476634113400174350ustar00rootroot00000000000000.. _exceptions: Exceptions ---------- .. autoexception:: es_client.exceptions.ESClientException :members: .. autoexception:: es_client.exceptions.ConfigurationError :members: .. autoexception:: es_client.exceptions.MissingArgument :members: .. autoexception:: es_client.exceptions.NotMaster :members: .. autoexception:: es_client.exceptions.LoggingException :members: es_client-8.17.4/docs/helpers.rst000066400000000000000000000011671476634113400167220ustar00rootroot00000000000000.. _helpers: Helpers ####### .. _helpers_config: Config ====== .. automodule:: es_client.helpers.config :members: :private-members: :undoc-members: .. _helpers_logging: Logging ======= .. 
automodule:: es_client.helpers.logging :members: :private-members: :undoc-members: :member-order: bysource .. _helpers_schemacheck: SchemaCheck =========== .. automodule:: es_client.helpers.schemacheck :members: :private-members: :undoc-members: :member-order: bysource .. _helpers_utils: Utils ===== .. automodule:: es_client.helpers.utils :members: :private-members: :undoc-members: es_client-8.17.4/docs/index.rst000066400000000000000000000120411476634113400163600ustar00rootroot00000000000000.. es_client documentation master file ``es_client`` Documentation =========================== You may wonder why this even exists, as at first glance it doesn't seem to make anything any easier than just using :py:class:`~.elasticsearch.Elasticsearch` to build a client connection. I needed to be able to reuse the more complex schema validation bits I was employing, namely: * ``master_only`` detection * Elasticsearch version checking and validation, and the option to skip this. * Configuration value validation, including file paths for SSL certificates, meaning: * No unknown keys or unacceptable parameter values are accepted * Acceptable values and ranges are established (where known)--and easy to amend, if necessary. But that's just the tip of the iceberg. That's only the :ref:`builder`! In addition to a Builder class, there's an entire set of :ref:`helpers` and a :ref:`tutorial` to show you how to build your own command-line interface like :ref:`this one `: .. code-block:: shell Usage: run_script.py [OPTIONS] COMMAND [ARGS]... CLI Example Options: --config PATH Path to configuration file. --hosts TEXT Elasticsearch URL to connect to. 
--cloud_id TEXT Elastic Cloud instance id --api_token TEXT The base64 encoded API Key token --id TEXT API Key "id" value --api_key TEXT API Key "api_key" value --username TEXT Elasticsearch username --password TEXT Elasticsearch password --request_timeout FLOAT Request timeout in seconds --verify_certs / --no-verify_certs Verify SSL/TLS certificate(s) [default: verify_certs] --ca_certs TEXT Path to CA certificate file or directory --client_cert TEXT Path to client certificate file --client_key TEXT Path to client key file --loglevel [DEBUG|INFO|WARNING|ERROR|CRITICAL] Log level --logfile TEXT Log file --logformat [default|json|ecs] Log output format -v, --version Show the version and exit. -h, --help Show this message and exit. Commands: show-all-options Show all configuration options test-connection Test connection to Elasticsearch So, if you don't need these, then this library probably isn't what you're looking for. If you do want features like these, then you've come to the right place. Example Builder Class Usage --------------------------- .. code-block:: python from es_client import Builder config = { 'elasticsearch': { 'client': { 'hosts': 'https://10.0.0.123:9200', 'ca_certs': '/etc/elasticsearch/certs/ca.crt', 'request_timeout': 60, }, 'other_settings': { 'master_only': True, 'username': 'joe_user', 'password': 'password', } }, 'logging': { 'loglevel': 'INFO', 'logfile': '/path/to/file.log', 'logformat': 'default', 'blacklist': ['elastic_transport', 'urllib3'] } } builder = Builder(configdict=config) try: builder.connect() except: # Do exception handling here... client = builder.client Additionally, you can read from a YAML configuration file: .. 
code-block:: yaml --- elasticsearch: client: hosts: https://10.0.0.123:9200 ca_certs: /etc/elasticsearch/certs/ca.crt request_timeout: 60 other_settings: master_only: true username: joe_user password: password logging: loglevel: INFO logfile: /path/to/file.log logformat: default blacklist: ['elastic_transport', 'urllib3'] .. code-block:: python from es_client import Builder builder = Builder(configfile='/path/to/es_client.yml') try: builder.connect() except: # Do exception handling here... client = builder.client The same schema validations apply here as well. License ------- Copyright (c) 2022-2024 Aaron Mildenstein Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Contents -------- .. toctree:: api example tutorial advanced envvars defaults helpers exceptions Changelog :maxdepth: 5 Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` es_client-8.17.4/docs/requirements.txt000066400000000000000000000001051476634113400200010ustar00rootroot00000000000000elasticsearch8==8.17.1 voluptuous>=0.14.2 pyyaml==6.0.2 pint>=0.19.2 es_client-8.17.4/docs/tutorial.rst000066400000000000000000000431461476634113400171260ustar00rootroot00000000000000.. _tutorial: ######## Tutorial ######## ************************* Create A Command-Line App ************************* If you haven't gone through the :ref:`example` yet, you should do a once-over there before proceeding here. Now that we see the power of the command-line that is ready for the taking, what's the next step? 
How do you make your own app work with ``es_client``? As StackOverflow as it may sound, feel free to clone the :ref:`example file ` and :ref:`included commands ` and start there. I've done the ground work so you don't have to. .. important:: All of these examples assume you have a simple Elasticsearch instance running at localhost:9200 that may or may not require a username or password. This can, in fact, be done using the ``docker_test`` scripts included in the Github repository. Run ``docker_test/create.sh 8.13.2`` to create such an image locally (substitute the version of your choice), and ``docker_test/destroy.sh`` to remove them when you're done. These Docker images will export necessary settings to a ``.env`` file in the root directory of your git fork of ``es_client``, and the CA certificate will be put alongside it, named ``http_ca.crt``. The tests, as presently constituted, are already configured to use these settings and values. After running ``docker_test/create.sh 8.13.2``, simply run ``pytest`` to see it work. Don't forget to run ``docker_test/destroy.sh`` after you've run the tests--for now, anyway. I will probably have pytest run the ``create.sh`` and ``destroy.sh`` in the future as part of test setup and teardown (at the ``scope='session'`` level). Once this ``.env`` file is created, to run these tests, you should only need to run: ``source .env`` from the root directory of your project. If you do not have Docker, or choose to use a different cluster, you're responsible for adding whatever configuration options/flags are needed to connect. And I am not at all responsible if you delete an index in production because you did something you shouldn't have. .. _tutorial_step_1: ***************** Add a New Command ***************** To make things really simple, we can just add a new command. We already have 2 commands: .. 
code-block:: console Commands: show-all-options Show all configuration options test-connection Test connection to Elasticsearch A look at the code in :ref:`commands.py ` shows us where that name came from: .. code-block:: python @click.command() @click.pass_context def test_connection(ctx): """ Test connection to Elasticsearch """ # Because of `@click.pass_context`, we can access `ctx.obj` here from the `run` function # that made it: client = get_client(configdict=ctx.obj['configdict']) # If we're here, we'll see the output from GET http(s)://hostname.tld:PORT click.secho('\nConnection result: ', bold=True) click.secho(f'{client.info()}\n') Yeah, it really is that simple. The name of the function becomes the name of the command. Also note that ``@click.command()`` decorator above the ``@click.pass_context`` decorator. These are both absolutely necessary. You probably scrolled through :ref:`cli_example.py ` and noticed all of the decorators above the ``run`` function and recognized that's where all of the options come from. That's it! It's actually easier than it looks. The ``@click.command()`` decorator simply says that this function should be recognized as a viable ``click`` command. There's an additional step required to add a command as a choice at run time: In :ref:`cli_example.py `, we find: .. code-block:: python # Near the top: from es_client.commands import show_all_options, test_connection # Around line 62: @click.group(context_settings=cfg.context_settings()) @cfg.options_from_dict(OPTION_DEFAULTS) @click.version_option(None, "-v", "--version", prog_name="cli_example") @click.pass_context def run(): # The rest of the definition of run follows... # Then near the bottom: run.add_command(show_all_options) run.add_command(test_connection) These lines means we're adding the ``@click.command()`` definitions decorating functions ``show_all_options`` and ``test_connection`` to the ``@click.group()`` attached to function ``run``. 
So let's copy the entire ``test_connection`` function to ``commands.py`` and make a few changes: .. code-block:: python @click.command() @click.pass_context def delete_index(ctx): """ Delete an Elasticsearch Index """ # Because of `@click.pass_context`, we can access `ctx.obj` here from the `run` function # that made it: client = get_client(configdict=ctx.obj['configdict']) # If we're here, we'll see the output from GET http(s)://hostname.tld:PORT click.secho('\nConnection result: ', bold=True) click.secho(f'{client.info()}\n') So what's different now? We renamed our copied function to ``delete_index``. We also changed the Python docstring--that's the part in between the triple quotes underneath the function name. Now in ``cli_example.py``, we need to add this function name to our import list (near the top): .. code-block:: python from es_client.commands import show_all_options, test_connection, delete_index And add a new ``run.add_command()`` line as well (near the bottom): .. code-block:: python run.add_command(delete_index) Let's see what this looks like when we run the basic help output: .. code-block:: console python run_script.py -h Now the output has a difference at the bottom: .. code-block:: console Commands: delete-index Delete an Elasticsearch Index show-all-options Show all configuration options test-connection Test connection to Elasticsearch Cool! Now our new command, ``delete-index`` is starting to take shape. Did you see how the value in the docstring became the description for our new command? .. note:: Our function is named ``delete_index`` but the command is hyphenated: ``delete-index``. .. _tutorial_step_2: ************* Add an Option ************* While our function is named differently and has a different description, it's identical to the ``test-connections`` command still. Let's make a few more changes. .. 
code-block:: python @click.command() @click.option('--index', help='An index name', type=str) @click.pass_context def delete_index(ctx, index): """ Delete an Elasticsearch Index """ # Because of `@click.pass_context`, we can access `ctx.obj` here from the `run` function # that made it: client = get_client(configdict=ctx.obj['configdict']) # If we're here, we'll see the output from GET http(s)://hostname.tld:PORT click.secho('\nConnection result: ', bold=True) click.secho(f'{client.info()}\n') So, two more changes. We added a new option via one of those clever decorators. Please note that this is the direct way to add an option. The ones you see in the example are using stored default options. For right now, this is all we need. This decorator is telling Click that the command ``delete_index`` now needs to have an option, ``--index``, which has its own helpful description, and we tell Click to reject any non-string values because ``type=str``. Also note that we need to add our new option as a variable in the function definition: .. code-block:: python def delete_index(ctx, index): .. note:: Any options or arguments added need to have variables added this way in the same order as the decorators. Let's run this and see what we get. This time we'll actually run the help on our new command: .. code-block:: console python run_script.py delete-index -h The output from this is pretty cool: .. code-block:: console Usage: run_script.py delete-index [OPTIONS] Delete an Elasticsearch Index Options: --index TEXT An index name -h, --help Show this message and exit. So here we see our command name, ``delete-index``, a positional holder for ``OPTIONS`` which is in square braces because they are optional, our docstring again, and a list of accepted options which now includes ``--index``, and a standard help block. .. _tutorial_step_3: ************** Add in Logging ************** This won't actually delete an index yet. We'll get to that part in a bit. 
First, let's add some logging: .. code-block:: python @click.command() @click.option('--index', help='An index name', type=str) @click.pass_context def delete_index(ctx, index): """ Delete an Elasticsearch Index """ logger = logging.getLogger(__name__) logger.info("Let's delete index: %s", index) logger.info("But first, let's connect to Elasticsearch...") client = get_client(configdict=ctx.obj['configdict']) So we deleted some comments, and added 3 lines. The first one says, "create an instance of logger." The second and third use that ``logger`` at ``info`` level to write some log lines. The first includes a string substitution ``%s`` which means, "put the contents of variable ``index`` where the ``%s`` is. It should be noted that logging was already "enabled" in the ``run`` function by the ``configure_logging(ctx)`` function call. Whatever log options were set when we got to that point, whether from a YAML configuration file via ``--config``, or by ``--loglevel``, ``--logfile``, or ``--logformat``, will be in effect before our ``delete_index`` function is ever called. So let's run this much. Go ahead and put in a dummy index name here. There's no deletes happening yet: .. code-block:: console python run_script.py delete-index --index myindex Note that we are omitting the help flag this time. .. code-block:: console 2024-02-03 23:44:25,569 INFO Let's delete index: myindex 2024-02-03 23:44:25,569 INFO But first, let's connect to Elasticsearch... Look at that! We're getting more done. .. _tutorial_step_4: ************************ Add the try/except Logic ************************ So now we have a logger and an Elasticsearch client. Let's add in a delete API call with some "try" logic and see what happens: .. 
code-block:: python @click.command() @click.option('--index', help='An index name', type=str) @click.pass_context def delete_index(ctx, index): """ Delete an Elasticsearch Index """ logger = logging.getLogger(__name__) logger.info("Let's delete index: %s", index) logger.info("But first, let's connect to Elasticsearch...") client = get_client(configdict=ctx.obj['configdict']) logger.info("We're connected!") result = 'FAIL' try: result = client.indices.delete(index=index) except NotFoundError as exc: logger.error("While trying to delete: %s, an error occurred: %s", index, exc.error) logger.info('Index deletion result: %s', result) You probably thought I wasn't going to notice that we are attempting to delete an index on an empty test cluster. I know what the score is here. The lines we've added here are not just to inform us when we try to delete an index that's not there, but also to keep the program from dying unexpectedly. If we did not put in this ``try`` / ``except`` block, the program would have exited silently after logging, "We're connected". Go ahead. Try it and see. .. code-block:: console 2024-02-04 00:24:17,409 INFO Let's delete index myindex 2024-02-04 00:24:17,409 INFO But first, let's connect to Elasticsearch... 2024-02-04 00:24:17,422 INFO We're connected! 2024-02-04 00:24:17,424 ERROR While trying to delete: myindex, an error occurred: index_not_found_exception 2024-02-04 00:24:17,424 INFO Index deletion result: FAIL FAIL? Wait, why am I here? .. _tutorial_step_5: *************** COPY PASTE! GO! *************** Well, I don't blame you for not wanting to waste your time. So what good is it that we have a delete function without any indexes to delete? Hmmmmmmm... Begin the COPY PASTE! GO! .. 
code-block:: python @click.command() @click.option('--index', help='An index name', type=str) @click.pass_context def create_index(ctx, index): """ Create an Elasticsearch Index """ logger = logging.getLogger(__name__) logger.info("Let's create index: %s", index) logger.info("But first, let's connect to Elasticsearch...") client = get_client(configdict=ctx.obj['configdict']) logger.info("We're connected!") result = 'FAIL' try: result = client.indices.create(index=index) except BadRequestError as exc: logger.error("While trying to create: %s, an error occurred: %s", index, exc.error) logger.info('Index creation result: %s', result) You'll note very few differences here in this copy/paste: * Our function name is ``create_index`` * Our docstring also says ``Create`` * Our API call is now ``client.indices.create`` instead of ``delete`` * Our ``except`` is looking for ``BadRequestError``. We expect a index we want to create to not be found, so a ``NotFoundError`` doesn't make much sense here. Instead, if we try to create an index that's already existing, that would be a bad request. * Our final log message is indicating a ``creation`` result. After adding our new function to our import line in ``cli_example.py``: .. code-block:: python from es_client.commands import ( show_all_options, test_connection, delete_index, create_index ) And another new ``run.add_command()`` line as well (add it after the others): .. code-block:: python run.add_command(create_index) Let's see our main usage/help page tail now: .. code-block:: console Commands: create-index Create an Elasticsearch Index delete-index Delete an Elasticsearch Index show-all-options Show all configuration options test-connection Test connection to Elasticsearch Look at all those commands now! .. _tutorial_step_6: *********************** Let's Run Some Commands *********************** ===================== Let's create an index ===================== .. 
code-block:: console python run_script.py create-index --index myindex 2024-02-04 00:30:45,160 INFO Let's create index: myindex 2024-02-04 00:30:45,160 INFO But first, let's connect to Elasticsearch... 2024-02-04 00:30:45,174 INFO We're connected! 2024-02-04 00:30:45,255 INFO Index creation result: {'acknowledged': True, 'shards_acknowledged': True, 'index': 'myindex'} AHA! Our creation result isn't ``FAIL``! What happens if we run it again, though? .. code-block:: console python run_script.py create-index --index myindex 2024-02-04 00:32:24,603 INFO Let's create index: myindex 2024-02-04 00:32:24,603 INFO But first, let's connect to Elasticsearch... 2024-02-04 00:32:24,613 INFO We're connected! 2024-02-04 00:32:24,617 ERROR While trying to create: myindex, an error occurred: resource_already_exists_exception 2024-02-04 00:32:24,617 INFO Index creation result: FAIL FAIL, but to be expected, right? ===================== Let's delete an index ===================== .. code-block:: console python run_script.py delete-index --index myindex 2024-02-04 00:33:41,396 INFO Let's delete index myindex 2024-02-04 00:33:41,397 INFO But first, let's connect to Elasticsearch... 2024-02-04 00:33:41,405 INFO We're connected! 2024-02-04 00:33:41,436 INFO Index deletion result: {'acknowledged': True} This is pretty fun, right? .. _tutorial_step_7: **************** Just Making Sure **************** So, one last parting idea. Suppose we want to prompt our users with an, "Are you sure you want to do this?" message. How would we go about doing that? With the ``confirmation_option()`` decorator, Like this: .. code-block:: python @click.command() @click.option('--index', help='An index name', type=str) @click.confirmation_option() @click.pass_context def delete_index(ctx, index): By adding ``@click.confirmation_option()``, we can make our command ask us to confirm before proceding: =========== Help Output =========== .. 
code-block:: console python run_script.py delete-index -h Usage: run_script.py delete-index [OPTIONS] Delete an Elasticsearch Index Options: --index TEXT An index name --yes Confirm the action without prompting. -h, --help Show this message and exit. You can see the ``--yes`` option in there now. =============== Run and decline =============== .. code-block:: console python run_script.py delete-index --index myindex Do you want to continue? [y/N]: N Aborted! =============== Run and confirm =============== .. code-block:: console python run_script.py delete-index --index myindex Do you want to continue? [y/N]: y 2024-02-04 00:43:47,193 INFO Let's delete index myindex 2024-02-04 00:43:47,193 INFO But first, let's connect to Elasticsearch... 2024-02-04 00:43:47,207 INFO We're connected! 2024-02-04 00:43:47,229 INFO Index deletion result: {'acknowledged': True} ============================= Run with the ``--yes`` option ============================= .. code-block:: console python run_script.py delete-index --index myindex --yes 2024-02-04 00:44:29,313 INFO Let's delete index myindex 2024-02-04 00:44:29,313 INFO But first, let's connect to Elasticsearch... 2024-02-04 00:44:29,322 INFO We're connected! 2024-02-04 00:44:29,356 INFO Index deletion result: {'acknowledged': True} You can see that it does not prompt you if you specify the flag. That's it for our little tutorial! es_client-8.17.4/example.yml000066400000000000000000000002001476634113400157370ustar00rootroot00000000000000--- elasticsearch: client: hosts: "http://127.0.0.1:9200" other_settings: username: joe_user password: password es_client-8.17.4/mypy.ini000066400000000000000000000000651476634113400152710ustar00rootroot00000000000000[mypy] plugins = returns.contrib.mypy.returns_plugin es_client-8.17.4/pylintrc.toml000066400000000000000000000513361476634113400163420ustar00rootroot00000000000000[tool.pylint.main] # Analyse import fallback blocks. 
This can be used to support both Python 2 and 3 # compatible code, which means that the block might have code that exists only in # one or another interpreter, leading to false positives when analysed. # analyse-fallback-blocks = # Clear in-memory caches upon conclusion of linting. Useful if running pylint in # a server-like mode. # clear-cache-post-run = # Always return a 0 (non-error) status code, even if lint errors are found. This # is primarily useful in continuous integration scripts. # exit-zero = # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. # extension-pkg-allow-list = # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. (This is an alternative name to extension-pkg-allow-list # for backward compatibility.) # extension-pkg-whitelist = # Return non-zero exit code if any of these messages/categories are detected, # even if score is above --fail-under value. Syntax same as enable. Messages # specified are enabled, while categories only check already-enabled messages. # fail-on = # Specify a score threshold under which the program will exit with error. fail-under = 10.0 # Interpret the stdin as a python script, whose filename needs to be passed as # the module_or_package argument. # from-stdin = # Files or directories to be skipped. They should be base names, not paths. ignore = ["CVS"] # Add files or directories matching the regular expressions patterns to the # ignore-list. The regex matches against paths and can be in Posix or Windows # format. Because '\\' represents the directory delimiter on Windows systems, it # can't be used as an escape character. # ignore-paths = # Files or directories matching the regular expression patterns are skipped. The # regex matches against base names, not paths. 
The default value ignores Emacs # file locks ignore-patterns = ["^\\.#"] # List of module names for which member attributes should not be checked (useful # for modules/projects where namespaces are manipulated during runtime and thus # existing member attributes cannot be deduced by static analysis). It supports # qualified module names, as well as Unix pattern matching. # ignored-modules = # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). # init-hook = # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use, and will cap the count on Windows to # avoid hangs. jobs = 1 # Control the amount of potential inferred values when inferring a single object. # This can help the performance when dealing with large functions or complex, # nested conditions. limit-inference-results = 100 # List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. # load-plugins = # Pickle collected data for later comparisons. persistent = true # Minimum Python version to use for version dependent checks. Will default to the # version used to run pylint. py-version = "3.12" # Discover python modules and packages in the file system subtree. # recursive = # Add paths to the list of the source roots. Supports globbing patterns. The # source root is an absolute path or a path relative to the current working # directory used to determine a package namespace for modules located under the # source root. # source-roots = # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode = true # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. # unsafe-load-any-extension = [tool.pylint.basic] # Naming style matching correct argument names. 
argument-naming-style = "snake_case" # Regular expression matching correct argument names. Overrides argument-naming- # style. If left empty, argument names will be checked with the set naming style. # argument-rgx = # Naming style matching correct attribute names. attr-naming-style = "snake_case" # Regular expression matching correct attribute names. Overrides attr-naming- # style. If left empty, attribute names will be checked with the set naming # style. # attr-rgx = # Bad variable names which should always be refused, separated by a comma. bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] # Bad variable names regexes, separated by a comma. If names match any regex, # they will always be refused # bad-names-rgxs = # Naming style matching correct class attribute names. class-attribute-naming-style = "any" # Regular expression matching correct class attribute names. Overrides class- # attribute-naming-style. If left empty, class attribute names will be checked # with the set naming style. # class-attribute-rgx = # Naming style matching correct class constant names. class-const-naming-style = "UPPER_CASE" # Regular expression matching correct class constant names. Overrides class- # const-naming-style. If left empty, class constant names will be checked with # the set naming style. # class-const-rgx = # Naming style matching correct class names. class-naming-style = "PascalCase" # Regular expression matching correct class names. Overrides class-naming-style. # If left empty, class names will be checked with the set naming style. # class-rgx = # Naming style matching correct constant names. const-naming-style = "UPPER_CASE" # Regular expression matching correct constant names. Overrides const-naming- # style. If left empty, constant names will be checked with the set naming style. # const-rgx = # Minimum line length for functions/classes that require docstrings, shorter ones # are exempt. 
docstring-min-length = -1 # Naming style matching correct function names. function-naming-style = "snake_case" # Regular expression matching correct function names. Overrides function-naming- # style. If left empty, function names will be checked with the set naming style. # function-rgx = # Good variable names which should always be accepted, separated by a comma. good-names = ["i", "j", "k", "ex", "Run", "_"] # Good variable names regexes, separated by a comma. If names match any regex, # they will always be accepted # good-names-rgxs = # Include a hint for the correct naming format with invalid-name. # include-naming-hint = # Naming style matching correct inline iteration names. inlinevar-naming-style = "any" # Regular expression matching correct inline iteration names. Overrides # inlinevar-naming-style. If left empty, inline iteration names will be checked # with the set naming style. # inlinevar-rgx = # Naming style matching correct method names. method-naming-style = "snake_case" # Regular expression matching correct method names. Overrides method-naming- # style. If left empty, method names will be checked with the set naming style. # method-rgx = # Naming style matching correct module names. module-naming-style = "snake_case" # Regular expression matching correct module names. Overrides module-naming- # style. If left empty, module names will be checked with the set naming style. # module-rgx = # Colon-delimited sets of names that determine each other's naming style when the # name regexes allow several styles. # name-group = # Regular expression which should only match function or class names that do not # require a docstring. no-docstring-rgx = "^_" # List of decorators that produce properties, such as abc.abstractproperty. Add # to this list to register other decorators that produce valid properties. These # decorators are taken in consideration only for invalid-name. 
property-classes = ["abc.abstractproperty"] # Regular expression matching correct type alias names. If left empty, type alias # names will be checked with the set naming style. # typealias-rgx = # Regular expression matching correct type variable names. If left empty, type # variable names will be checked with the set naming style. # typevar-rgx = # Naming style matching correct variable names. variable-naming-style = "snake_case" # Regular expression matching correct variable names. Overrides variable-naming- # style. If left empty, variable names will be checked with the set naming style. # variable-rgx = [tool.pylint.classes] # Warn about protected attribute access inside special methods # check-protected-access-in-special-methods = # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] # List of member names, which should be excluded from the protected access # warning. exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] # List of valid names for the first argument in a class method. valid-classmethod-first-arg = ["cls"] # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg = ["mcs"] [tool.pylint.design] # List of regular expressions of class ancestor names to ignore when counting # public methods (see R0903) # exclude-too-few-public-methods = # List of qualified class names to ignore when counting class parents (see R0901) # ignored-parents = # Maximum number of arguments for function / method. max-args = 5 # Maximum number of attributes for a class (see R0902). max-attributes = 7 # Maximum number of boolean expressions in an if statement (see R0916). max-bool-expr = 5 # Maximum number of branch for function / method body. max-branches = 12 # Maximum number of locals for function / method body. max-locals = 15 # Maximum number of parents for a class (see R0901). 
max-parents = 7 # Maximum number of public methods for a class (see R0904). max-public-methods = 20 # Maximum number of return / yield for function / method body. max-returns = 6 # Maximum number of statements in function / method body. max-statements = 50 # Minimum number of public methods for a class (see R0903). min-public-methods = 2 [tool.pylint.exceptions] # Exceptions that will emit a warning when caught. overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] [tool.pylint.format] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. # expected-line-ending-format = # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines = "^\\s*(# )??$" # Number of spaces of indent required inside a hanging or continued line. indent-after-paren = 4 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string = " " # Maximum number of characters on a single line. max-line-length = 88 # Maximum number of lines in a module. max-module-lines = 1000 # Allow the body of a class to be on the same line as the declaration if body # contains single statement. # single-line-class-stmt = # Allow the body of an if to be on the same line as the test if there is no else. # single-line-if-stmt = [tool.pylint.imports] # List of modules that can be imported at any level, not just the top level one. # allow-any-import-level = # Allow explicit reexports by alias from a package __init__. # allow-reexport-from-package = # Allow wildcard imports from modules that define __all__. # allow-wildcard-with-all = # Deprecated modules which should not be used, separated by a comma. # deprecated-modules = # Output a graph (.gv or any supported image format) of external dependencies to # the given file (report RP0402 must not be disabled). # ext-import-graph = # Output a graph (.gv or any supported image format) of all (i.e. 
internal and # external) dependencies to the given file (report RP0402 must not be disabled). # import-graph = # Output a graph (.gv or any supported image format) of internal dependencies to # the given file (report RP0402 must not be disabled). # int-import-graph = # Force import order to recognize a module as part of the standard compatibility # libraries. # known-standard-library = # Force import order to recognize a module as part of a third party library. known-third-party = ["enchant"] # Couples of modules and preferred modules, separated by a comma. # preferred-modules = [tool.pylint.logging] # The type of string formatting that logging methods do. `old` means using % # formatting, `new` is for `{}` formatting. logging-format-style = "old" # Logging modules to check that the string format arguments are in logging # function parameter format. logging-modules = ["logging"] [tool.pylint."messages control"] # Only show warnings with the listed confidence levels. Leave empty to show all. # Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED. confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] # Disable the message, report, category or checker with the given id(s). You can # either give multiple identifiers separated by comma (,) or put this option # multiple times (only on the command line, not in the configuration file where # it should appear only once). You can also use "--disable=all" to disable # everything first and then re-enable specific checks. For example, if you want # to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes # --disable=W". 
disable = ["raw-checker-failed", "bad-inline-option", "locally-disabled", "file-ignored", "suppressed-message", "useless-suppression", "deprecated-pragma", "use-symbolic-message-instead", "use-implicit-booleaness-not-comparison-to-string", "use-implicit-booleaness-not-comparison-to-zero"] # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where it # should appear only once). See also the "--disable" option for examples. # enable = [tool.pylint.method_args] # List of qualified names (i.e., library.method) which require a timeout # parameter e.g. 'requests.api.get,requests.api.post' timeout-methods = ["requests.api.delete", "requests.api.get", "requests.api.head", "requests.api.options", "requests.api.patch", "requests.api.post", "requests.api.put", "requests.api.request"] [tool.pylint.miscellaneous] # List of note tags to take in consideration, separated by a comma. notes = ["FIXME", "XXX", "TODO"] # Regular expression of note tags to take in consideration. # notes-rgx = [tool.pylint.refactoring] # Maximum number of nested blocks for function / method body max-nested-blocks = 5 # Complete name of functions that never returns. When checking for inconsistent- # return-statements if a never returning function is called then it will be # considered as an explicit return statement and no message will be printed. never-returning-functions = ["sys.exit", "argparse.parse_error"] # Let 'consider-using-join' be raised when the separator to join on would be non- # empty (resulting in expected fixes of the type: ``"- " + " - ".join(items)``) suggest-join-with-non-empty-separator = true [tool.pylint.reports] # Python expression which should return a score less than or equal to 10. 
You # have access to the variables 'fatal', 'error', 'warning', 'refactor', # 'convention', and 'info' which contain the number of messages in each category, # as well as 'statement' which is the total number of statements analyzed. This # score is used by the global evaluation report (RP0004). evaluation = "max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))" # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details. # msg-template = # Set the output format. Available formats are: text, parseable, colorized, json2 # (improved json format), json (old json format) and msvs (visual studio). You # can also give a reporter class, e.g. mypackage.mymodule.MyReporterClass. # output-format = # Tells whether to display a full report or only the messages. # reports = # Activate the evaluation score. score = true [tool.pylint.similarities] # Comments are removed from the similarity computation ignore-comments = true # Docstrings are removed from the similarity computation ignore-docstrings = true # Imports are removed from the similarity computation ignore-imports = true # Signatures are removed from the similarity computation ignore-signatures = true # Minimum lines number of a similarity. min-similarity-lines = 4 [tool.pylint.spelling] # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions = 4 # Spelling dictionary name. No available dictionaries : You need to install both # the python package and the system dependency for enchant to work. # spelling-dict = # List of comma separated words that should be considered directives if they # appear at the beginning of a comment and should not be checked. spelling-ignore-comment-directives = "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:" # List of comma separated words that should not be checked. 
# spelling-ignore-words = # A path to a file that contains the private dictionary; one word per line. # spelling-private-dict-file = # Tells whether to store unknown words to the private dictionary (see the # --spelling-private-dict-file option) instead of raising a message. # spelling-store-unknown-words = [tool.pylint.typecheck] # List of decorators that produce context managers, such as # contextlib.contextmanager. Add to this list to register other decorators that # produce valid context managers. contextmanager-decorators = ["contextlib.contextmanager"] # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. # generated-members = # Tells whether missing members accessed in mixin class should be ignored. A # class is considered mixin if its name matches the mixin-class-rgx option. # Tells whether to warn about missing members when the owner of the attribute is # inferred to be None. ignore-none = true # This flag controls whether pylint should warn about no-member and similar # checks whenever an opaque object is returned when inferring. The inference can # return multiple potential results while evaluating a Python object, but some # branches might not be evaluated, which results in partial inference. In that # case, it might be useful to still emit no-member and other checks for the rest # of the inferred objects. ignore-on-opaque-inference = true # List of symbolic message names to ignore for Mixin members. ignored-checks-for-mixins = ["no-member", "not-async-context-manager", "not-context-manager", "attribute-defined-outside-init"] # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. 
ignored-classes = ["optparse.Values", "thread._local", "_thread._local", "argparse.Namespace"] # Show a hint with possible names when a member name was not found. The aspect of # finding the hint is based on edit distance. missing-member-hint = true # The minimum edit distance a name should have in order to be considered a # similar match for a missing member name. missing-member-hint-distance = 1 # The total number of similar names that should be taken in consideration when # showing a hint for a missing member. missing-member-max-choices = 1 # Regex pattern to define which classes are considered mixins. mixin-class-rgx = ".*[Mm]ixin" # List of decorators that change the signature of a decorated function. # signature-mutators = [tool.pylint.variables] # List of additional names supposed to be defined in builtins. Remember that you # should avoid defining new builtins when possible. # additional-builtins = # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables = true # List of names allowed to shadow builtins # allowed-redefined-builtins = # List of strings which can identify a callback function by name. A callback name # must start or end with one of those strings. callbacks = ["cb_", "_cb"] # A regular expression matching the name of dummy variables (i.e. expected to not # be used). dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_" # Argument names that match this expression will be ignored. ignored-argument-names = "_.*|^ignored_|^unused_" # Tells whether we should check for unused import in __init__ files. # init-import = # List of qualified module names which can have objects that can redefine # builtins. 
redefining-builtins-modules = ["six.moves", "past.builtins", "future.builtins", "builtins", "io"] es_client-8.17.4/pyproject.toml000066400000000000000000000052071476634113400165110ustar00rootroot00000000000000[build-system] requires = ['hatchling'] build-backend = 'hatchling.build' [project] name = 'es_client' dynamic = ['version'] description = 'Elasticsearch Client builder, complete with schema validation' authors = [{name = 'Aaron Mildenstein', email = 'aaron@mildensteins.com'}] readme = 'README.rst' requires-python = '>=3.8' license = { text='Apache-2.0' } classifiers = [ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Programming Language :: Python :: 3.12', 'Programming Language :: Python :: 3.13', ] keywords = [ 'elasticsearch', 'client', 'connect', 'command-line' ] dependencies = [ 'elasticsearch8==8.17.2', 'ecs-logging==2.2.0', 'dotmap==1.3.30', 'click==8.1.8', 'pyyaml==6.0.2', 'voluptuous>=0.14.2', 'certifi>=2025.1.31' ] [project.optional-dependencies] test = [ 'requests', 'pytest>=7.2.1', 'pytest-cov', 'pytest-dotenv', ] doc = ['sphinx', 'sphinx_rtd_theme'] [project.urls] 'Homepage' = 'https://github.com/untergeek/es_client' 'Bug Tracker' = 'https://github.com/untergeek/es_client/issues' 'Documentation' = 'https://es-client.readthedocs.io/' 'Source Code' = 'http://github.com/untergeek/es_client' 'Release Notes' = 'https://github.com/untergeek/es_client/releases' [tool.hatch.version] path = 'src/es_client/__init__.py' [tool.hatch.build.targets.sdist] exclude = [ 'dist', 'docs', 'docker_test', 'examples', 'html_docs', 'tests', 'cli.py', 'example.yml', 'pytest.ini', 'src/local_test.py', ] [tool.pytest.ini_options] pythonpath = ['.', 'src/es_client'] minversion = '7.2' addopts = '-ra -q' testpaths = [ 'tests/unit', 
'tests/integration', ] # Lint environment [tool.hatch.envs.lint.scripts] run-black = 'black --quiet --check --diff {args:.}' python = ['run-black'] all = ['python'] # Test environment [[tool.hatch.envs.test.matrix]] python = ['3.8', '3.9', '3.10', '3.11', '3.12'] [tool.hatch.envs.test] dependencies = [ 'requests', 'pytest >=7.2.1', 'pytest-cov', 'pytest-dotenv', ] [tool.hatch.envs.test.scripts] test = 'pytest' test-cov = 'pytest --cov=es_client' cov-report = 'pytest --cov=es_client --cov-report html:cov_html' [tool.coverage.report] exclude_lines = [ 'no cov', 'if __name__ == .__main__.:', 'if TYPE_CHECKING:', ] [tool.black] target-version = ['py38'] line-length = 88 skip-string-normalization = true include = '\.pyi?$' es_client-8.17.4/pytest.ini000066400000000000000000000002101476634113400156130ustar00rootroot00000000000000[pytest] #log_cli=true env_files = .env log_format = %(asctime)s %(levelname)-9s %(name)22s %(funcName)22s:%(lineno)-4d %(message)s es_client-8.17.4/run_script.py000077500000000000000000000006021476634113400163340ustar00rootroot00000000000000#!/usr/bin/env python """Script to run locally""" from click import echo from es_client.cli_example import run if __name__ == '__main__': try: # This is because click uses decorators, and pylint doesn't catch that # pylint: disable=no-value-for-parameter run() except RuntimeError as err: import sys echo(f'{err}') sys.exit(1) es_client-8.17.4/src/000077500000000000000000000000001476634113400143605ustar00rootroot00000000000000es_client-8.17.4/src/es_client/000077500000000000000000000000001476634113400163255ustar00rootroot00000000000000es_client-8.17.4/src/es_client/__init__.py000066400000000000000000000001361476634113400204360ustar00rootroot00000000000000"""Module Init""" from .builder import Builder __all__ = ["Builder"] __version__ = "8.17.4" es_client-8.17.4/src/es_client/builder.py000066400000000000000000000346431476634113400203370ustar00rootroot00000000000000"""Builder and associated Classes""" import 
typing as t import logging from dotmap import DotMap # type: ignore from elastic_transport import ObjectApiResponse import elasticsearch8 from es_client.helpers.schemacheck import password_filter from es_client.defaults import VERSION_MIN, VERSION_MAX, CLIENT_SETTINGS, OTHER_SETTINGS from es_client.exceptions import ConfigurationError, ESClientException, NotMaster from es_client.helpers.utils import ( check_config, ensure_list, file_exists, get_version, get_yaml, parse_apikey_token, prune_nones, verify_ssl_paths, verify_url_schema, ) logger = logging.getLogger(__name__) # pylint: disable=R0902 class Builder: """ :param configdict: A configuration dictionary :param configfile: A YAML configuration file :param autoconnect: Connect to client automatically Build a client connection object out of settings from `configfile` or `configdict`. If neither `configfile` nor `configdict` is provided, empty defaults will be used. If both are provided, `configdict` will be used, and `configfile` ignored. """ def __init__( self, configdict: t.Union[t.Dict, None] = None, configfile: t.Union[str, None] = None, autoconnect: bool = False, version_min: t.Tuple = VERSION_MIN, version_max: t.Tuple = VERSION_MAX, ): #: The DotMap storage for attributes and settings self.attributes = DotMap() self.set_client_defaults() self.set_other_defaults() #: The :py:class:`~.elasticsearch.Elasticsearch` client connection object self.client = elasticsearch8.Elasticsearch(hosts="http://127.0.0.1:9200") self.process_config_opts(configdict, configfile) self.version_max = version_max self.version_min = version_min self.update_config() self.validate() if autoconnect: self.connect() self.test_connection() @property def master_only(self) -> bool: """Only allow connection to the elected master, if ``True`` :getter: Get the "Only use the elected master?" state :setter: Set the "Only use the elected master?" 
state :type: bool """ return self.attributes.master_only @master_only.setter def master_only(self, value) -> None: self.attributes.master_only = value @property def is_master(self) -> bool: """Is the node we connected to the elected master? :getter: Get the "Are we the elected master?" state :setter: Set the "Are we the elected master?" state :type: bool """ return self.attributes.is_master @is_master.setter def is_master(self, value) -> None: self.attributes.is_master = value @property def config(self) -> DotMap: """Configuration settings extracted from ``configfile`` or ``configdict`` :getter: Get the configuration settings :setter: Set the configuration settings :type: DotMap """ return self.attributes.config @config.setter def config(self, value) -> None: self.attributes.config = DotMap(value) @property def client_args(self) -> DotMap: """The storage and workspace for ``client`` settings :getter: Get ``client`` values :setter: Set ``client`` values :type: DotMap """ return self.attributes.client_args @client_args.setter def client_args(self, value) -> None: self.attributes.client_args = DotMap(value) @property def other_args(self) -> DotMap: """The storage and workspace for ``other_settings`` :getter: Get ``other_args`` values :setter: Set ``other_args`` values :type: DotMap """ return self.attributes.other_args @other_args.setter def other_args(self, value) -> None: self.attributes.other_args = DotMap(value) @property def skip_version_test(self) -> bool: """ :getter: Get the ``skip_version_test`` value :setter: Set the ``skip_version_test`` value :type: bool """ return self.attributes.skip_version_test @skip_version_test.setter def skip_version_test(self, value: bool) -> None: self.attributes.skip_version_test = value @property def version_min(self) -> t.Tuple: """ :getter: Get the minimum acceptable Elasticsearch version :setter: Set the minimum acceptable Elasticsearch version :type: t.Tuple """ return self.attributes.version_min @version_min.setter def 
version_min(self, value) -> None: self.attributes.version_min = value @property def version_max(self) -> t.Tuple: """ :getter: Get the maximum acceptable Elasticsearch version :setter: Set the maximum acceptable Elasticsearch version :type: t.Tuple """ return self.attributes.version_max @version_max.setter def version_max(self, value) -> None: self.attributes.version_max = value def set_client_defaults(self) -> None: """Set defaults for the client_args property""" self.client_args = DotMap() for key in CLIENT_SETTINGS: self.client_args[key] = None def set_other_defaults(self) -> None: """Set defaults for the other_args property""" self.other_args = DotMap() for key in OTHER_SETTINGS: self.other_args[key] = None def process_config_opts( self, configdict: t.Union[t.Dict, None], configfile: t.Union[str, None] ) -> None: """Process whether to use a configdict or configfile""" if configfile: logger.debug("Using values from configfile: %s", configfile) self.config = check_config(get_yaml(configfile)) if configdict: logger.debug("Using configdict values: %s", password_filter(configdict)) self.config = check_config(configdict) if not configfile and not configdict: # Empty/Default config. logger.debug( "No configuration file or dictionary provided. Using defaults." 
) self.config = check_config({"client": {}, "other_settings": {}}) def update_config(self) -> None: """Update object with values provided""" self.client_args.update(self.config.client) self.other_args.update(self.config.other_settings) self.master_only = self.other_args.master_only self.is_master = False # Preset, until we populate this later if "skip_version_test" in self.other_args: self.skip_version_test = self.other_args.skip_version_test else: self.skip_version_test = False def validate(self) -> None: """Validate that what has been supplied is acceptable to attempt a connection""" # Configuration pre-checks if self.client_args.hosts is not None: verified_hosts = [] self.client_args.hosts = ensure_list(self.client_args.hosts) for host in self.client_args.hosts: try: verified_hosts.append(verify_url_schema(host)) except ConfigurationError as exc: logger.critical("Invalid host schema detected: %s -- %s", host, exc) raise ConfigurationError( f"Invalid host schema detected: {host}" ) from exc self.client_args.hosts = verified_hosts self._check_basic_auth() self._check_api_key() self._check_cloud_id() self._check_ssl() def connect(self) -> None: """Attempt connection and do post-connection checks""" # Get the client self._get_client() # Post checks self._check_version() if self.master_only: self._check_multiple_hosts() self._find_master() self._check_if_master() def _check_basic_auth(self) -> None: """Create ``basic_auth`` tuple from username and password""" if "username" in self.other_args or "password" in self.other_args: usr = self.other_args.username if "username" in self.other_args else None pwd = self.other_args.password if "password" in self.other_args else None if usr is None and pwd is None: pass elif usr is None or pwd is None: msg = "Must populate both username and password, or leave both empty" raise ConfigurationError(msg) else: self.client_args.basic_auth = (usr, pwd) def _check_api_key(self) -> None: """ Create ``api_key`` tuple from 
:py:attr:`other_args` ``['api_key']`` subkeys ``id`` and ``api_key`` Or if ``api_key`` subkey ``token`` is present, derive ``id`` and ``api_key`` from ``token`` """ if "api_key" in self.other_args: # If present, token will override any value in 'id' or 'api_key' # pylint: disable=no-member if "token" in self.other_args.api_key: if self.other_args.api_key.token is not None: (self.other_args.api_key.id, self.other_args.api_key.api_key) = ( parse_apikey_token(self.other_args.api_key.token) ) if "id" in self.other_args.api_key or "api_key" in self.other_args.api_key: api_id = ( self.other_args.api_key.id if "id" in self.other_args.api_key else None ) api_key = ( self.other_args.api_key.api_key if "api_key" in self.other_args.api_key else None ) if api_id is None and api_key is None: self.client_args.api_key = ( None # Setting this here because of DotMap ) elif api_id is None or api_key is None: msg = "Must populate both id and api_key, or leave both empty" raise ConfigurationError(msg) else: self.client_args.api_key = (api_id, api_key) def _check_cloud_id(self) -> None: """Remove ``hosts`` key if ``cloud_id`` provided""" if "cloud_id" in self.client_args and self.client_args.cloud_id is not None: # We can remove the default if that's all there is if ( self.client_args.hosts == ["http://127.0.0.1:9200"] and len(self.client_args.hosts) == 1 ): self.client_args.hosts = None if self.client_args.hosts is not None: raise ConfigurationError('Cannot populate both "hosts" and "cloud_id"') def _check_ssl(self) -> None: """ Use `certifi `_ if using ssl and ``ca_certs`` has not been specified. 
""" verify_ssl_paths(self.client_args) if "cloud_id" in self.client_args and self.client_args.cloud_id is not None: scheme = "https" elif self.client_args.hosts is None: scheme = None else: scheme = self.client_args.hosts[0].split(":")[0].lower() if scheme == "https": if "ca_certs" not in self.client_args or not self.client_args.ca_certs: # pylint: disable=import-outside-toplevel import certifi # Use certifi certificates via certifi.where(): self.client_args.ca_certs = certifi.where() else: keylist = ["ca_certs", "client_cert", "client_key"] for key in keylist: if key in self.client_args and self.client_args[key]: if not file_exists(self.client_args[key]): msg = f'"{key}: {self.client_args[key]}" File not found!' logger.critical(msg) raise ConfigurationError(msg) def _find_master(self) -> None: """Find out if we are connected to the elected master node""" my_node_id = list(self.client.nodes.info(node_id="_local")["nodes"])[0] master_node_id = self.client.cluster.state(metric="master_node")["master_node"] self.is_master = my_node_id == master_node_id def _check_multiple_hosts(self) -> None: """Check for multiple hosts when master_only""" if "hosts" in self.client_args and isinstance(self.client_args.hosts, list): if len(self.client_args.hosts) > 1: raise ConfigurationError( f'"master_only" cannot be True if more than one host is ' f"specified. Hosts = {self.client_args.hosts}" ) def _check_if_master(self) -> None: """ If we are not connected to the elected master node, raise :py:exc:`~es_client.exceptions.NotMaster` """ if not self.is_master: msg = ( "The master_only flag is set to True, but the client is " "currently connected to a non-master node." 
) logger.info(msg) raise NotMaster(msg) def _check_version(self) -> None: """ Compare the Elasticsearch cluster version to :py:attr:`min_version` and :py:attr:`max_version` """ v = get_version(self.client) if self.skip_version_test: logger.warning("Skipping Elasticsearch version checks") else: logger.debug("Detected version %s", ".".join(map(str, v))) if v >= self.version_max or v < self.version_min: msg = f"Elasticsearch version {'.'.join(map(str, v))} not supported" logger.error(msg) raise ESClientException(msg) def _get_client(self) -> None: """ Instantiate the :py:class:`~.elasticsearch.Elasticsearch` object and populate :py:attr:`client` """ # Eliminate any remaining "None" entries from the client arguments client_args = prune_nones(self.client_args.toDict()) self.client = elasticsearch8.Elasticsearch(**client_args) def test_connection(self) -> ObjectApiResponse[t.Any]: """ Connect and execute :meth:`Elasticsearch.info() ` """ return self.client.info() es_client-8.17.4/src/es_client/cli_example.py000066400000000000000000000113021476634113400211560ustar00rootroot00000000000000""" Sample CLI script that will get a client using both config file and CLI args/options """ import click from elasticsearch8.exceptions import BadRequestError, NotFoundError from es_client.helpers import config as cfg from es_client.defaults import OPTION_DEFAULTS from es_client.helpers.logging import configure_logging from es_client.commands import show_all_options, test_connection, test_stderr # Since this file will not be imported, we use this to squash the F401 error __all__ = ["BadRequestError", "NotFoundError"] # pylint: disable=E1120 # The following default options are all automatically added by the decorator: # # ``@cfg.options_from_dict(OPTION_DEFAULTS)`` # # Be sure to add any other options or arguments either before or after this decorator, # and add any added arguments in ``def run()``, preserving their order in both # locations. 
``ctx`` needs to be the first arg after ``def run()`` as a special # argument for Click, and does not need a decorator function. # These options require the following other includes: # # from es_client.defaults import LOGGING_SETTINGS, ONOFF # from es_client.helpers.utils import option_wrapper # click_opt_wrap = option_wrapper() # # @click_opt_wrap(*cli_opts('config')) # @click_opt_wrap(*cli_opts('hosts')) # @click_opt_wrap(*cli_opts('cloud_id')) # @click_opt_wrap(*cli_opts('api_token')) # @click_opt_wrap(*cli_opts('id')) # @click_opt_wrap(*cli_opts('api_key')) # @click_opt_wrap(*cli_opts('username')) # @click_opt_wrap(*cli_opts('password')) # @click_opt_wrap(*cli_opts('bearer_auth')) # @click_opt_wrap(*cli_opts('opaque_id')) # @click_opt_wrap(*cli_opts('request_timeout')) # @click_opt_wrap(*cli_opts('http_compress', onoff=ONOFF)) # @click_opt_wrap(*cli_opts('verify_certs', onoff=ONOFF)) # @click_opt_wrap(*cli_opts('ca_certs')) # @click_opt_wrap(*cli_opts('client_cert')) # @click_opt_wrap(*cli_opts('client_key')) # @click_opt_wrap(*cli_opts('ssl_assert_hostname')) # @click_opt_wrap(*cli_opts('ssl_assert_fingerprint')) # @click_opt_wrap(*cli_opts('ssl_version')) # @click_opt_wrap(*cli_opts('master-only', onoff=ONOFF)) # @click_opt_wrap(*cli_opts('skip_version_test', onoff=ONOFF)) # @click_opt_wrap(*cli_opts('loglevel', settings=LOGGING_SETTINGS)) # @click_opt_wrap(*cli_opts('logfile', settings=LOGGING_SETTINGS)) # @click_opt_wrap(*cli_opts('logformat', settings=LOGGING_SETTINGS)) # @click_opt_wrap(*cli_opts('blacklist', settings=LOGGING_SETTINGS)) # pylint: disable=R0913,R0914,W0613,W0622 @click.group(context_settings=cfg.context_settings()) @cfg.options_from_dict(OPTION_DEFAULTS) @click.version_option(None, "-v", "--version", prog_name="cli_example") @click.pass_context def run( ctx, config, hosts, cloud_id, api_token, id, api_key, username, password, bearer_auth, opaque_id, request_timeout, http_compress, verify_certs, ca_certs, client_cert, client_key, 
ssl_assert_hostname, ssl_assert_fingerprint, ssl_version, master_only, skip_version_test, loglevel, logfile, logformat, blacklist, ): """ CLI Example Any text added to a docstring will show up in the --help/usage output. Set short_help='' in @func.command() definitions for each command for terse descriptions in the main help/usage output, as with show_all_options() in this example. """ # If there's a default file location for client configuration, e.g. # $HOME/.curator/curator.yml, then specify it here. ctx.obj is now instantiated in # ``helpers.config.cfg.context_settings()`` ctx.obj["default_config"] = None # The ``cfg.get_config`` function will grab the configuration derived from a YAML # config file specified in command-line parameters, or if that is unspecified but # ctx.obj['default_config'] is provided, use that. If quiet=True, suppress the line # written to STDOUT that indicates the file at ctx.obj['default_config'] is being # used. # If neither ctx.params['config'] nor ctx.obj['default_config'] reference a YAML # configuration file, then a config dict with empty/default configured is # generated. The result is stored in ctx.obj['draftcfg'] cfg.get_config(ctx, quiet=False) # Configure logging. 
This will use the values from command line parameters, or # what's now been stored in ctx.obj['draftcfg'] configure_logging(ctx) # The ``cfg.generate_configdict`` function does all of the overriding of YAML # config file options by command-line specified ones and stores the ready-to-be- # used by Builder configuration in ctx.obj['configdict'] cfg.generate_configdict(ctx) run.add_command(show_all_options) run.add_command(test_connection) run.add_command(test_stderr) if __name__ == "__main__": run() es_client-8.17.4/src/es_client/commands.py000066400000000000000000000113361476634113400205040ustar00rootroot00000000000000""" Click commands to follow the top-level """ import logging import click from es_client.helpers import config as cfg from es_client.defaults import SHOW_EVERYTHING # pylint: disable=R0913,R0914,W0613,W0622 # SHOW ALL OPTIONS # # Below is the ``show-all-options`` command which overrides the default with the values # in the OVERRIDE constant (``hidden: False`` and ``show_env_vars: True``) which will # reveal any hidden by default options in the top-level menu so they are exposed in the # --help output, as well as show the environment variable name that can be used to set # the option without a flag/argument. 
# The below options are all included automatically by the decorator: # # ``@cfg.options_from_dict(SHOW_EVERYTHING)`` # # These options require the following other includes: # # from es_client.defaults import LOGGING_SETTINGS, ONOFF, OVERRIDE # from es_client.helpers.utils import option_wrapper # click_opt_wrap = option_wrapper() # # @click_opt_wrap(*cli_opts('config', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('hosts', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('cloud_id', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('api_token', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('id', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('api_key', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('username', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('password', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('bearer_auth', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('opaque_id', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('request_timeout', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('http_compress', onoff=ONOFF, override=OVERRIDE)) # @click_opt_wrap(*cli_opts('verify_certs', onoff=ONOFF, override=OVERRIDE)) # @click_opt_wrap(*cli_opts('ca_certs', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('client_cert', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('client_key', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('ssl_assert_hostname', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('ssl_assert_fingerprint', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('ssl_version', override=OVERRIDE)) # @click_opt_wrap(*cli_opts('master-only', onoff=ONOFF, override=OVERRIDE)) # @click_opt_wrap(*cli_opts('skip_version_test', onoff=ONOFF, override=OVERRIDE)) # @click_opt_wrap(*cli_opts('loglevel', settings=LOGGING_SETTINGS, override=OVERRIDE)) # @click_opt_wrap(*cli_opts('logfile', settings=LOGGING_SETTINGS, override=OVERRIDE)) # @click_opt_wrap(*cli_opts('logformat', settings=LOGGING_SETTINGS, override=OVERRIDE)) # 
@click_opt_wrap(*cli_opts('blacklist', settings=LOGGING_SETTINGS, override=OVERRIDE)) # NOTE: Different procedure for show_all_options than other sub-commands # Normally, for a sub-command, you would not reset the `cfg.context_settings` as we've # done here because it also resets the context (ctx). We normally want to pass this # along from the top level command. In this case, we want it to look like the # root-level command for the sake of the environment variables being shown for the # root-level and not a sub-level command. @click.command( context_settings=cfg.context_settings(), short_help="Show all client configuration options", ) @cfg.options_from_dict(SHOW_EVERYTHING) @click.pass_context def show_all_options( ctx, config, hosts, cloud_id, api_token, id, api_key, username, password, bearer_auth, opaque_id, request_timeout, http_compress, verify_certs, ca_certs, client_cert, client_key, ssl_assert_hostname, ssl_assert_fingerprint, ssl_version, master_only, skip_version_test, loglevel, logfile, logformat, blacklist, ): """ ALL OPTIONS SHOWN The full list of options available for configuring a connection at the command-line. 
""" ctx = click.get_current_context() click.echo(ctx.get_help()) ctx.exit() @click.command() @click.pass_context def test_connection(ctx): """ Test connection to Elasticsearch """ # Because of `@click.pass_context`, we can access `ctx.obj` here from the `run` # function that made it: client = cfg.get_client(configdict=ctx.obj["configdict"]) # If we're here, we'll see the output from GET http(s)://hostname.tld:PORT click.secho("\nConnection result: ", bold=True) click.secho(f"{client.info()}\n") @click.command() @click.pass_context def test_stderr(ctx): """ Test STDERR logging """ logger = logging.getLogger(__name__) logger.debug("This is a debug message") logger.info("This is an info message") logger.warning("This is a warning message") logger.error("This is an error message") logger.critical("This is a critical message") click.secho("\nLogging test complete.\n") es_client-8.17.4/src/es_client/defaults.py000066400000000000000000000414261476634113400205150ustar00rootroot00000000000000"""Define default values""" # pylint: disable=line-too-long import typing as t from copy import deepcopy from click import Choice, Path from voluptuous import All, Any, Boolean, Coerce, Optional, Range, Schema VERSION_MIN: t.Tuple = (8, 0, 0) """Minimum compatible Elasticsearch version""" VERSION_MAX: t.Tuple = (8, 99, 99) """Maximum compatible Elasticsearch version""" KEYS_TO_REDACT: t.Sequence[str] = [ "password", "basic_auth", "bearer_auth", "api_key", "id", "opaque_id", ] """ When doing configuration Schema validation, redact the value from any listed dictionary key. This only happens if logging is at DEBUG level. 
""" CLIENT_SETTINGS: t.Sequence[str] = [ "hosts", "cloud_id", "api_key", "basic_auth", "bearer_auth", "opaque_id", "headers", "connections_per_node", "http_compress", "verify_certs", "ca_certs", "client_cert", "client_key", "ssl_assert_hostname", "ssl_assert_fingerprint", "ssl_version", "ssl_context", "ssl_show_warn", "transport_class", "request_timeout", "node_class", "node_pool_class", "randomize_nodes_in_pool", "node_selector_class", "dead_node_backoff_factor", "max_dead_node_backoff", "serializer", "serializers", "default_mimetype", "max_retries", "retry_on_status", "retry_on_timeout", "sniff_on_start", "sniff_before_requests", "sniff_on_node_failures", "sniff_timeout", "min_delay_between_sniffing", "sniffed_node_callback", "meta_header", "host_info_callback", "_transport", ] """ Valid argument/option names for :py:class:`~.elasticsearch8.Elasticsearch`. Too large to show """ OTHER_SETTINGS: t.Sequence[str] = [ "master_only", "skip_version_test", "username", "password", "api_key", ] """Valid option names for :py:class:`~.es_client.builder.Builder`'s other settings""" CLICK_SETTINGS: t.Dict[str, t.Dict] = { "config": {"help": "Path to configuration file.", "type": Path(exists=True)}, "hosts": {"help": "Elasticsearch URL to connect to.", "multiple": True}, "cloud_id": {"help": "Elastic Cloud instance id"}, "api_token": {"help": "The base64 encoded API Key token", "type": str}, "id": {"help": 'API Key "id" value', "type": str}, "api_key": {"help": 'API Key "api_key" value', "type": str}, "username": {"help": "Elasticsearch username", "type": str}, "password": {"help": "Elasticsearch password", "type": str}, "bearer_auth": {"help": "Bearer authentication token", "type": str, "hidden": True}, "opaque_id": {"help": "X-Opaque-Id HTTP header value", "type": str, "hidden": True}, "request_timeout": {"help": "Request timeout in seconds", "type": float}, "http_compress": { "help": "Enable HTTP compression", "default": None, "hidden": True, }, "verify_certs": {"help": 
"Verify SSL/TLS certificate(s)", "default": None}, "ca_certs": {"help": "Path to CA certificate file or directory", "type": str}, "client_cert": {"help": "Path to client certificate file", "type": str}, "client_key": {"help": "Path to client key file", "type": str}, "ssl_assert_hostname": { "help": "Hostname or IP address to verify on the node's certificate.", "type": str, "hidden": True, }, "ssl_assert_fingerprint": { "help": ( "SHA-256 fingerprint of the node's certificate. If this value is given " "then root-of-trust verification isn't done and only the node's " "certificate fingerprint is verified." ), "type": str, "hidden": True, }, "ssl_version": { "help": "Minimum acceptable TLS/SSL version", "type": str, "hidden": True, }, "master-only": { "help": "Only run if the single host provided is the elected master", "default": None, "hidden": True, }, "skip_version_test": { "help": "Elasticsearch version compatibility check", "default": None, "hidden": True, }, } """Default settings used for building :py:class:`click.Option`. 
Too large to show.""" ES_DEFAULT: t.Dict = {"elasticsearch": {"client": {"hosts": ["http://127.0.0.1:9200"]}}} """Default settings for :py:class:`~.es_client.builder.Builder`""" ENV_VAR_PREFIX: str = "ESCLIENT" """Environment variable prefix""" LOGLEVEL: None = None """Default loglevel""" LOGFILE: None = None """Default value for logfile""" LOGFORMAT: t.Literal['default', 'ecs', 'json'] = 'default' """Default value for logformat""" BLACKLIST: None = None """Default value for logging blacklist""" LOGDEFAULTS: t.Dict = { "loglevel": LOGLEVEL, "logfile": LOGFILE, "logformat": LOGFORMAT, "blacklist": BLACKLIST, } """All logging defaults in a single combined dictionary""" LOGGING_SETTINGS: t.Dict[str, t.Dict] = { "loglevel": { "help": "Log level", "type": Choice(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]), "default": None, }, "logfile": {"help": "Log file", "type": str}, "logformat": { "help": "Log output format", "type": Choice(["default", "json", "ecs"]), "default": None, }, "blacklist": { "help": "Named entities will not be logged", "multiple": True, "default": None, "hidden": True, }, } """ Default logging settings used for building :py:class:`click.Option`. Too large to show. 
""" SHOW_OPTION: t.Dict[str, bool] = {"hidden": False} """Override value to "unhide" a :py:class:`click.Option`""" SHOW_ENVVAR: t.Dict[str, bool] = {"show_envvar": True} """Override value to make Click's help output show the associated environment variable """ OVERRIDE: t.Dict = {**SHOW_OPTION, **SHOW_ENVVAR} """Override value to combine these into a single constant""" ONOFF: t.Dict[str, str] = {"on": "", "off": "no-"} """Default values for enable/disable click options""" OPTION_DEFAULTS: t.Dict[str, t.Dict] = { "config": {}, "hosts": {}, "cloud_id": {}, "api_token": {}, "id": {}, "api_key": {}, "username": {}, "password": {}, "bearer_auth": {}, "opaque_id": {}, "request_timeout": {}, "http_compress": {"onoff": ONOFF}, "verify_certs": {"onoff": ONOFF}, "ca_certs": {}, "client_cert": {}, "client_key": {}, "ssl_assert_hostname": {}, "ssl_assert_fingerprint": {}, "ssl_version": {}, "master-only": {"onoff": ONOFF}, "skip_version_test": {"onoff": ONOFF}, "loglevel": {"settings": LOGGING_SETTINGS["loglevel"]}, "logfile": {"settings": LOGGING_SETTINGS["logfile"]}, "logformat": {"settings": LOGGING_SETTINGS["logformat"]}, "blacklist": {"settings": LOGGING_SETTINGS["blacklist"]}, } """Default options for iteratively building Click decorators""" def all_on() -> t.Dict[str, t.Dict]: """Return default options with all overrides enabled""" options = deepcopy(OPTION_DEFAULTS) retval = {} # pylint: disable=consider-using-dict-items for option in options: retval[option] = options[option] retval[option]["override"] = OVERRIDE return retval SHOW_EVERYTHING: t.Dict[str, t.Dict] = all_on() """Return options for iteratively building Click decorators with all overrides on""" # Logging schema def config_logging() -> Schema: """ :returns: A validation schema of all acceptable logging configuration parameter names and values with defaults for unset parameters. :rtype: :py:class:`~.voluptuous.schema_builder.Schema` Logging schema with defaults: .. 
code-block:: yaml logging: loglevel: INFO logfile: None logformat: default blacklist: ['elastic_transport', 'urllib3'] """ return Schema( { Optional("loglevel", default="INFO"): Any( None, "NOTSET", "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL", All(Coerce(int), Any(0, 10, 20, 30, 40, 50)), ), Optional("logfile", default=None): Any(None, str), Optional("logformat", default="default"): Any( None, All(Any(str), Any("default", "json", "ecs")) ), Optional("blacklist", default=["elastic_transport", "urllib3"]): Any( None, list ), } ) # All elasticsearch client options, with a few additional arguments. def config_schema() -> Schema: """ :returns: A validation schema of all acceptable client configuration parameter names and values with defaults for unset parameters. :rtype: :py:class:`~.voluptuous.schema_builder.Schema` The validation schema for an :py:class:`~.elasticsearch8.Elasticsearch` client object with defaults """ # pylint: disable=no-value-for-parameter return Schema( { Optional("other_settings", default={}): { Optional("master_only", default=False): Boolean(), Optional("skip_version_test", default=False): Boolean(), Optional("username", default=None): Any(None, str), Optional("password", default=None): Any(None, str), Optional("api_key", default={}): { Optional("id"): Any(None, str), Optional("api_key"): Any(None, str), Optional("token"): Any(None, str), }, }, Optional("client", default={}): { Optional("hosts", default=None): Any(None, list, str), Optional("cloud_id", default=None): Any(None, str), Optional("api_key"): Any(None, tuple), Optional("basic_auth"): Any(None, tuple), Optional("bearer_auth"): Any(None, str), Optional("opaque_id"): Any(None, str), Optional("headers"): Any(None, dict), Optional("connections_per_node"): Any( None, All(Coerce(int), Range(min=1, max=100)) ), Optional("http_compress"): Boolean(), Optional("verify_certs"): Boolean(), Optional("ca_certs"): Any(None, str), Optional("client_cert"): Any(None, str), Optional("client_key"): 
Any(None, str), #: Hostname or IP address to verify on the node's certificate. #: This is useful if the certificate contains a different value #: than the one supplied in ``host``. An example of this situation #: is connecting to an IP address instead of a hostname. #: Set to ``False`` to disable certificate hostname verification. Optional("ssl_assert_hostname"): Any(None, str), #: SHA-256 fingerprint of the node's certificate. If this value is #: given then root-of-trust verification isn't done and only the #: node's certificate fingerprint is verified. #: #: On CPython 3.10+ this also verifies if any certificate in the #: chain including the Root CA matches this fingerprint. However #: because this requires using private APIs support for this is #: **experimental**. Optional("ssl_assert_fingerprint"): Any(None, str), Optional("ssl_version"): Any( None, str ), # Minimum acceptable TLS/SSL version #: Pre-configured :class:`ssl.SSLContext` OBJECT. If this value #: is given then no other TLS options (besides #: ``ssl_assert_fingerprint``) can be set on the #: :class:`elastic_transport.NodeConfig`. Optional("ssl_context"): Any(None, str), # Keeping this here in case someone APIs it, but otherwise it's not # likely to be used. 
Optional("ssl_show_warn"): Boolean(), Optional("transport_class"): Any(None, str), Optional("request_timeout"): Any( None, All(Coerce(float), Range(min=0.1, max=86400.0)) ), # node_class: Union[str, Type[BaseNode]] = Urllib3HttpNode, Optional("node_class"): Any(None, str), # node_pool_class: Type[NodePool] = NodePool, Optional("node_pool_class"): Any(None, str), Optional("randomize_nodes_in_pool"): Boolean(), # node_selector_class: Optional[Union[str, Type[NodeSelector]]] = None, Optional("node_selector_class"): Any(None, str), Optional("dead_node_backoff_factor"): Any(None, float), Optional("max_dead_node_backoff"): Any(None, float), # One of: # "Serializer" # "JsonSerializer" # "TextSerializer" # "NdjsonSerializer" # "CompatibilityModeJsonSerializer" # "CompatibilityModeNdjsonSerializer" # "MapboxVectorTileSerializer" Optional("serializer"): Any(None, str), # ??? # :arg serializers: optional dict of serializer instances that will be # used for deserializing data coming from the server. 
(key is the # mimetype), e.g.: {'mimetype':'serializer'} # "Serializer" # "JsonSerializer" # "TextSerializer" # "NdjsonSerializer" # "CompatibilityModeJsonSerializer" # "CompatibilityModeNdjsonSerializer" # "MapboxVectorTileSerializer" Optional("serializers"): Any(None, dict), Optional("default_mimetype"): Any(None, str), Optional("max_retries"): Any( None, All(Coerce(int), Range(min=1, max=100)) ), # retry_on_status: Collection[int] = (429, 502, 503, 504), Optional("retry_on_status"): Any(None, tuple), Optional("retry_on_timeout"): Boolean(), Optional("sniff_on_start"): Boolean(), Optional("sniff_before_requests"): Boolean(), Optional("sniff_on_node_failure"): Boolean(), Optional("sniff_timeout"): Any( None, All(Coerce(float), Range(min=0.1, max=100.0)) ), Optional("min_delay_between_sniffing"): Any( None, All(Coerce(float), Range(min=1, max=100.0)) ), # Optional[ # Callable[ # ["Transport", "SniffOptions"], # Union[List[NodeConfig], List[NodeConfig]], # ] # ] = None, Optional("sniffed_node_callback"): Any(None, str), Optional("meta_header"): Boolean(), # Cannot specify both 'request_timeout' and 'timeout' # Optional('timeout', default=10.0): All(Coerce(float), # Range(min=1, max=120)), # Cannot specify both 'randomize_hosts' and 'randomize_nodes_in_pool' # Optional('randomize_hosts', default=True): Boolean(), Optional("host_info_callback"): Any( None, str ), # ??? needs the name of a callback function # Cannot specify both 'sniffer_timeout' and 'min_delay_between_sniffing' # Optional('sniffer_timeout', default=0.5): All(Coerce(float), # Range(min=0.1, max=10.0)), # Cannot specify both 'sniff_on_connection_fail' and # 'sniff_on_node_failure' # Optional('sniff_on_connection_fail', default=False): Boolean(), # Optional('http_auth'): Any(None, str), # Favor basic_auth instead Optional("_transport"): Any(None, str), # ??? 
}, } ) def version_max() -> t.Tuple: """Return the max version""" return VERSION_MAX def version_min() -> t.Tuple: """Return the min version""" return VERSION_MIN def client_settings() -> t.Sequence[str]: """Return the client settings""" return CLIENT_SETTINGS def config_settings() -> t.Sequence[str]: """ Return only the client settings likely to be used in a config file or at the command-line. This means ignoring some that are valid in :py:class:`~.elasticsearch8.Elasticsearch` but are handled different locally. Namely, ``api_key`` is handled by :py:class:`~.es_client.builder.OtherArgs`. """ ignore = ["api_key"] settings = [] for setting in CLIENT_SETTINGS: if setting not in ignore: settings.append(setting) return settings def other_settings() -> t.Sequence[str]: """Return the other settings""" return OTHER_SETTINGS es_client-8.17.4/src/es_client/exceptions.py000066400000000000000000000016321476634113400210620ustar00rootroot00000000000000"""es_client Exception classes""" class ESClientException(Exception): """ Base class for all exceptions raised by es_client which are not Elasticsearch exceptions. """ class ConfigurationError(ESClientException): """ Exception raised when a misconfiguration is detected """ class MissingArgument(ESClientException): """ Exception raised when a needed argument is not passed. """ class NotMaster(ESClientException): """ Exception raised when connected node is not the elected master node. """ class LoggingException(ESClientException): """ Exception raised when logging cannot be configured properly """ class SchemaException(ESClientException): """ Exception base class for all exceptions related to Schema failure """ class FailedValidation(SchemaException): """ Exception raised when SchemaCheck validation fails. 
""" es_client-8.17.4/src/es_client/helpers/000077500000000000000000000000001476634113400177675ustar00rootroot00000000000000es_client-8.17.4/src/es_client/helpers/__init__.py000066400000000000000000000000001476634113400220660ustar00rootroot00000000000000es_client-8.17.4/src/es_client/helpers/config.py000066400000000000000000000544151476634113400216170ustar00rootroot00000000000000"""Command-line configuration parsing and client builder helper functions""" import typing as t import logging from shutil import get_terminal_size from dotmap import DotMap # type: ignore from click import Context, secho, option as clickopt from elasticsearch8 import Elasticsearch from es_client.builder import Builder from es_client.defaults import ( CLICK_SETTINGS, ENV_VAR_PREFIX, VERSION_MIN, VERSION_MAX, config_settings, ) from es_client.exceptions import ESClientException, ConfigurationError from es_client.helpers.utils import ( check_config, get_yaml, prune_nones, verify_url_schema, ) def cli_opts( value: str, settings: t.Union[t.Dict, None] = None, onoff: t.Union[t.Dict, None] = None, override: t.Union[t.Dict, None] = None, ) -> t.Tuple[t.Tuple[str,], t.Dict]: """ :param value: The command-line :py:class:`option ` name. The key must be present in `settings`, or in :py:const:`CLICK_SETTINGS ` :param settings: A dictionary consisting of :py:class:`click.Option` names as keys, with each key having a dictionary consisting of :py:class:`click.Option` parameter names as keys, with their associated settings as the value. If `settings` is not provided, it will be populated by :py:const:`CLICK_SETTINGS `. :param onoff: A dictionary consisting of the keys `on` and `off`, with values used to set up a `Click boolean option`_, .e.g. ``{'on': '', 'off': 'no-'}``. See below for examples. :param override: A dictionary consisting of keys in `settings` with values you wish to override. 
:type value: str :type settings: dict :type onoff: dict :type override: dict :rtype: Tuple :returns: A value suitable to use with the :py:func:`click.option` decorator, appearing as a tuple containing a tuple and a dictionary, e.g. .. code-block:: python (('--OPTION1',),{'key1', 'value1', ...}) Click uses decorators to establish :py:class:`options ` and :py:class:`arguments ` for a :py:class:`command `. The parameters specified for these decorator functions can be stored as default dictionaries, then expanded and overridden, if desired. In the `cli_example.py` file, the regular :py:func:`click.option decorator function ` is wrapped by :py:func:`option_wrapper() `, and is aliased as ``click_opt_wrap``. This wrapped decorator in turn calls this function and utilizes ``*`` arg expansion. If `settings` is `None`, default values from :py:const:`CLICK_SETTINGS `, are used to populate `settings`. This function calls :func:`override_settings()` to override keys in `settings` with values from matching keys in `override`. In the example file, this looks like this: .. code-block:: python import click from es_client.helpers.utils import option_wrapper defaults.ONOFF = {'on': '', 'off': 'no-'} click_opt_wrap = option_wrapper() # ... @click.group(context_settings=context_settings()) @click_opt_wrap(*cli_opts('OPTION1', settings={KEY: NEWVALUE})) @click_opt_wrap(*cli_opts('OPTION2', onoff=tgl)) # ... @click_opt_wrap(*cli_opts('OPTIONX')) @click.pass_context def run(ctx, OPTION1, OPTION2, ..., OPTIONX): # code here The default setting KEY of ``OPTION1`` would be overriden by NEWVALUE. ``OPTION2`` automatically becomes a `Click boolean option`_, which splits the option into an enabled/disabled dichotomy by option name. In this example, it will be rendered as: .. code-block:: shell '--OPTION2/--no-OPTION2' The dictionary structure of `defaults.ONOFF` is what this what this function requires, i.e. an `on` key and an `off` key. 
The values for `on` and `off` can be whatever you like, e.g. .. code-block:: python defaults.ONOFF = {'on': 'enable-', 'off': 'disable-'} which, based on the above example, would render as: .. code-block:: shell '--enable-OPTION2/--disable-OPTION2' It could also be: .. code-block:: python defaults.ONOFF = {'on': 'monty-', 'off': 'python-'} which would render as: .. code-block:: shell '--monty-OPTION2/--python-OPTION2' but that would be too silly. A :py:exc:`ConfigurationError ` is raised `value` is not found as a key in `settings`, or if the `onoff` parsing fails. .. _Click boolean option: https://click.palletsprojects.com/en/8.1.x/options/#boolean-flags """ if override is None: override = {} if settings is None: settings = CLICK_SETTINGS if not isinstance(settings, dict): raise ConfigurationError(f'"settings" is not a dictionary: {type(settings)}') if value not in settings: raise ConfigurationError(f"{value} not in settings") argval = f"--{value}" if isinstance(onoff, dict): try: argval = f'--{onoff["on"]}{value}/--{onoff["off"]}{value}' except KeyError as exc: raise ConfigurationError from exc return (argval,), override_settings(settings[value], override) def cloud_id_override(args: t.Dict, ctx: Context) -> t.Dict: """ :param args: A dictionary built from :py:attr:`ctx.params ` keys and values. :param ctx: The Click command context :type args: dict :type ctx: :py:class:`Context ` :rtype: dict :returns: Updated version of `args` If ``hosts`` are defined in the YAML configuration file, but ``cloud_id`` is specified at the command-line, we need to remove the ``hosts`` parameter from the configuration dictionary built from the YAML file before merging. Command-line provided arguments always supersede configuration file ones. In this case, ``cloud_id`` and ``hosts`` are mutually exclusive, and the command-line provided ``cloud_id`` must supersede a configuration file provided ``hosts``. 
This function returns an updated dictionary `args` to be used for the final configuration as well as updates the :py:attr:`ctx.obj['client_args'] ` object. It's simply easier to merge dictionaries using a separate object. It would be a pain and unnecessary to make another entry in :py:attr:`ctx.obj ` for this. """ logger = logging.getLogger(__name__) if "cloud_id" in ctx.params and ctx.params["cloud_id"]: logger.debug( "cloud_id from command-line superseding configuration file settings" ) ctx.obj["client_args"].hosts = None args.pop("hosts", None) return args def context_settings() -> t.Dict: """ :rtype: dict :returns: kwargs suitable to be used as Click :py:class:`Command ` `context_settings` parameters. Includes the terminal width from :py:func:`get_width()` Help format settings: .. code-block:: python help_option_names=['-h', '--help'] The default context object (``ctx.obj``) dictionary: .. code-block:: python obj={'default_config': None} And automatic environment variable reading based on a prefix value: .. code-block:: python auto_envvar_prefix=ENV_VAR_PREFIX from :py:const:`ENV_VAR_PREFIX ` """ objdef = {"obj": {"default_config": None}} prefix = {"auto_envvar_prefix": ENV_VAR_PREFIX} help_options = {"help_option_names": ["-h", "--help"]} return {**get_width(), **help_options, **objdef, **prefix} def generate_configdict(ctx: Context) -> None: """ :param ctx: The Click command context :type ctx: :py:class:`Context ` :rtype: None Generate a client configuration dictionary from :py:attr:`ctx.params ` and :py:attr:`ctx.obj['default_config'] ` (if provided), suitable for use as the ``VALUE`` in :py:class:`Builder(configdict=VALUE) ` It is stored as :py:attr:`ctx.obj['default_config'] ` and can be referenced after this function returns. 
The flow of this function is as follows: Step 1: Call :func:`get_arg_objects()` to create :py:attr:`ctx.obj['client_args'] ` and :py:attr:`ctx.obj['other_args'] `, then update their values from :py:attr:`ctx.obj['draftcfg'] ` (which was populated by :func:`get_config()`). Step 2: Call :func:`override_client_args()` and :func:`override_other_args()`, which will use command-line args from :py:attr:`ctx.params ` to override any values from the YAML configuration file. Step 3: Populate :py:attr:`ctx.obj['configdict'] ` from the resulting values. """ get_arg_objects(ctx) override_client_args(ctx) override_other_args(ctx) ctx.obj["configdict"] = { "elasticsearch": { "client": prune_nones(ctx.obj["client_args"].toDict()), "other_settings": prune_nones(ctx.obj["other_args"].toDict()), } } def get_arg_objects(ctx: Context) -> None: """ :param ctx: The Click command context :type ctx: :py:class:`Context ` :rtype: None Set :py:attr:`ctx.obj['client_args'] ` as a :py:class:`~.dotmap.DotMap` object, and :py:attr:`ctx.obj['other_args'] ` as an :py:class:`~.dotmap.DotMap` object. These will be updated with values returned from :func:`check_config(ctx.obj['draftcfg']) `. :py:attr:`ctx.obj['draftcfg'] ` was populated when :func:`get_config()` was called. 
""" ctx.obj["client_args"] = DotMap() ctx.obj["other_args"] = DotMap() validated_config = check_config(ctx.obj["draftcfg"], quiet=True) ctx.obj["client_args"].update(DotMap(validated_config["client"])) ctx.obj["other_args"].update(DotMap(validated_config["other_settings"])) def get_client( configdict: t.Union[t.Dict, None] = None, configfile: t.Union[str, None] = None, autoconnect: bool = False, version_min: t.Tuple = VERSION_MIN, version_max: t.Tuple = VERSION_MAX, ) -> Elasticsearch: """ :param configdict: A configuration dictionary :param configfile: A YAML configuration file :param autoconnect: Connect to client automatically :returns: A client connection object :rtype: :py:class:`~.elasticsearch.Elasticsearch` Get an Elasticsearch Client using :py:class:`~.es_client.builder.Builder` Build a client connection object out of settings from `configfile` or `configdict`. If neither `configfile` nor `configdict` is provided, empty defaults will be used. If both are provided, `configdict` will be used, and `configfile` ignored. Raises :py:exc:`ESClientException ` if unable to connect. """ logger = logging.getLogger(__name__) logger.debug("Creating client object and testing connection") builder = Builder( configdict=configdict, configfile=configfile, autoconnect=autoconnect, version_max=version_max, version_min=version_min, ) try: builder.connect() except Exception as exc: logger.critical("Unable to establish client connection to Elasticsearch!") logger.critical("Exception encountered: %s", exc) raise ESClientException from exc return builder.client def get_config(ctx: Context, quiet: bool = True) -> Context: """ :param ctx: The Click command context :param quiet: If the default configuration file is being used, suppress the ``STDOUT`` message indicating that. :type ctx: :py:class:`Context ` :type quiet: bool :rtype: None If :py:attr:`ctx.params['config'] ` is a valid path, return the validated dictionary from the YAML. 
If nothing has been provided to :py:attr:`ctx.params['config'] `, but :py:attr:`ctx.obj['default_config'] ` is populated, use that, and write a line to ``STDOUT`` explaining this, unless `quiet` is `True`. Writing directly to ``STDOUT`` is done here because logging has not yet been configured, nor can it be as the configuration options are just barely being read. Store the result in :py:attr:`ctx.obj['draftcfg'] ` """ ctx.obj["draftcfg"] = {"config": {}} # Set a default empty value if ctx.params["config"]: ctx.obj["draftcfg"] = get_yaml(ctx.params["config"]) # If no config was provided, but default config path exists, use it instead elif "default_config" in ctx.obj and ctx.obj["default_config"]: if not quiet: secho( f"Using default configuration file at {ctx.obj['default_config']}", bold=True, ) ctx.obj["draftcfg"] = get_yaml(ctx.obj["default_config"]) return ctx def get_hosts(ctx: Context) -> t.Union[t.Sequence[str], None]: """ :param ctx: The Click command context :type ctx: :py:class:`Context ` :returns: A list of hosts :rtype: list Return a list of hosts suitable for :py:attr:`ClientArgs.hosts ` from :py:attr:`ctx.params['hosts'] `, validating the url schema for Elasticsearch compliance for each host provided. Raises a :py:exc:`ConfigurationError ` if schema validation fails. """ logger = logging.getLogger(__name__) hostslist = [] if "hosts" in ctx.params and ctx.params["hosts"]: for host in list(ctx.params["hosts"]): try: hostslist.append(verify_url_schema(host)) except ConfigurationError as err: logger.error("Incorrect URL Schema: %s", err) raise ConfigurationError from err else: return None return hostslist def get_width() -> t.Dict: """ :rtype: dict :returns: A dictionary suitable for use by itself as the Click :py:class:`Command ` `context_settings` parameter. 
Determine terminal width by calling :py:func:`shutil.get_terminal_size` Return value takes the form of ``{"max_content_width": get_terminal_size()[0]}`` """ return {"max_content_width": get_terminal_size()[0]} def hosts_override(args: t.Dict, ctx: Context) -> t.Dict: """ :param args: A dictionary built from :py:attr:`ctx.params ` keys and values. :param ctx: The Click command context :type args: dict :type ctx: :py:class:`Context ` :rtype: dict :returns: Updated version of `args` If `hosts` are provided at the command-line and are present in :py:attr:`ctx.params['hosts'] `, but `cloud_id` was in the config file, we need to remove the `cloud_id` key from the configuration dictionary built from the YAML file before merging. Command-line provided arguments always supersede configuration file ones, including `hosts` overriding a file-based `cloud_id`. This function returns an updated dictionary `args` to be used for the final configuration as well as updates the :py:attr:`ctx.obj['client_args'] ` object. It's simply easier to merge dictionaries using a separate object. It would be a pain and unnecessary to make another entry in :py:attr:`ctx.obj ` for this. 
""" logger = logging.getLogger(__name__) if "hosts" in ctx.params and ctx.params["hosts"]: logger.debug("hosts from command-line superseding configuration file settings") ctx.obj["client_args"].hosts = None ctx.obj["client_args"].cloud_id = None args.pop("cloud_id", None) return args def options_from_dict(options_dict) -> t.Callable: """Build Click options decorators programmatically""" def decorator(func): for option in reversed(options_dict): # Shorten our "if" statements by making dct shorthand for # options_dict[option] dct = options_dict[option] onoff = dct["onoff"] if "onoff" in dct else None override = dct["override"] if "override" in dct else None settings = dct["settings"] if "settings" in dct else None if settings is None: settings = CLICK_SETTINGS[option] argval = f"--{option}" if isinstance(onoff, dict): try: argval = f'--{onoff["on"]}{option}/--{onoff["off"]}{option}' except KeyError as exc: raise ConfigurationError from exc param_decls = (argval, option.replace("-", "_")) attrs = override_settings(settings, override) if override else settings clickopt(*param_decls, **attrs)(func) return func return decorator def override_client_args(ctx: Context) -> None: """ :param ctx: The Click command context :type ctx: :py:class:`Context ` :rtype: None Override :py:attr:`ctx.obj['client_args'] ` settings with any values found in :py:attr:`ctx.params ` Update :py:attr:`ctx.obj['client_args'] ` with the results. In the event that there are neither ``hosts`` nor a ``cloud_id`` after the updates, log to debug that this is the case, and that the default value for ``hosts`` of ``http://127.0.0.1:9200`` will be used. 
""" logger = logging.getLogger(__name__) args = {} # Populate args from ctx.params for key, value in ctx.params.items(): if key in config_settings(): if key == "hosts": args[key] = get_hosts(ctx) elif value is not None: args[key] = value args = cloud_id_override(args, ctx) args = hosts_override(args, ctx) args = prune_nones(args) # Update the object if we have settings to override after pruning None values if args: for arg in args: logger.debug("Using value for %s provided as a command-line option", arg) ctx.obj["client_args"].update(DotMap(args)) # Use a default hosts value of localhost:9200 if there is no host and no cloud_id if ctx.obj["client_args"].hosts is None and ctx.obj["client_args"].cloud_id is None: logger.debug( "No hosts or cloud_id set! Setting default host to http://127.0.0.1:9200" ) ctx.obj["client_args"].hosts = ["http://127.0.0.1:9200"] def override_other_args(ctx: Context) -> None: """ :param ctx: The Click command context :type ctx: :py:class:`Context ` :rtype: None Override :py:attr:`ctx.obj['other_args'] ` settings with any values found in :py:attr:`ctx.params ` Update :py:attr:`ctx.obj['other_args'] ` with the results. 
""" logger = logging.getLogger(__name__) apikey = prune_nones( { "id": ctx.params["id"], "api_key": ctx.params["api_key"], "token": ctx.params["api_token"], } ) args = prune_nones( { "master_only": ctx.params["master_only"], "skip_version_test": ctx.params["skip_version_test"], "username": ctx.params["username"], "password": ctx.params["password"], } ) args["api_key"] = apikey # Remove `api_key` root key if `id` and `api_key` and `token` are all None if ( ctx.params["id"] is None and ctx.params["api_key"] is None and ctx.params["api_token"] is None ): del args["api_key"] if args: for arg in args: logger.debug("Using value for %s provided as a command-line option", arg) ctx.obj["other_args"].update(DotMap(args)) def override_settings(settings: t.Dict, override: t.Dict) -> t.Dict: """ :param settings: The source data :param override: The data which will override `settings` :type settings: dict :type override: dict :rtype: dict :returns: An dictionary based on `settings` updated with values from `override` This function is called by :func:`cli_opts()` in order to override settings used in a :py:class:`Click Option `. Click uses decorators to establish :py:class:`options ` and :py:class:`arguments ` for a :py:class:`command `. The parameters specified for these decorator functions can be stored as default dictionaries, then expanded and overridden, if desired. In the `cli_example.py` file, the regular :py:func:`click.option decorator function ` is wrapped by :py:func:`option_wrapper() `, and is aliased as ``click_opt_wrap``. This wrapped decorator in turn calls :func:`cli_opts()` and utilizes ``*`` arg expansion. :func:`cli_opts()` references defaults, and calls this function to override keys in `settings` with values from matching keys in `override`. In the example file, this looks like this: .. 
code-block:: python import click from es_client.helpers.utils import option_wrapper defaults.OVERRIDE = {KEY: NEWVALUE} click_opt_wrap = option_wrapper() @click.group(context_settings=context_settings()) @click_opt_wrap(*cli_opts('OPTION1')) @click_opt_wrap(*cli_opts('OPTION2', settings=defaults.OVERRIDE)) ... @click_opt_wrap(*cli_opts('OPTIONX')) @click.pass_context def run(ctx, OPTION1, OPTION2, ..., OPTIONX): # code here The default setting KEY of ``OPTION2`` would be overriden by NEWVALUE. """ if not isinstance(override, dict): raise ConfigurationError(f"override must be of type dict: {type(override)}") for key in list(override.keys()): # This formerly checked for the presence of key in settings, but override # should add non-existing keys if desired. settings[key] = override[key] return settings es_client-8.17.4/src/es_client/helpers/logging.py000066400000000000000000000342241476634113400217740ustar00rootroot00000000000000"""Logging Helpers""" # The __future__ annotations line allows support for Python 3.8 and 3.9 to continue from __future__ import annotations import typing as t import sys import json import time import logging from logging import FileHandler, StreamHandler from voluptuous import Schema from click import Context, echo as clicho import ecs_logging from es_client.exceptions import LoggingException from es_client.defaults import config_logging, LOGDEFAULTS from es_client.helpers.schemacheck import SchemaCheck from es_client.helpers.utils import ensure_list, prune_nones # from pathlib import Path # used in the is_docker() function # pylint: disable=R0903 logger = logging.getLogger('') # Get the root logger for this module class Whitelist(logging.Filter): """ Child class inheriting :py:class:`logging.Filter`, patched to permit only specifically named :py:func:`loggers ` to write logs. """ # pylint: disable=super-init-not-called def __init__(self, *whitelist: list): """ :param whitelist: List of names defined by :py:func:`logging.getLogger()` e.g. 
.. code-block: python ['es_client.helpers.config', 'es_client.builder'] """ self.whitelist = [logging.Filter(name) for name in whitelist] def filter(self, record): return any(f.filter(record) for f in self.whitelist) class Blacklist(Whitelist): """ Child class inheriting :py:class:`Whitelist`, patched to permit all but specifically named :py:func:`loggers ` to write logs. A monkey-patched inversion of Whitelist, i.e. .. code-block: python return not Whitelist.filter(self, record) """ def filter(self, record): return not Whitelist.filter(self, record) class JSONFormatter(logging.Formatter): """JSON message formatting""" # The LogRecord attributes we want to carry over to the JSON message, # mapped to the corresponding output key. WANTED_ATTRS = { "levelname": "loglevel", "funcName": "function", "lineno": "linenum", "message": "message", "name": "name", } def format(self, record: logging.LogRecord) -> str: """ :param record: The incoming log message :rtype: :py:meth:`json.dumps` """ self.converter = time.gmtime fmt = "%Y-%m-%dT%H:%M:%S" mil = str(record.msecs).split(".", maxsplit=1)[0] timestamp = f"{self.formatTime(record, datefmt=fmt)}.{mil}Z" result = {"@timestamp": timestamp} available = record.__dict__ # This is cleverness because 'message' is NOT a member key of # ``record.__dict__`` the ``getMessage()`` method is effectively ``msg % args`` # (actual keys) By manually adding 'message' to ``available``, it simplifies # the code available["message"] = record.getMessage() for attribute in set(self.WANTED_ATTRS).intersection(available): result = deepmerge( de_dot(self.WANTED_ATTRS[attribute], getattr(record, attribute)), result ) # The following is mostly for mimicking the ecs format. You can't have 2x # 'message' keys in WANTED_ATTRS, so we set the value to 'log.original' for # ecs, and this code block guarantees it still appears as 'message' too. 
if "message" not in result.items(): result["message"] = available["message"] return json.dumps(result, sort_keys=True) def check_logging_config(config: t.Dict) -> Schema: """ :param config: Logging configuration data :type config: dict :returns: :py:class:`~.es_client.helpers.schemacheck.SchemaCheck` validated logging configuration. Ensure that the top-level key ``logging`` is in `config`. Set empty default dictionary if key ``logging`` is not in `config`. Pass the result to :py:class:`~.es_client.helpers.schemacheck.SchemaCheck` for full validation. """ if not isinstance(config, dict): clicho( f"Must supply logging information as a dictionary. " f'You supplied: "{config}" which is "{type(config)}"' f"Using default logging values." ) log_settings = {} elif "logging" not in config: # None provided. Use defaults. log_settings = {} else: if config["logging"]: log_settings = prune_nones(config["logging"]) else: log_settings = {} return SchemaCheck( log_settings, config_logging(), "Logging Configuration", "logging" ).result() def configure_logging(ctx: Context) -> None: """ :param ctx: The Click command context :type params: :py:class:`~.click.Context` :rtype: None Configure logging based on a combination of :py:attr:`ctx.obj['draftcfg'] ` and :py:attr:`ctx.params `. Values in :py:attr:`ctx.params ` will override anything set in :py:attr:`ctx.obj['draftcfg'] ` """ logcfg = override_logging(ctx) # Now enable logging with the merged settings, verifying the settings are still good set_logging(logcfg) def de_dot(dot_string: str, msg: str) -> t.Union[t.Dict[str, str], None]: """ :param dot_string: The dotted string :param msg: The message :type dot_string: str :type msg: str :rtype: dict :returns: A nested dictionary of keys with the final value being the message Turn `message` and `dot_string` into a nested dictionary. 
Used by :py:class:`JSONFormatter` """ arr = dot_string.split(".") arr.append(msg) retval = None for idx in range(len(arr), 1, -1): if not retval: try: retval = {arr[idx - 2]: arr[idx - 1]} except Exception as err: raise LoggingException(err) from err else: try: new_d = {arr[idx - 2]: retval} retval = new_d except Exception as err: raise LoggingException(err) from err return retval def deepmerge(source: t.Dict, destination: t.Dict) -> t.Dict: """ :param source: Source dictionary :param destination: Destination dictionary :type source: dict :type destination: dict :returns: destination :rtype: dict Recursively merge deeply nested dictionary structure `source` into `destination`. Used by :py:class:`JSONFormatter` """ for key, value in source.items(): if isinstance(value, dict): node = destination.setdefault(key, {}) deepmerge(value, node) else: destination[key] = value return destination def get_format_string(nll: int) -> str: """ :param nll: The numeric log level :type nll: int :rtype: str :returns: The format string based on the numeric log level """ return ( "%(asctime)s %(levelname)-9s %(name)22s " "%(funcName)22s:%(lineno)-4d %(message)s" if nll == 10 else "%(asctime)s %(levelname)-9s %(message)s" ) def get_logger(log_opts: t.Dict) -> None: """Get the root logger with the appropriate handler(s) attached If a log file is provided in `log_opts`, a :py:class:`~.logging.FileHandler` is returned. If not, it will split logs into stdout and stderr, with the former handling messages up to INFO level, and the latter handling messages above that level. 
:param log_opts: Logging configuration data logger_name: Default logger name to use in :py:func:`logging.getLogger()` :type log_opts: dict logger_name: str :rtype logging.Logger :returns The root logger with the appropriate handler(s) attached """ logfile = log_opts.get("logfile", None) kind = log_opts.get("logformat", "default") nll = get_numeric_loglevel(log_opts.get("loglevel", "INFO")) # Set the level for the root logger logger.setLevel(nll) handler_map = { # We can't set FileHandler to a null pointer/None "logfile": FileHandler(logfile) if logfile else None, "stdout": StreamHandler(stream=sys.stdout), "stderr": StreamHandler(stream=sys.stderr), } format_map = { "default": logging.Formatter(get_format_string(nll)), "json": JSONFormatter(), "ecs": ecs_logging.StdlibFormatter(), } def add_handler(source: t.Literal['logfile', 'stdout', 'stderr']) -> None: handler = handler_map[source] handler.setFormatter(format_map[kind]) handler.setLevel(nll) if source == 'stdout': handler.addFilter(lambda record: record.levelno <= logging.INFO) if source == 'stderr': # Establish our upper bound filter for stderr, in case it's set to # ERROR or CRITICAL, and filter them. handler.setLevel(logging.WARNING) fltr = max(logging.WARNING, nll) handler.addFilter(lambda record: record.levelno >= fltr) for entry in ensure_list(log_opts["blacklist"]): handler.addFilter(Blacklist(entry)) logger.addHandler(handler) # Add to the root logger # if we have a logfile, then use that if logfile: add_handler('logfile') else: add_handler('stdout') add_handler('stderr') def get_numeric_loglevel(level: str) -> int: """ :param level: The log level :type level: str :rtype: int :returns: A numeric value mapped from `level`. The mapping is as follows: .. list-table:: Log Levels :widths: 10 5 85 :header-rows: 1 * - Level - # - Description * - NOTSET - 0 - When set on a logger, indicates that ancestor loggers are to be consulted to determine the effective level. 
If that still resolves to NOTSET, then all events are logged. When set on a handler, all events are handled. * - DEBUG - 10 - Detailed information, typically only of interest to a developer trying to diagnose a problem. * - INFO - 20 - Confirmation that things are working as expected. * - WARNING - 30 - An indication that something unexpected happened, or that a problem might occur in the near future (e.g. 'disk space low'). The software is still working as expected. * - ERROR - 40 - Due to a more serious problem, the software has not been able to perform some function. * - CRITICAL - 50 - A serious error, indicating that the program itself may be unable to continue running. Raises a :py:exc:`ValueError` exception if an invalid value for `level` is provided. """ numeric_log_level = getattr(logging, level.upper(), None) if not isinstance(numeric_log_level, int): raise ValueError(f"Invalid log level: {level}") return numeric_log_level # def is_docker() -> bool: # """ # :rtype: bool # :returns: Boolean result of whether we are runinng in a Docker container or not # """ # cgroup = Path("/proc/self/cgroup") # return ( # Path("/.dockerenv").is_file() # or cgroup.is_file() # and "docker" in cgroup.read_text(encoding="utf8") # ) def override_logging(ctx: Context) -> t.Dict: """ :param ctx: The Click command context :type params: :py:class:`~.click.Context` :returns: Log configuration ready for validation Get logging configuration from `ctx.obj['draftcfg']` and override with any command-line options """ # Check for log settings from config file init_logcfg = check_logging_config(ctx.obj["draftcfg"]) # Set debug to True if config file says loglevel is DEBUG debug = "loglevel" in init_logcfg and init_logcfg["loglevel"] == "DEBUG" # if 'loglevel' is not None if "loglevel" in ctx.params and ctx.params["loglevel"] is not None: # Set debug to True if command-line options says loglevel is DEBUG, # otherwise set debug to False (overriding what was set by config file) debug = 
ctx.params["loglevel"] == "DEBUG" # Override anything with options from the command-line paramlist = ["loglevel", "logfile", "logformat", "blacklist"] for entry in paramlist: if entry in ctx.params: if not ctx.params[entry]: continue # Output to stdout if debug is True and we're not overriding a None # (the default) and we're not overriding DEBUG with DEBUG ;) if ( debug and init_logcfg[entry] is not None and init_logcfg["loglevel"] != "DEBUG" ): clicho( f"DEBUG: Overriding configuration file setting {entry}=" f"{init_logcfg[entry]} with command-line option {entry}=" f"{ctx.params[entry]}" ) if entry == "blacklist": init_logcfg[entry] = list(ctx.params[entry]) else: init_logcfg[entry] = ctx.params[entry] return init_logcfg def check_log_opts(log_opts: t.Dict) -> t.Dict: """ :param log_opts: Logging configuration data :returns: Updated `log_opts` dictionary with default values where unset """ for k, v in LOGDEFAULTS.items(): log_opts[k] = v if k not in log_opts else log_opts[k] return log_opts def set_logging(options: t.Dict) -> None: """ :param options: Logging configuration data :param logger_name: Default logger name to use in :py:func:`logging.getLogger()` Configure global logging options from `options` and set a default `logger_name` """ log_opts = check_log_opts(options) get_logger(log_opts) # Set up NullHandler() to handle nested elasticsearch8.trace Logger # instance in elasticsearch python client logging.getLogger("elasticsearch8.trace").addHandler(logging.NullHandler()) if log_opts["blacklist"]: for entry in ensure_list(log_opts["blacklist"]): for handler in logging.root.handlers: handler.addFilter(Blacklist(entry)) es_client-8.17.4/src/es_client/helpers/schemacheck.py000066400000000000000000000117041476634113400226020ustar00rootroot00000000000000"""SchemaCheck class and associated functions""" # pylint: disable=protected-access, broad-except import typing as t import logging from re import sub from copy import deepcopy from voluptuous import Schema from 
es_client.defaults import KEYS_TO_REDACT from es_client.exceptions import FailedValidation def password_filter(data: t.Dict) -> t.Dict: """ :param data: Configuration data :returns: A :py:class:`~.copy.deepcopy` of `data` with the value obscured by ``REDACTED`` if the key is one of :py:const:`~.es_client.defaults.KEYS_TO_REDACT`. Recursively look through all nested structures of `data` for keys from :py:const:`~.es_client.defaults.KEYS_TO_REDACT` and redact the value with ``REDACTED`` """ def iterdict(mydict): for key, value in mydict.items(): if isinstance(value, dict): iterdict(value) elif key in KEYS_TO_REDACT: mydict.update({key: "REDACTED"}) return mydict return iterdict(deepcopy(data)) class SchemaCheck: """ :param config: A configuration dictionary. :param schema: A voluptuous schema definition :param test_what: which configuration block is being validated :param location: An string to report which configuration sub-block is being tested. :type config: dict :type schema: :py:class:`~.voluptuous.schema_builder.Schema` :type test_what: str :type location: str Validate `config` with the provided :py:class:`~.voluptuous.schema_builder.Schema`. :py:attr:`~.es_client.helpers.schemacheck.SchemaCheck.test_what` and :py:attr:`~.es_client.helpers.schemacheck.SchemaCheck.location` are used for reporting in case of failure. If validation is successful, the :py:meth:`~.es_client.helpers.schemacheck.SchemaCheck.result` method returns :py:attr:`~.es_client.helpers.schemacheck.SchemaCheck.config`. """ def __init__(self, config: t.Dict, schema: Schema, test_what: str, location: str): self.logger = logging.getLogger(__name__) # Set the Schema for validation... 
self.logger.debug("Schema: %s", schema) if isinstance(config, dict): self.logger.debug('"%s" config: %s', test_what, password_filter(config)) else: self.logger.debug('"%s" config: %s', test_what, config) #: Object attribute that gets the value of param `config` self.config = config #: Object attribute that gets the value of param `schema` self.schema = schema #: Object attribute that gets the value of param `test_what` self.test_what = test_what #: Object attribute that gets the value of param `location` self.location = location #: Object attribute that is initialized with the value ``no bad value yet`` self.badvalue = "no bad value yet" #: Object attribute that is initialized with the value ``No error yet`` self.error = "No error yet" def parse_error(self) -> t.Any: """ Report the error, and try to report the bad key or value as well. """ def get_badvalue(data_string, data): elements = sub(r"[\'\]]", "", data_string).split("[") elements.pop(0) # Get rid of data as the first element value = None for k in elements: try: key = int(k) except ValueError: key = k if value is None: value = data[key] # if this fails, it's caught below return value try: self.badvalue = get_badvalue(str(self.error).split()[-1], self.config) except Exception as exc: self.logger.error("Unable to extract value: %s", exc) self.badvalue = "(could not determine)" def result(self) -> Schema: """ :rtype: Schema :returns: :py:attr:`~.es_client.helpers.schemacheck.SchemaCheck.config` If validation is successful, return the value of :py:attr:`~.es_client.helpers.schemacheck.SchemaCheck.config` If unsuccessful, try to parse the error in :py:meth:`~.es_client.helpers.schemacheck.SchemaCheck.parse_error` and raise a :py:exc:`FailedValidation ` exception. 
""" try: return self.schema(self.config) except Exception as exc: try: # pylint: disable=E1101 self.error = exc.errors[0] except Exception as err: self.logger.error("Could not parse exception: %s", err) self.error = f"{exc}" self.parse_error() self.logger.error("Schema error: %s", self.error) raise FailedValidation( f"Configuration: {self.test_what}: Location: {self.location}: " f'Bad Value: "{self.badvalue}", {self.error}. Check configuration file.' ) from exc es_client-8.17.4/src/es_client/helpers/utils.py000066400000000000000000000166371476634113400215160ustar00rootroot00000000000000"""Helper Utility Functions""" import typing as t import logging import os import re import base64 import binascii from pathlib import Path import yaml # type: ignore import click from elasticsearch8 import Elasticsearch from es_client.defaults import ES_DEFAULT, config_schema from es_client.exceptions import ConfigurationError from es_client.helpers.schemacheck import SchemaCheck logger = logging.getLogger(__name__) def check_config(config: dict, quiet: bool = False) -> dict: """ :param config: The configuration :returns: A validated configuration dictionary for :py:class:`~.es_client.builder.Builder` Ensure that the top-level key ``elasticsearch`` and its sub-keys, ``other_settings`` and ``client`` are contained in `config` before passing it (or empty defaults) to :class:`~es_client.helpers.schemacheck.SchemaCheck` for value validation. """ if not isinstance(config, dict): logger.warning( "Elasticsearch client configuration must be provided as a dictionary." ) logger.warning('You supplied: "%s" which is "%s".', config, type(config)) logger.warning("Using default values.") es_settings = ES_DEFAULT elif "elasticsearch" not in config: # I only need this to be logged when Builder is initializing if not quiet: logger.warning( 'No "elasticsearch" setting in supplied configuration. Using defaults.' 
) es_settings = ES_DEFAULT else: es_settings = config for key in ["client", "other_settings"]: if key not in es_settings["elasticsearch"]: es_settings["elasticsearch"][key] = {} else: es_settings["elasticsearch"][key] = prune_nones( es_settings["elasticsearch"][key] ) return SchemaCheck( es_settings["elasticsearch"], config_schema(), "Elasticsearch Configuration", "elasticsearch", ).result() def ensure_list(data) -> list: """ :param data: A list or scalar variable to act upon Return a :py:class:`list`, even if `data` is a single value """ if not isinstance(data, list): # in case of a single value passed data = [data] return data def file_exists(file: str) -> bool: """ :param file: The file to test Verify `file` exists """ return Path(file).is_file() def get_version(client: Elasticsearch) -> t.Tuple: """ :param client: An Elasticsearch client object :type client: :py:class:`~.elasticsearch.Elasticsearch` :returns: The Elasticsearch version as a 3-part tuple, (major, minor, patch) Get the Elasticsearch version of the connected node """ version = client.info()["version"]["number"] # Split off any -dev, -beta, or -rc tags version = version.split("-")[0] # Only take SEMVER (drop any fields over 3) if len(version.split(".")) > 3: version = version.split(".")[:-1] else: version = version.split(".") return tuple(map(int, version)) def get_yaml(path: str) -> t.Dict: """ :param path: The path to a YAML configuration file. :returns: The contents of `path` translated from YAML to :py:class:`dict` Read the file identified by `path` and import its YAML contents. 
""" # Set the stage here to parse single scalar value environment vars from # the YAML file being read single = re.compile(r"^\$\{(.*)\}$") yaml.add_implicit_resolver("!single", single) def single_constructor(loader, node): value = loader.construct_scalar(node) proto = single.match(value).group(1) default = None if len(proto.split(":")) > 1: envvar, default = proto.split(":") else: envvar = proto return os.environ[envvar] if envvar in os.environ else default yaml.add_constructor("!single", single_constructor) try: return yaml.load(read_file(path), Loader=yaml.FullLoader) except (yaml.scanner.ScannerError, yaml.parser.ParserError) as exc: raise ConfigurationError(f"Unable to parse YAML file. Error: {exc}") from exc def option_wrapper() -> t.Callable: """ :py:func:`~.es_client.helpers.utils.passthrough()` the :py:func:`click.option` decorator function. """ return passthrough(click.option) def parse_apikey_token(token: str) -> t.Tuple: """ :param token: The base64 encoded API Key :returns: A tuple of (id, api_key) Split a base64 encoded API Key Token into id and api_key """ try: decoded = base64.b64decode(token).decode("utf-8") split = decoded.split(":") except (binascii.Error, IndexError, UnicodeDecodeError) as exc: raise ConfigurationError( f"Unable to parse base64 API Key Token: {exc}" ) from exc return (split[0], split[1]) def passthrough(func) -> t.Callable: """Wrapper to make it easy to store click configuration elsewhere""" return lambda a, k: func(*a, **k) def prune_nones(mydict: t.Dict) -> t.Dict: """ :param mydict: The dictionary to act on Remove keys from `mydict` whose values are `None` """ # Test for `None` instead of existence or zero values will be caught return dict([(k, v) for k, v in mydict.items() if v is not None and v != "None"]) def read_file(myfile: str) -> str: """ :param myfile: A file to read. Read a file and return the resulting data. Raise an :py:exc:`~.es_client.exceptions.ConfigurationError` exception if the file is unable to be read. 
""" try: with open(myfile, "r", encoding="utf-8") as f: data = f.read() return data except IOError as exc: msg = f"Unable to read file {myfile}. Exception: {exc}" logger.error(msg) raise ConfigurationError(msg) from exc def verify_ssl_paths(args: t.Dict) -> None: """ :param args: The ``client`` block of the config dictionary. Verify that the various certificate/key paths are readable. The :py:func:`~.es_client.helpers.utils.read_file` function will raise a :py:exc:`~.es_client.exceptions.ConfigurationError` if a file fails to be read. """ # Test whether certificate is a valid file path if "ca_certs" in args and args["ca_certs"] is not None: read_file(args["ca_certs"]) # Test whether client_cert is a valid file path if "client_cert" in args and args["client_cert"] is not None: read_file(args["client_cert"]) # Test whether client_key is a valid file path if "client_key" in args and args["client_key"] is not None: read_file(args["client_key"]) def verify_url_schema(url: str) -> str: """ :param url: The url to verify :returns: Verified URL Ensure that a valid URL schema (HTTP[S]://URL:PORT) is used Raise a :py:exc:`~.es_client.exceptions.ConfigurationError` exception if a URL schema is invalid for any reason. 
""" parts = url.lower().split(":") errmsg = f"URL Schema invalid for {url}" if len(parts) < 3: # We do not have a port if parts[0] == "https": port = "443" elif parts[0] == "http": port = "80" else: raise ConfigurationError(errmsg) elif len(parts) == 3: if (parts[0] != "http") and (parts[0] != "https"): raise ConfigurationError(errmsg) port = parts[2] else: raise ConfigurationError(errmsg) return parts[0] + ":" + parts[1] + ":" + port es_client-8.17.4/src/es_client/py.typed000066400000000000000000000000001476634113400200120ustar00rootroot00000000000000es_client-8.17.4/src/local_test.py000077500000000000000000000006021476634113400170640ustar00rootroot00000000000000#!/usr/bin/env python """Script to run locally""" from click import echo from es_client.cli_example import run if __name__ == '__main__': try: # This is because click uses decorators, and pylint doesn't catch that # pylint: disable=no-value-for-parameter run() except RuntimeError as err: import sys echo(f'{err}') sys.exit(1) es_client-8.17.4/tests/000077500000000000000000000000001476634113400147335ustar00rootroot00000000000000es_client-8.17.4/tests/__init__.py000066400000000000000000000000001476634113400170320ustar00rootroot00000000000000es_client-8.17.4/tests/integration/000077500000000000000000000000001476634113400172565ustar00rootroot00000000000000es_client-8.17.4/tests/integration/__init__.py000066400000000000000000000003311476634113400213640ustar00rootroot00000000000000"""Integration setup""" from os import environ HOST = environ.get("TEST_ES_SERVER", "https://127.0.0.1:9200") USER = environ.get("TEST_USER", "elastic") PASS = environ.get("TEST_PASS") CACRT = environ.get("CA_CRT") es_client-8.17.4/tests/integration/test_builder.py000066400000000000000000000056771476634113400223340ustar00rootroot00000000000000"""Test the Builder class""" from unittest import TestCase import pytest from es_client.builder import Builder from es_client.exceptions import ConfigurationError, ESClientException, NotMaster from 
. import CACRT, HOST, PASS, USER config = { "elasticsearch": { "other_settings": {"username": USER, "password": PASS}, "client": {"hosts": HOST, "ca_certs": CACRT}, } } # pylint: disable=protected-access class TestCheckMaster(TestCase): """Test 'check master' functionality""" def test_multiple_hosts_raises(self): """Raise exception if multiple hosts are specified and 'master_only' is True""" local_conf = { "elasticsearch": { "other_settings": { "master_only": True, "username": USER, "password": PASS, }, "client": { "hosts": [HOST], "ca_certs": CACRT, }, } } obj = Builder(configdict=local_conf, autoconnect=False) obj._get_client() # Cheating in an extra HOST here obj.client_args.hosts.append(HOST) with pytest.raises(ConfigurationError): obj._check_multiple_hosts() def test_exit_if_not_master(self): """Raise NotMaster if node is not master""" obj = Builder(config, autoconnect=False) obj.master_only = True obj._get_client() obj._find_master() # Cheating in a False result for is_master obj.is_master = False with pytest.raises(NotMaster): obj._check_if_master() class TestCheckVersion(TestCase): """Check ES version""" def test_skip_version_check(self): """Skip version check results in None being returned""" obj = Builder(configdict=config, autoconnect=False) obj.skip_version_test = True obj._get_client() assert obj._check_version() is None def test_bad_version_raises(self): """Raise ESClientException if version is out of bounds""" obj = Builder(configdict=config, autoconnect=False) obj.version_min = (98, 98, 98) obj.version_max = (99, 99, 99) obj._get_client() with pytest.raises(ESClientException): obj._check_version() class TestConnection(TestCase): """Test client connection""" def test_incomplete_dict_passed(self): """Sending a proper dictionary but None value for hosts will raise ValueError""" cfg = {"elasticsearch": {"client": {"hosts": None}}} with pytest.raises(ValueError): Builder(configdict=cfg, autoconnect=True) def test_client_info(self): """Proper connection 
to client makes for a good response""" obj = Builder(configdict=config, autoconnect=True) client = obj.client expected = dict(client.info()) assert expected['cluster_name'] == dict(obj.test_connection())['cluster_name'] es_client-8.17.4/tests/integration/test_cli_example.py000066400000000000000000000057301476634113400231560ustar00rootroot00000000000000"""Test cli_example""" from os import devnull from unittest import TestCase from click import testing as clicktest from es_client.cli_example import run from . import CACRT, HOST, PASS, USER from ..unit import FileTestObj YAMLCONFIG = "\n".join( [ "---", "logging:", " loglevel: INFO", " logfile:", " logformat: default", " blacklist: ['elastic_transport', 'urllib3']", ] ) class TestCLIExample(TestCase): """Test CLI Example""" def test_basic_operation(self): "Ensure basic functionality" args = [ "--hosts", HOST, "--username", USER, "--password", PASS, "--ca_certs", CACRT, "--loglevel", "DEBUG", "test-connection", ] runner = clicktest.CliRunner(mix_stderr=True) result = runner.invoke(run, args) assert result.exit_code == 0 def test_show_all_options(self): """Ensure show-all-options works""" args = ["show-all-options"] runner = clicktest.CliRunner() result = runner.invoke(run, args=args) assert result.exit_code == 0 def test_logging_options_json(self): """Testing JSON log options""" args = [ "--hosts", HOST, "--username", USER, "--password", PASS, "--ca_certs", CACRT, "--loglevel", "DEBUG", "--logformat", "json", "test-connection", ] runner = clicktest.CliRunner() result = runner.invoke(run, args=args) assert result.exit_code == 0 def test_logging_options_ecs(self): """Testing ECS log options""" args = [ "--hosts", HOST, "--username", USER, "--password", PASS, "--ca_certs", CACRT, "--loglevel", "WARNING", "--logfile", devnull, "--logformat", "ecs", "test-connection", ] runner = clicktest.CliRunner() result = runner.invoke(run, args) assert result.exit_code == 0 def test_logging_options_from_config_file(self): """Testing 
logging options from a config file""" # Build file_obj = FileTestObj() file_obj.write_config(file_obj.args["configfile"], YAMLCONFIG) # Test args = [ "--config", file_obj.args["configfile"], "--hosts", HOST, "--username", USER, "--password", PASS, "--ca_certs", CACRT, "test-connection", ] runner = clicktest.CliRunner() result = runner.invoke(run, args) assert result.exit_code == 0 # Teardown file_obj.teardown() es_client-8.17.4/tests/integration/test_helpers_config.py000066400000000000000000000024761476634113400236670ustar00rootroot00000000000000"""Test helpers.config""" from unittest import TestCase import pytest from dotmap import DotMap # type: ignore from elasticsearch8 import Elasticsearch from es_client.defaults import ES_DEFAULT from es_client.exceptions import ESClientException from es_client.helpers import config from . import CACRT, HOST, PASS, USER CONFIG = { "elasticsearch": { "other_settings": {"username": USER, "password": PASS}, "client": {"hosts": HOST, "ca_certs": CACRT}, } } class TestGetClient(TestCase): """Test get_client functionality""" def test_basic_operation(self): """Ensure basic operation""" assert isinstance(config.get_client(configdict=CONFIG), Elasticsearch) def test_raises_when_no_connection(self): """ Ensures that an exception is raised when it cannot connect to Elasticsearch """ client_args = DotMap() other_args = DotMap() client_args.update(DotMap(ES_DEFAULT)) client_args.hosts = ["http://127.0.0.123:12345"] client_args.request_timeout = 0.1 cnf = { "elasticsearch": { "client": client_args.toDict(), "other_settings": other_args.toDict(), } } with pytest.raises(ESClientException): _ = config.get_client(configdict=cnf) es_client-8.17.4/tests/unit/000077500000000000000000000000001476634113400157125ustar00rootroot00000000000000es_client-8.17.4/tests/unit/__init__.py000066400000000000000000000221701476634113400200250ustar00rootroot00000000000000"""Functions and classes used for tests""" import os import random import shutil import 
string import tempfile import click from es_client.defaults import LOGGING_SETTINGS from es_client.helpers import config as cfgfn from es_client.helpers.utils import option_wrapper, prune_nones ONOFF = {"on": "", "off": "no-"} DEFAULT_HOST = "http://127.0.0.1:9200" DEFAULTCFG = "\n".join( ["---", "elasticsearch:", " client:", f" hosts: [{DEFAULT_HOST}]"] ) EMPTYCFG = ( "---\n" "elasticsearch:\n" " client:\n" " hosts: \n" " - \n" " cloud_id: \n" ) TESTUSER = "joe_user" TESTPASS = "password" YAMLCONFIG = ( "---\n" "elasticsearch:\n" " client:\n" f" hosts: [{DEFAULT_HOST}]\n" " other_settings:\n" " username: {0}\n" " password: {1}\n" ) click_opt_wrap = option_wrapper() def random_directory(): """Create a random dictionary""" dirname = "".join( random.choice(string.ascii_uppercase + string.digits) for _ in range(8) ) directory = tempfile.mkdtemp(suffix=dirname) if not os.path.exists(directory): os.makedirs(directory) return directory class FileTestObj(object): """All file tests will use this object""" def __init__(self): self.args = {} dirname = "".join( random.choice(string.ascii_uppercase + string.digits) for _ in range(8) ) filename = "".join( random.choice(string.ascii_uppercase + string.digits) for _ in range(8) ) # This will create a psuedo-random temporary directory on the machine # which runs the unit tests, but NOT on the machine where elasticsearch # is running. This means tests may fail if run against remote instances # unless you explicitly set `self.args['location']` to a proper spot # on the target machine. 
self.written_value = """NOTHING""" self.args["tmpdir"] = tempfile.mkdtemp(suffix=dirname) if not os.path.exists(self.args["tmpdir"]): os.makedirs(self.args["tmpdir"]) self.args["configdir"] = random_directory() self.args["configfile"] = os.path.join(self.args["configdir"], "es_client.yml") self.args["filename"] = os.path.join(self.args["tmpdir"], filename) self.args["no_file_here"] = os.path.join(self.args["tmpdir"], "not_created") with open(self.args["filename"], "w", encoding="utf-8") as f: f.write(self.written_value) def teardown(self): """Default teardown""" if os.path.exists(self.args["tmpdir"]): shutil.rmtree(self.args["tmpdir"]) if os.path.exists(self.args["configdir"]): shutil.rmtree(self.args["configdir"]) def write_config(self, fname, data): """Write config to named file""" with open(fname, "w", encoding="utf-8") as f: f.write(data) # pylint: disable=unused-argument, redefined-builtin, too-many-arguments @click.command() @click_opt_wrap(*cfgfn.cli_opts("config")) @click_opt_wrap(*cfgfn.cli_opts("hosts")) @click_opt_wrap(*cfgfn.cli_opts("cloud_id")) @click_opt_wrap(*cfgfn.cli_opts("api_token")) @click_opt_wrap(*cfgfn.cli_opts("id")) @click_opt_wrap(*cfgfn.cli_opts("api_key")) @click_opt_wrap(*cfgfn.cli_opts("username")) @click_opt_wrap(*cfgfn.cli_opts("password")) @click_opt_wrap(*cfgfn.cli_opts("bearer_auth")) @click_opt_wrap(*cfgfn.cli_opts("opaque_id")) @click_opt_wrap(*cfgfn.cli_opts("request_timeout")) @click_opt_wrap(*cfgfn.cli_opts("http_compress", onoff=ONOFF)) @click_opt_wrap(*cfgfn.cli_opts("verify_certs", onoff=ONOFF)) @click_opt_wrap(*cfgfn.cli_opts("ca_certs")) @click_opt_wrap(*cfgfn.cli_opts("client_cert")) @click_opt_wrap(*cfgfn.cli_opts("client_key")) @click_opt_wrap(*cfgfn.cli_opts("ssl_assert_hostname")) @click_opt_wrap(*cfgfn.cli_opts("ssl_assert_fingerprint")) @click_opt_wrap(*cfgfn.cli_opts("ssl_version")) @click_opt_wrap(*cfgfn.cli_opts("master-only", onoff=ONOFF)) @click_opt_wrap(*cfgfn.cli_opts("skip_version_test", onoff=ONOFF)) 
@click_opt_wrap(*cfgfn.cli_opts("loglevel", settings=LOGGING_SETTINGS)) @click_opt_wrap(*cfgfn.cli_opts("logfile", settings=LOGGING_SETTINGS)) @click_opt_wrap(*cfgfn.cli_opts("logformat", settings=LOGGING_SETTINGS)) @click.pass_context def simulator( ctx, config, hosts, cloud_id, api_token, id, api_key, username, password, bearer_auth, opaque_id, request_timeout, http_compress, verify_certs, ca_certs, client_cert, client_key, ssl_assert_hostname, ssl_assert_fingerprint, ssl_version, master_only, skip_version_test, loglevel, logfile, logformat, ): """Test command with all regular options""" ctx.obj = {} cfgfn.get_config(ctx) cfgfn.generate_configdict(ctx) click.echo(f'{ctx.obj["configdict"]}') # pylint: disable=unused-argument @click.command() @click_opt_wrap(*cfgfn.cli_opts("config")) @click_opt_wrap(*cfgfn.cli_opts("hosts")) @click_opt_wrap(*cfgfn.cli_opts("cloud_id")) @click_opt_wrap(*cfgfn.cli_opts("api_token")) @click_opt_wrap(*cfgfn.cli_opts("id")) @click_opt_wrap(*cfgfn.cli_opts("api_key")) @click_opt_wrap(*cfgfn.cli_opts("username")) @click_opt_wrap(*cfgfn.cli_opts("password")) @click_opt_wrap(*cfgfn.cli_opts("bearer_auth")) @click_opt_wrap(*cfgfn.cli_opts("opaque_id")) @click_opt_wrap(*cfgfn.cli_opts("request_timeout")) @click_opt_wrap(*cfgfn.cli_opts("http_compress", onoff=ONOFF)) @click_opt_wrap(*cfgfn.cli_opts("verify_certs", onoff=ONOFF)) @click_opt_wrap(*cfgfn.cli_opts("ca_certs")) @click_opt_wrap(*cfgfn.cli_opts("client_cert")) @click_opt_wrap(*cfgfn.cli_opts("client_key")) @click_opt_wrap(*cfgfn.cli_opts("ssl_assert_hostname")) @click_opt_wrap(*cfgfn.cli_opts("ssl_assert_fingerprint")) @click_opt_wrap(*cfgfn.cli_opts("ssl_version")) @click_opt_wrap(*cfgfn.cli_opts("master-only", onoff=ONOFF)) @click_opt_wrap(*cfgfn.cli_opts("skip_version_test", onoff=ONOFF)) @click_opt_wrap(*cfgfn.cli_opts("loglevel", settings=LOGGING_SETTINGS)) @click_opt_wrap(*cfgfn.cli_opts("logfile", settings=LOGGING_SETTINGS)) @click_opt_wrap(*cfgfn.cli_opts("logformat", 
settings=LOGGING_SETTINGS)) @click.pass_context def default_config_cmd( ctx, config, hosts, cloud_id, api_token, id, api_key, username, password, bearer_auth, opaque_id, request_timeout, http_compress, verify_certs, ca_certs, client_cert, client_key, ssl_assert_hostname, ssl_assert_fingerprint, ssl_version, master_only, skip_version_test, loglevel, logfile, logformat, ): """Test command with all regular options""" # Build config file file_obj = FileTestObj() file_obj.write_config( file_obj.args["configfile"], YAMLCONFIG.format(TESTUSER, TESTPASS) ) # User config file ctx.obj = {"default_config": file_obj.args["configfile"]} cfgfn.get_config(ctx) # Teardown config file file_obj.teardown() # Finish the function cfgfn.generate_configdict(ctx) click.echo(f'{ctx.obj["configdict"]}') @click.command() @click_opt_wrap(*cfgfn.cli_opts("config")) @click_opt_wrap(*cfgfn.cli_opts("hosts")) @click_opt_wrap(*cfgfn.cli_opts("cloud_id")) @click_opt_wrap(*cfgfn.cli_opts("api_token")) @click_opt_wrap(*cfgfn.cli_opts("id")) @click_opt_wrap(*cfgfn.cli_opts("api_key")) @click_opt_wrap(*cfgfn.cli_opts("username")) @click_opt_wrap(*cfgfn.cli_opts("password")) @click_opt_wrap(*cfgfn.cli_opts("bearer_auth")) @click_opt_wrap(*cfgfn.cli_opts("opaque_id")) @click_opt_wrap(*cfgfn.cli_opts("request_timeout")) @click_opt_wrap(*cfgfn.cli_opts("http_compress", onoff=ONOFF)) @click_opt_wrap(*cfgfn.cli_opts("verify_certs", onoff=ONOFF)) @click_opt_wrap(*cfgfn.cli_opts("ca_certs")) @click_opt_wrap(*cfgfn.cli_opts("client_cert")) @click_opt_wrap(*cfgfn.cli_opts("client_key")) @click_opt_wrap(*cfgfn.cli_opts("ssl_assert_hostname")) @click_opt_wrap(*cfgfn.cli_opts("ssl_assert_fingerprint")) @click_opt_wrap(*cfgfn.cli_opts("ssl_version")) @click_opt_wrap(*cfgfn.cli_opts("master-only", onoff=ONOFF)) @click_opt_wrap(*cfgfn.cli_opts("skip_version_test", onoff=ONOFF)) @click_opt_wrap(*cfgfn.cli_opts("loglevel", settings=LOGGING_SETTINGS)) @click_opt_wrap(*cfgfn.cli_opts("logfile", settings=LOGGING_SETTINGS)) 
@click_opt_wrap(*cfgfn.cli_opts("logformat", settings=LOGGING_SETTINGS)) @click.pass_context def simulate_override_client_args( ctx, config, hosts, cloud_id, api_token, id, api_key, username, password, bearer_auth, opaque_id, request_timeout, http_compress, verify_certs, ca_certs, client_cert, client_key, ssl_assert_hostname, ssl_assert_fingerprint, ssl_version, master_only, skip_version_test, loglevel, logfile, logformat, ): """Test command with all regular options""" ctx.obj = {} cfgfn.get_config(ctx) cfgfn.get_arg_objects(ctx) # Manual override ctx.obj["client_args"].hosts = None cfgfn.override_client_args(ctx) ctx.obj["configdict"] = { "elasticsearch": { "client": prune_nones(ctx.obj["client_args"].toDict()), "other_settings": prune_nones(ctx.obj["other_args"].toDict()), } } click.echo(f'{ctx.obj["configdict"]}') es_client-8.17.4/tests/unit/test_builder.py000066400000000000000000000214541476634113400207570ustar00rootroot00000000000000"""Test helpers.schemacheck""" from unittest import TestCase import certifi import click import pytest from es_client.builder import Builder from es_client.exceptions import ConfigurationError from . 
import FileTestObj DEFAULT = {"elasticsearch": {"client": {"hosts": ["http://127.0.0.1:9200"]}}} YAMLCONFIG = "\n".join( ["---", "elasticsearch:", " client:", " hosts:", " - {0}\n"] ) # pylint: disable=protected-access def process_cmd(key): """Return the key from the click context's object""" return click.get_current_context().obj[key] class TestInit(TestCase): """Test initializing a Builder object""" def test_read_config_file_old(self): """Ensure that the value of es_url is passed to hosts""" es_url = "http://127.0.0.1:9200" # Build file_obj = FileTestObj() file_obj.write_config(file_obj.args["configfile"], YAMLCONFIG.format(es_url)) # Test build_obj = Builder(configfile=file_obj.args["configfile"]) assert build_obj.client_args.hosts[0] == es_url # Teardown file_obj.teardown() def test_assign_defaults(self): """ Ensure that the default URL is passed to hosts when an empty config dict is passed """ obj = Builder(configdict={}) assert obj.client_args.hosts == ["http://127.0.0.1:9200"] def test_raises_for_both_hosts_and_cloud_id(self): """ Ensure that ConfigurationError is Raised when both hosts and cloud_id are passed """ test = { "elasticsearch": { "client": {"hosts": ["http://10.1.2.3:4567"], "cloud_id": "foo:bar"} } } with pytest.raises(ConfigurationError): _ = Builder(configdict=test) def test_remove_default_hosts_when_cloud_id(self): """ Ensure that only a default hosts url is removed when cloud_id is also passed """ test = { "elasticsearch": { "client": {"hosts": ["http://127.0.0.1:9200"], "cloud_id": "foo:bar"} } } obj = Builder(configdict=test) assert obj.client_args.hosts is None def test_url_schema_validation_fix(self): """Ensure that :443 is appended to a host with https and no port""" test = {"elasticsearch": {"client": {"hosts": ["https://127.0.0.1"]}}} obj = Builder(configdict=test) assert "https://127.0.0.1:443" == obj.client_args.hosts[0] def test_url_schema_validation_raises(self): """Ensure that ConfigurationError is raised with an invalid host URL 
schema""" test = {"elasticsearch": {"client": {"hosts": ["127.0.0.1:9200"]}}} with pytest.raises(ConfigurationError): _ = Builder(configdict=test) class TestAuth(TestCase): """Test authentication methods""" def test_user_but_no_pass(self): """ Ensure ConfigurationError is Raised when username is provided but no password """ obj = Builder(configdict=DEFAULT) obj.other_args.username = "test" assert obj.other_args.password is None with pytest.raises(ConfigurationError): obj._check_basic_auth() def test_pass_but_no_user(self): """ Ensure ConfigurationError is Raised when password is provided but no username """ obj = Builder(configdict=DEFAULT) obj.client_args.hosts = ["http://127.0.0.1:9200"] obj.other_args.password = "test" assert obj.other_args.username is None with pytest.raises(ConfigurationError): obj._check_basic_auth() def test_id_but_no_api_key(self): """Ensure ConfigurationError is Raised when id is passed but no api_key""" test = { "elasticsearch": { "other_settings": {"api_key": {"id": "test"}}, "client": {"hosts": ["http://127.0.0.1:9200"]}, } } with pytest.raises(ConfigurationError): _ = Builder(configdict=test) def test_api_key_but_no_id(self): """Ensure ConfigurationError is Raised when api_key is passed but no id""" test = { "elasticsearch": { "other_settings": {"api_key": {"api_key": "test"}}, "client": {"hosts": ["http://127.0.0.1:9200"]}, } } with pytest.raises(ConfigurationError): _ = Builder(configdict=test) def test_no_api_key_values(self): """Ensure that API keys remain None""" api_id = None api_key = None api_token = None test = { "elasticsearch": { "other_settings": { "api_key": {"id": api_id, "api_key": api_key, "token": api_token} }, "client": {"hosts": ["http://127.0.0.1:9200"]}, } } obj = Builder(configdict=test) assert obj.client_args.api_key is None def test_proper_api_key(self): """Ensure that API key value is assigned to client_args when properly passed""" api_id = "foo" api_key = "bar" test = { "elasticsearch": { "other_settings": 
{"api_key": {"id": api_id, "api_key": api_key}}, "client": {"hosts": ["http://127.0.0.1:9200"]}, } } obj = Builder(configdict=test) assert obj.client_args.api_key == (api_id, api_key) def test_proper_api_key_token(self): """Ensure that API key value is assigned to client_args when token is good""" api_id = "foo" api_key = "bar" # token = base64.b64encode(bytes(f'{api_id}:{api_key}', 'utf-8')) token = "Zm9vOmJhcg==" test = { "elasticsearch": { "other_settings": {"api_key": {"token": token}}, "client": {"hosts": ["http://127.0.0.1:9200"]}, } } obj = Builder(configdict=test) assert obj.client_args.api_key == (api_id, api_key) def test_invalid_api_key_token(self): """Ensure that ConfigurationError is raise when token is invalid""" token = "This is an invalid token" test = { "elasticsearch": { "other_settings": {"api_key": {"token": token}}, "client": {"hosts": ["http://127.0.0.1:9200"]}, } } with pytest.raises(ConfigurationError): Builder(configdict=test) def test_basic_auth_tuple(self): """Test basic_auth is set properly""" usr = "username" pwd = "password" obj = Builder(configdict=DEFAULT) obj.other_args.username = usr obj.other_args.password = pwd obj._check_basic_auth() assert usr not in obj.client_args assert pwd not in obj.client_args assert (usr, pwd) == obj.client_args.basic_auth class TestCheckSSL(TestCase): """Ensure that certifi certificates are picked up""" def test_certifi(self): """ Ensure that the certifi.where() output matches what was inserted into client_args """ https = DEFAULT https["elasticsearch"]["client"]["hosts"] = "https://127.0.0.1:9200" obj = Builder(configdict=https) obj._check_ssl() assert certifi.where() == obj.client_args.ca_certs def test_ca_certs_named_but_no_file(self): """ Ensure that a ConfigurationError is raised if ca_certs is named but no file found """ tmp = FileTestObj() tmp.write_config( tmp.args["configfile"], """ This file will be deleted """, ) tmp.teardown() https = { "elasticsearch": { "client": { "hosts": 
["http://127.0.0.1:9200"], "ca_certs": tmp.args["configfile"], } } } https["elasticsearch"]["client"]["hosts"] = "https://127.0.0.1:9200" with pytest.raises(ConfigurationError): Builder(configdict=https) # def test_context_for_empty_cloud_id(self): # """Test to see contents of ctx""" # yamlconfig = "\n".join( # [ # "---", # "elasticsearch:", # " client:", # " hosts:", # " - 'http://127.0.0.1:9200'", # " cloud_id: ", # ] # ) # # Build # file_obj = FileTestObj() # file_obj.write_config(file_obj.args["configfile"], yamlconfig) # # Test # val = get_yaml(file_obj.args["configfile"]) # key = 'draftcfg' # ctx = click.Context(click.Command('cmd'), obj={key: val}) # with ctx: # resp = process_cmd(key) # assert resp['logging']['logfile'] is None # # Teardown # file_obj.teardown() es_client-8.17.4/tests/unit/test_defaults.py000066400000000000000000000010741476634113400211340ustar00rootroot00000000000000"""Test functions in es_client.defaults""" from unittest import TestCase from es_client.defaults import ( CLIENT_SETTINGS, OTHER_SETTINGS, client_settings, other_settings, ) class TestSettings(TestCase): """ Ensure test coverage of simple functions that might be deprecated in the future """ def test_client_settings(self): """Ensure matching output""" assert CLIENT_SETTINGS == client_settings() def test_other_settings(self): """Ensure matching output""" assert OTHER_SETTINGS == other_settings() es_client-8.17.4/tests/unit/test_helpers_config.py000066400000000000000000000210641476634113400223150ustar00rootroot00000000000000"""Test helpers.config""" import ast from unittest import TestCase import pytest import click from click.testing import CliRunner from es_client.defaults import CLICK_SETTINGS, ES_DEFAULT from es_client.exceptions import ConfigurationError from es_client.helpers import config as cfgfn from es_client.helpers.utils import option_wrapper from . 
import ( DEFAULTCFG, DEFAULT_HOST, TESTUSER, TESTPASS, YAMLCONFIG, FileTestObj, simulator, default_config_cmd, simulate_override_client_args, ) ONOFF = {'on': '', 'off': 'no-'} click_opt_wrap = option_wrapper() def get_configdict(args, func): """Use a dummy click function to return the ctx.obj['configdict'] contents""" ctx = click.Context(func) with ctx: runner = CliRunner() result = runner.invoke(func, args) click.echo(f'RESULT = {result.output}') try: configdict = ast.literal_eval(result.output.splitlines()[-1]) except (ValueError, IndexError): configdict = {} return configdict, result class TestOverrideSettings(TestCase): """Test override_settings functionality""" key = 'dict_key' orig = {key: '1'} over = {key: '2'} def test_basic_operation(self): """Ensure basic functionality""" assert self.over == cfgfn.override_settings(self.orig, self.over) def test_raises(self): """Ensure exception is raised when override is a non-dictionary""" with pytest.raises(ConfigurationError): cfgfn.override_settings(self.orig, 'non-dict') class TestCliOpts(TestCase): """Test cli_opts function""" argname = 'arg' key = 'test' src = '1' ovr = '2' settings = {argname: {key: src}} onoff = {'on': '', 'off': 'no-'} override = {key: ovr} def test_basic_operation(self): """Ensure basic functionality""" assert ((f'--{self.argname}',), self.override) == cfgfn.cli_opts( self.argname, settings=self.settings, override=self.override ) def test_empty_override(self): """Ensure value is not changed when no override dictionary provided""" assert ((f'--{self.argname}',), self.settings[self.argname]) == cfgfn.cli_opts( self.argname, settings=self.settings ) def test_settings_is_none(self): """Ensure defaults are pulled up when no value is provided for settings""" value = 'ssl_version' assert ((f'--{value}',), CLICK_SETTINGS[value]) == cfgfn.cli_opts(value) def test_settings_is_nondict(self): """Ensure exception is raised when settings is not a dictionary""" with pytest.raises(ConfigurationError): 
cfgfn.cli_opts(self.argname, 'non-dictionary') def test_value_not_in_settings(self): """Ensure exception is raised when value is not a key in settings""" with pytest.raises(ConfigurationError): cfgfn.cli_opts(self.argname, {'no': 'match'}) def test_onoff_operation(self): """Ensure onoff arg naming functionality""" onval = f"{self.onoff['on']}{self.argname}" offval = f"{self.onoff['off']}{self.argname}" assert ( (f"--{onval}/--{offval}",), self.settings[self.argname], ) == cfgfn.cli_opts(self.argname, settings=self.settings, onoff=self.onoff) def test_onoff_raises_on_keyerror(self): """Ensure onoff raises when there's a KeyError""" with pytest.raises(ConfigurationError): cfgfn.cli_opts(self.argname, settings=self.settings, onoff={'foo': 'bar'}) class TestCloudIdOverride(TestCase): """Test cloud_id_override functionality""" def test_basic_operation(self): """Ensure basic operation""" # Build file_obj = FileTestObj() file_obj.write_config(file_obj.args['configfile'], DEFAULTCFG) test_param = 'cloud_id' test_value = 'dummy' cmdargs = [ '--config', file_obj.args['configfile'], f'--{test_param}', f'{test_value}', ] # Test configdict, _ = get_configdict(cmdargs, simulator) assert configdict assert configdict['elasticsearch']['client'][test_param] == test_value assert 'hosts' not in configdict['elasticsearch']['client'] # Teardown file_obj.teardown() class TestContextSettings(TestCase): """Test context_settings functionality""" def test_basic_operation(self): """Ensure basic operation""" key = 'help_option_names' value = ['-h', '--help'] retval = cfgfn.context_settings() assert value == retval[key] class TestOverrideClientArgs(TestCase): """Test override_client_args functionality, indirectly""" def test_uses_default(self): """ Test to ensure that the default URL is used when there are no hosts in either the config file or the command-line args """ cmdargs = [] configdict, _ = get_configdict(cmdargs, simulate_override_client_args) assert configdict assert ( 
ES_DEFAULT['elasticsearch']['client']['hosts'] == configdict['elasticsearch']['client']['hosts'] ) class TestGetConfig(TestCase): """Test get_config functionality""" def test_provided_config(self): """Test reading YAML provided as --config""" # Build file_obj = FileTestObj() file_obj.write_config( file_obj.args['configfile'], YAMLCONFIG.format(TESTUSER, TESTPASS) ) cmdargs = ['--config', file_obj.args['configfile']] # Test configdict, _ = get_configdict(cmdargs, simulator) assert configdict assert TESTUSER == configdict['elasticsearch']['other_settings']['username'] # Teardown file_obj.teardown() def test_default_config(self): """Test reading YAML provided as default config""" # This one is special because it needs to test the default_config cmdargs = [] configdict, _ = get_configdict(cmdargs, default_config_cmd) assert configdict assert TESTPASS == configdict['elasticsearch']['other_settings']['password'] def test_crazy_sauce(self): """Test this crazy configuration""" yamlconfig = "\n".join( [ "---", "elasticsearch:", " client:", " hosts:", f" - {DEFAULT_HOST}", " cloud_id:", " ca_certs:", " client_cert:", " client_key:", " verify_certs: False", " request_timeout: 30", " other_settings:", " master_only: False", f" username: {TESTUSER}", f" password: {TESTPASS}", " api_key:", " id:", " api_key:", " token:", ] ) # Build file_obj = FileTestObj() file_obj.write_config(file_obj.args['configfile'], yamlconfig) cmdargs = ['--config', file_obj.args['configfile']] # Test configdict, _ = get_configdict(cmdargs, simulator) for key in ['cloud_id', 'ca_certs', 'client_certs', 'client_key']: assert key not in configdict['elasticsearch']['client'] for key in ['id', 'api_key', 'token']: assert configdict['elasticsearch']['other_settings']['api_key'][key] is None # Teardown file_obj.teardown() class TestGetHosts(TestCase): """Test get_hosts functionality""" def test_basic_operation(self): """Ensure basic operation""" url = 'http://127.0.0.123:9200' cmdargs = ['--hosts', url] 
configdict, _ = get_configdict(cmdargs, simulator) assert configdict assert [url] == configdict['elasticsearch']['client']['hosts'] def test_params_has_no_hosts(self): """ Ensure the default hosts value is used when neither config nor params has any hosts """ cmdargs = [] expected = 'http://127.0.0.1:9200' configdict, _ = get_configdict(cmdargs, simulator) assert configdict assert [expected] == configdict['elasticsearch']['client']['hosts'] def test_raises_on_bad_url(self): """Ensure an exception is raised when a host has a bad URL schema""" url = 'hppt://elastic.co' cmdargs = ['--hosts', url] _, result = get_configdict(cmdargs, simulator) assert result.exit_code == 1 assert isinstance(result.exception, ConfigurationError) es_client-8.17.4/tests/unit/test_helpers_logging.py000066400000000000000000000037471476634113400225060ustar00rootroot00000000000000"""Test helpers.logging""" from unittest import TestCase import pytest import click from es_client.helpers.logging import check_logging_config, get_numeric_loglevel from es_client.helpers.utils import get_yaml from . 
import FileTestObj def process_cmd(key): """Return the key from the click context's object""" return click.get_current_context().obj[key] class TestCheckLoggingConfig(TestCase): """Test check_logging_config functionality""" default = { "loglevel": "INFO", "blacklist": ["elastic_transport", "urllib3"], "logfile": None, "logformat": "default", } def test_non_dict(self): """Ensure it yields default values""" assert self.default == check_logging_config("not-a-dict") def test_empty_key(self): """Ensure it yields default values too""" assert self.default == check_logging_config({"logging": {}}) def test_logging_context_for_empty_logfile(self): """Test to see contents of ctx""" yamlconfig = "\n".join( [ "---", "logging:", " loglevel: INFO", " logfile: ", " logformat: default", " blacklist: ['elastic_transport', 'urllib3']", ] ) # Build file_obj = FileTestObj() file_obj.write_config(file_obj.args["configfile"], yamlconfig) # Test val = get_yaml(file_obj.args["configfile"]) key = 'draftcfg' ctx = click.Context(click.Command('cmd'), obj={key: val}) with ctx: resp = process_cmd(key) assert resp['logging']['logfile'] is None # Teardown file_obj.teardown() class TestGetNumericLogLevel(TestCase): """Test get_numeric_loglevel function""" def test_invalid_loglevel(self): """Ensure it raises an exception when an invalid loglevel is provided""" with pytest.raises(ValueError): get_numeric_loglevel("NONSENSE") es_client-8.17.4/tests/unit/test_helpers_logging_new.py000066400000000000000000000165631476634113400233570ustar00rootroot00000000000000"""Unit tests for logging-related helper functions.""" from io import StringIO import logging import json import re import tempfile import unittest from unittest.mock import MagicMock from es_client.helpers.logging import ( Whitelist, Blacklist, JSONFormatter, check_logging_config, override_logging, get_logger, get_numeric_loglevel, get_format_string, check_log_opts, de_dot, deepmerge, ) # Test custom logging filters class 
TestLoggingFilters(unittest.TestCase): """Test custom logging filters.""" def test_whitelist_filter(self): """Test that Whitelist filter allows only specified logger names.""" whitelist = Whitelist('test_logger') record_allowed = logging.LogRecord( 'test_logger', logging.INFO, 'path', 1, 'message', None, None ) record_blocked = logging.LogRecord( 'other_logger', logging.INFO, 'path', 1, 'message', None, None ) self.assertTrue(whitelist.filter(record_allowed)) self.assertFalse(whitelist.filter(record_blocked)) def test_blacklist_filter(self): """Test that Blacklist filter blocks specified logger names.""" blacklist = Blacklist('test_logger') record_blocked = logging.LogRecord( 'test_logger', logging.INFO, 'path', 1, 'message', None, None ) record_allowed = logging.LogRecord( 'other_logger', logging.INFO, 'path', 1, 'message', None, None ) self.assertFalse(blacklist.filter(record_blocked)) self.assertTrue(blacklist.filter(record_allowed)) # Test JSONFormatter class TestJSONFormatter(unittest.TestCase): """Test JSONFormatter class.""" def test_format(self): """Test that JSONFormatter correctly formats log records into JSON.""" formatter = JSONFormatter() record = logging.LogRecord( 'test_logger', logging.INFO, 'path', 1, 'Test message', None, None ) formatted = formatter.format(record) data = json.loads(formatted) self.assertIn('@timestamp', data) self.assertIn('message', data) self.assertEqual(data['message'], 'Test message') self.assertIn('loglevel', data) self.assertEqual(data['loglevel'], 'INFO') # Verify timestamp format (ISO 8601 with milliseconds) self.assertTrue( re.match(r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z', data['@timestamp']) ) # Test configuration-related functions class TestConfigurationFunctions(unittest.TestCase): """Test configuration-related functions.""" def test_check_logging_config_valid(self): """Test check_logging_config with a valid configuration.""" valid_config = {'logging': {'loglevel': 'DEBUG', 'logformat': 'json'}} result = 
check_logging_config(valid_config) self.assertEqual(result['loglevel'], 'DEBUG') self.assertEqual(result['logformat'], 'json') def test_check_logging_config_invalid(self): """Test check_logging_config with an invalid log level.""" invalid_config = {'logging': {'loglevel': 'INVALID'}} with self.assertRaises( Exception ): # Schema validation should raise an exception check_logging_config(invalid_config) def test_check_logging_config_no_config(self): """Test check_logging_config with no configuration provided.""" no_config = {} result = check_logging_config(no_config) self.assertEqual(result['loglevel'], 'INFO') # Default value def test_override_logging(self): """Test override_logging merges CLI options over config file settings.""" ctx = MagicMock() ctx.obj = {'draftcfg': {'logging': {'loglevel': 'INFO'}}} ctx.params = {'loglevel': 'DEBUG'} result = override_logging(ctx) self.assertEqual(result['loglevel'], 'DEBUG') # Test logger setup and related functions class TestLoggerSetup(unittest.TestCase): """Test logger setup and related functions.""" def setUp(self): """Reset logging configuration before each test.""" logging.root.handlers = [] logging.root.setLevel(logging.NOTSET) def tearDown(self): """Reset logging configuration after each test.""" logging.root.handlers = [] logging.root.setLevel(logging.NOTSET) def test_get_logger_with_logfile(self): """Test get_logger with a logfile specified.""" with tempfile.NamedTemporaryFile() as tmpfile: log_opts = { 'loglevel': 'INFO', 'logfile': tmpfile.name, 'logformat': 'default', 'blacklist': [], } get_logger(log_opts) logger = logging.getLogger('test_logger_with_logfile') logger.info('Test message') with open(tmpfile.name, 'r', encoding='utf8') as f: content = f.read() self.assertIn('Test message', content) def test_get_logger_without_logfile(self): """Test get_logger without a logfile, using stream handlers.""" log_opts = {'loglevel': 'INFO', 'logformat': 'default', 'blacklist': []} get_logger(log_opts) logger = 
logging.getLogger('test_logger_without_logfile') stdout = StringIO() stderr = StringIO() handler_stdout = logging.StreamHandler(stdout) handler_stderr = logging.StreamHandler(stderr) logger.addHandler(handler_stdout) logger.addHandler(handler_stderr) logger.info('Test info message') logger.error('Test error message') self.assertIn('Test info message', stdout.getvalue()) self.assertIn('Test error message', stderr.getvalue()) def test_get_numeric_loglevel(self): """Test conversion of string log levels to numeric values.""" self.assertEqual(get_numeric_loglevel('DEBUG'), 10) self.assertEqual(get_numeric_loglevel('INFO'), 20) with self.assertRaises(ValueError): get_numeric_loglevel('INVALID') # Test utility functions class TestUtilityFunctions(unittest.TestCase): """Test utility functions.""" def test_get_format_string(self): """Test format string selection based on log level.""" debug_format = get_format_string(10) # DEBUG info_format = get_format_string(20) # INFO self.assertIn('%(funcName)', debug_format) # Detailed format for DEBUG self.assertNotIn('%(funcName)', info_format) # Simpler format for INFO def test_check_log_opts(self): """Test that check_log_opts applies defaults to partial configs.""" partial_opts = {'loglevel': 'DEBUG'} result = check_log_opts(partial_opts) self.assertEqual(result['loglevel'], 'DEBUG') self.assertEqual(result['logfile'], None) # Default self.assertEqual(result['logformat'], 'default') # Default def test_de_dot(self): """Test conversion of dotted strings to nested dictionaries.""" self.assertEqual(de_dot('loglevel', 'INFO'), {'loglevel': 'INFO'}) self.assertEqual(de_dot('a.b.c', 'value'), {'a': {'b': {'c': 'value'}}}) def test_deepmerge(self): """Test recursive merging of dictionaries.""" source = {'a': {'b': {'c': 'value'}}} destination = {'a': {'b': {'d': 'other'}}} result = deepmerge(source, destination) self.assertEqual(result, {'a': {'b': {'c': 'value', 'd': 'other'}}}) 
es_client-8.17.4/tests/unit/test_helpers_schemacheck.py000066400000000000000000000034331476634113400233060ustar00rootroot00000000000000"""Test helpers.schemacheck""" from unittest import TestCase import pytest from voluptuous import Schema from es_client.exceptions import FailedValidation from es_client.helpers.schemacheck import SchemaCheck from es_client.defaults import ( config_schema, VERSION_MIN, version_min, VERSION_MAX, version_max, ) class TestSchemaCheck(TestCase): """Test SchemaCheck class and member functions""" def test_bad_port_value(self): """Ensure that a bad port value Raises a FailedValidation""" config = {"elasticsearch": {"client": {"port": 70000}}} schema = SchemaCheck(config, config_schema(), "elasticsearch", "client") with pytest.raises(FailedValidation): schema.result() def test_entirely_wrong_keys(self): """Ensure that unacceptable keys Raises a FailedValidation""" config = { "elasticsearch": { "client_not": {}, "not_aws": {}, }, "something_else": "foo", } schema = SchemaCheck(config, config_schema(), "elasticsearch", "client") with pytest.raises(FailedValidation): schema.result() def test_does_not_password_filter_non_dict(self): """Ensure that if config is not a dictionary that it doesn't choke""" config = None schema = SchemaCheck(config, Schema(config), "arbitrary", "anylocation") assert schema.result() is None class TestVersionMinMax(TestCase): """Test version min and max functions""" def test_version_max(self): """Ensure version_max returns what it's set with""" assert VERSION_MAX == version_max() def test_version_min(self): """Ensure version_min returns what it's set with""" assert VERSION_MIN == version_min() es_client-8.17.4/tests/unit/test_helpers_utils.py000066400000000000000000000225311476634113400222100ustar00rootroot00000000000000"""Test helpers.utils""" import os import random import string import binascii from unittest import TestCase from unittest.mock import Mock import pytest from es_client.exceptions import 
ConfigurationError
from es_client.helpers import utils as u

from . import FileTestObj

# pylint: disable=R0903,W0718

# Reference default configuration shape (kept for use by other tests/modules)
DEFAULT = {
    "elasticsearch": {
        "other_settings": {
            "master_only": False,
            "skip_version_test": False,
            "username": None,
            "password": None,
        },
        "client": {
            "hosts": "http://127.0.0.1:9200",
            "request_timeout": 30,
        },
    }
}

# The leading spaces are important here to create a proper yaml file.
YAML = "\n".join(["---", "elasticsearch:", "  client:", "    hosts: {0}"])


def random_envvar(size):
    """Generate a random environment variable name of ``size`` chars.

    :param size: Number of random uppercase/digit characters to produce.
    :returns: A random string suitable for use as an envvar name.
    """
    return "".join(
        random.SystemRandom().choice(string.ascii_uppercase + string.digits)
        for _ in range(size)
    )


class TestEnsureList(TestCase):
    """Test the u.ensure_list function"""

    def test_utils_ensure_list_returns_lists(self):
        """
        Test several examples of lists: existing lists, strings, mixed
        lists/numbers
        """
        verify = ["a", "b", "c", "d"]
        source = ["a", "b", "c", "d"]
        assert verify == u.ensure_list(source)
        verify = ["abcd"]
        source = "abcd"
        assert verify == u.ensure_list(source)
        verify = [["abcd", "defg"], 1, 2, 3]
        source = [["abcd", "defg"], 1, 2, 3]
        assert verify == u.ensure_list(source)
        verify = [{"a": "b", "c": "d"}]
        source = {"a": "b", "c": "d"}
        assert verify == u.ensure_list(source)


class TestPruneNones(TestCase):
    """Test the u.prune_nones function"""

    def test_utils_prune_nones_with(self):
        """Ensure that a dict with a single None value comes back as an empty dict"""
        assert not u.prune_nones({"a": None})

    def test_utils_prune_nones_without(self):
        """Ensure that a dict with no None values comes back unchanged"""
        testval = {"foo": "bar"}
        assert testval == u.prune_nones(testval)


class TestReadFile:
    """Test the u.read_file function"""

    def test_utils_read_file_present(self):
        """Ensure that the written value is what was in the filename"""
        obj = FileTestObj()
        assert obj.written_value == u.read_file(obj.args["filename"])
        obj.teardown()

    def test_raise_when_no_file(self):
        """Raise an exception when there is no file"""
        obj = FileTestObj()
        with pytest.raises(ConfigurationError):
            u.read_file(obj.args["no_file_here"])
        obj.teardown()


class TestReadCerts:
    """Test the u.verify_ssl_paths function"""

    def test_all_as_one(self):
        """Test all 3 possible cert files at once from the same file"""
        obj = FileTestObj()
        config = {
            "ca_certs": obj.args["filename"],
            "client_cert": obj.args["filename"],
            "client_key": obj.args["filename"],
        }
        try:
            u.verify_ssl_paths(config)
        except Exception:
            pytest.fail("Unexpected Exception...")
        finally:
            # FIX: the fixture was never cleaned up, leaking temp files
            obj.teardown()


class TestEnvVars:
    """Test the ability to read environment variables"""

    def test_present(self):
        """Test an existing (present) envvar"""
        obj = FileTestObj()
        evar = random_envvar(8)
        os.environ[evar] = "1234"
        dollar = "${" + evar + "}"
        obj.write_config(obj.args["configfile"], YAML.format(dollar))
        cfg = u.get_yaml(obj.args["configfile"])
        assert cfg["elasticsearch"]["client"]["hosts"] == os.environ.get(evar)
        del os.environ[evar]
        obj.teardown()

    def test_not_present(self):
        """Test a non-existent (not-present) envvar. It should set None here"""
        obj = FileTestObj()
        evar = random_envvar(8)
        dollar = "${" + evar + "}"
        obj.write_config(obj.args["configfile"], YAML.format(dollar))
        cfg = u.get_yaml(obj.args["configfile"])
        assert cfg["elasticsearch"]["client"]["hosts"] is None
        obj.teardown()

    def test_not_present_with_default(self):
        """
        Test a non-existent (not-present) envvar.

        It should set a default value here
        """
        obj = FileTestObj()
        evar = random_envvar(8)
        default = random_envvar(8)
        # ${EVAR:default} syntax supplies a fallback when EVAR is unset
        dollar = "${" + evar + ":" + default + "}"
        obj.write_config(obj.args["configfile"], YAML.format(dollar))
        cfg = u.get_yaml(obj.args["configfile"])
        assert cfg["elasticsearch"]["client"]["hosts"] == default
        obj.teardown()

    def test_raises_exception(self):
        """Ensure that improper formatting raises a ConfigurationError exception"""
        obj = FileTestObj()
        obj.write_config(
            obj.args["configfile"],
            """
            [weird brackets go here]
            I'm not a yaml file!!!=I have no keys
            I have lots of spaces
            """,
        )
        with pytest.raises(ConfigurationError):
            u.get_yaml(obj.args["configfile"])
        obj.teardown()


class TestVerifyURLSchema:
    """Test the u.verify_url_schema function"""

    def test_full_schema(self):
        """Verify that a proper schema comes back unchanged"""
        url = "https://127.0.0.1:9200"
        assert u.verify_url_schema(url) == url

    def test_http_schema_no_port(self):
        """
        Verify that port 80 is tacked on when no port is specified as a port
        is required
        """
        http_port = "80"
        url = "http://127.0.0.1"
        assert u.verify_url_schema(url) == "http://127.0.0.1" + ":" + http_port

    def test_https_schema_no_port(self):
        """
        Verify that 443 is tacked on when no port is specified but https is
        the schema
        """
        https_port = "443"
        url = "https://127.0.0.1"
        assert u.verify_url_schema(url) == "https://127.0.0.1" + ":" + https_port

    def test_bad_schema_no_port(self):
        """A URL starting with other than http or https raises an exception w/o port"""
        url = "abcd://127.0.0.1"
        with pytest.raises(ConfigurationError):
            u.verify_url_schema(url)

    def test_bad_schema_with_port(self):
        """A URL starting with other than http or https raises an exception w/port"""
        url = "abcd://127.0.0.1:1234"
        with pytest.raises(ConfigurationError):
            u.verify_url_schema(url)

    def test_bad_schema_too_many_colons(self):
        """An invalid URL with too many colons raises an exception"""
        url = "http://127.0.0.1:1234:5678"
        with pytest.raises(ConfigurationError):
            u.verify_url_schema(url)


class TestGetVersion:
    """Test the u.get_version function"""

    def test_positive(self):
        """Ensure that what goes in comes back out unchanged"""
        client = Mock()
        client.info.return_value = {"version": {"number": "9.9.9"}}
        version = u.get_version(client)
        assert version == (9, 9, 9)

    def test_negative(self):
        """Ensure that mismatches are caught"""
        client = Mock()
        client.info.return_value = {"version": {"number": "9.9.9"}}
        version = u.get_version(client)
        assert version != (8, 8, 8)

    def test_dev_version_4_dots(self):
        """Test that anything after a third value and a period is truncated"""
        client = Mock()
        client.info.return_value = {"version": {"number": "9.9.9.dev"}}
        version = u.get_version(client)
        assert version == (9, 9, 9)

    def test_dev_version_with_dash(self):
        """Test that anything after a third value and a dash is truncated"""
        client = Mock()
        client.info.return_value = {"version": {"number": "9.9.9-dev"}}
        version = u.get_version(client)
        assert version == (9, 9, 9)


class TestFileExists:
    """Test the u.file_exists function"""

    def test_positive(self):
        """Ensure that an existing file returns True"""
        obj = FileTestObj()
        obj.write_config(
            obj.args["configfile"],
            """
            [weird brackets go here]
            I'm not a yaml file!!!=I have no keys
            I have lots of spaces
            """,
        )
        assert u.file_exists(obj.args["configfile"])
        obj.teardown()

    def test_negative(self):
        """Ensure that a non-existing file returns False"""
        obj = FileTestObj()
        obj.write_config(
            obj.args["configfile"],
            """
            This file will be deleted
            """,
        )
        obj.teardown()
        assert not u.file_exists(obj.args["configfile"])


class TestParseAPIKeyToken:
    """Test the u.parse_apikey_token function"""

    # FIX: these three methods were named without the ``test_`` prefix
    # (``success``, ``raises_exception1``, ``raises_exception2``), so pytest
    # never collected or executed them. Renamed so they actually run.

    def test_success(self):
        """Successfully parse a token"""
        token = "X1VoN0VZY0JJV0lrUTlrdS1QZ2k6QjNZN1VJMlVRd0NHM1VTdHhuNnRKdw=="
        expected = ("_Uh7EYcBIWIkQ9ku-Pgi", "B3Y7UI2UQwCG3UStxn6tJw")
        assert expected == u.parse_apikey_token(token)

    def test_raises_exception1(self):
        """Raise a binascii.Error when unable to base64 decode a token"""
        token = "Not a valid token"
        with pytest.raises(binascii.Error):
            u.parse_apikey_token(token)

    def test_raises_exception2(self):
        """Raise an IndexError when able to base64 decode a token, not split by colon"""
        token = "VGhpcyB0ZXh0IGhhcyBubyBjb2xvbg=="
        with pytest.raises(IndexError):
            u.parse_apikey_token(token)