pax_global_header00006660000000000000000000000064142616326270014523gustar00rootroot0000000000000052 comment=72f102d2e70df1f98948b8d7686babb84d4327ff elasticsearch-py-7.17.6/000077500000000000000000000000001426163262700150655ustar00rootroot00000000000000elasticsearch-py-7.17.6/.ci/000077500000000000000000000000001426163262700155365ustar00rootroot00000000000000elasticsearch-py-7.17.6/.ci/Dockerfile000066400000000000000000000016171426163262700175350ustar00rootroot00000000000000ARG PYTHON_VERSION=3.8 FROM python:${PYTHON_VERSION} # Default UID/GID to 1000 # it can be overridden at build time ARG BUILDER_UID=1000 ARG BUILDER_GID=1000 ENV BUILDER_USER elastic ENV BUILDER_GROUP elastic # Create user RUN groupadd --system -g ${BUILDER_GID} ${BUILDER_GROUP} \ && useradd --system --shell /bin/bash -u ${BUILDER_UID} -g ${BUILDER_GROUP} -d /var/lib/elastic -m elastic 1>/dev/null 2>/dev/null \ && mkdir -p /code/elasticsearch-py && mkdir /code/elasticsearch-py/build \ && chown -R ${BUILDER_USER}:${BUILDER_GROUP} /code/elasticsearch-py COPY --chown=$BUILDER_USER:$BUILDER_GROUP . . WORKDIR /code/elasticsearch-py USER ${BUILDER_USER}:${BUILDER_GROUP} COPY dev-requirements.txt . RUN python -m pip install \ -U --no-cache-dir \ --disable-pip-version-check \ nox -rdev-requirements.txt COPY --chown=$BUILDER_USER:$BUILDER_GROUP . . RUN python -m pip install -U -e .elasticsearch-py-7.17.6/.ci/certs/000077500000000000000000000000001426163262700166565ustar00rootroot00000000000000elasticsearch-py-7.17.6/.ci/certs/ca.crt000077500000000000000000000022601426163262700177560ustar00rootroot00000000000000-----BEGIN CERTIFICATE----- MIIDSjCCAjKgAwIBAgIVAJQLm8V2LcaCTHUcoIfO+KL63nG3MA0GCSqGSIb3DQEB CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu ZXJhdGVkIENBMB4XDTIwMDIyNjA1NTA1N1oXDTIzMDIyNTA1NTA1N1owNDEyMDAG A1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0Ew ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDYyajkPvGtUOE5M1OowQfB kWVrWjo1+LIxzgCeRHp0YztLtdVJ0sk2xoSrt2uZpxcPepdyOseLTjFJex1D2yCR AEniIqcFif4G72nDih2LlbhpUe/+/MTryj8ZTkFTzI+eMmbQi5FFMaH+kwufmdt/ 5/w8YazO18SxxJUlzMqzfNUrhM8vvvVdxgboU7PWhk28wZHCMHQovomHmzclhRpF N0FMktA98vHHeRjH19P7rNhifSd7hZzoH3H148HVAKoPgqnZ6vW2O2YfAWOP6ulq cyszr57p8fS9B2wSdlWW7nVHU1JuKcYD67CxbBS23BeGFgCj4tiNrmxO8S5Yf85v AgMBAAGjUzBRMB0GA1UdDgQWBBSWAlip9eoPmnG4p4OFZeOUBlAbNDAfBgNVHSME GDAWgBSWAlip9eoPmnG4p4OFZeOUBlAbNDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQA19qqrMTWl7YyId+LR/QIHDrP4jfxmrEELrAL58q5Epc1k XxZLzOBSXoBfBrPdv+3XklWqXrZjKWfdkux0Xmjnl4qul+srrZDLJVZG3I7IrITh AmQUmL9MuPiMnAcxoGZp1xpijtW8Qmd2qnambbljWfkuVaa4hcVRfrAX6TciIQ21 bS5aeLGrPqR14h30YzDp0RMmTujEa1o6ExN0+RSTkE9m89Q6WdM69az8JW7YkWqm I+UCG3TcLd3TXmN1zNQkq4y2ObDK4Sxy/2p6yFPI1Fds5w/zLfBOvvPQY61vEqs8 SCCcQIe7f6NDpIRIBlty1C9IaEHj7edyHjF6rtYb -----END CERTIFICATE----- elasticsearch-py-7.17.6/.ci/certs/ca.pem000066400000000000000000000022601426163262700177440ustar00rootroot00000000000000-----BEGIN CERTIFICATE----- MIIDSjCCAjKgAwIBAgIVAJQLm8V2LcaCTHUcoIfO+KL63nG3MA0GCSqGSIb3DQEB CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu ZXJhdGVkIENBMB4XDTIwMDIyNjA1NTA1N1oXDTIzMDIyNTA1NTA1N1owNDEyMDAG A1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0Ew ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDYyajkPvGtUOE5M1OowQfB kWVrWjo1+LIxzgCeRHp0YztLtdVJ0sk2xoSrt2uZpxcPepdyOseLTjFJex1D2yCR AEniIqcFif4G72nDih2LlbhpUe/+/MTryj8ZTkFTzI+eMmbQi5FFMaH+kwufmdt/ 5/w8YazO18SxxJUlzMqzfNUrhM8vvvVdxgboU7PWhk28wZHCMHQovomHmzclhRpF N0FMktA98vHHeRjH19P7rNhifSd7hZzoH3H148HVAKoPgqnZ6vW2O2YfAWOP6ulq 
cyszr57p8fS9B2wSdlWW7nVHU1JuKcYD67CxbBS23BeGFgCj4tiNrmxO8S5Yf85v AgMBAAGjUzBRMB0GA1UdDgQWBBSWAlip9eoPmnG4p4OFZeOUBlAbNDAfBgNVHSME GDAWgBSWAlip9eoPmnG4p4OFZeOUBlAbNDAPBgNVHRMBAf8EBTADAQH/MA0GCSqG SIb3DQEBCwUAA4IBAQA19qqrMTWl7YyId+LR/QIHDrP4jfxmrEELrAL58q5Epc1k XxZLzOBSXoBfBrPdv+3XklWqXrZjKWfdkux0Xmjnl4qul+srrZDLJVZG3I7IrITh AmQUmL9MuPiMnAcxoGZp1xpijtW8Qmd2qnambbljWfkuVaa4hcVRfrAX6TciIQ21 bS5aeLGrPqR14h30YzDp0RMmTujEa1o6ExN0+RSTkE9m89Q6WdM69az8JW7YkWqm I+UCG3TcLd3TXmN1zNQkq4y2ObDK4Sxy/2p6yFPI1Fds5w/zLfBOvvPQY61vEqs8 SCCcQIe7f6NDpIRIBlty1C9IaEHj7edyHjF6rtYb -----END CERTIFICATE----- elasticsearch-py-7.17.6/.ci/certs/testnode.crt000077500000000000000000000023211426163262700212160ustar00rootroot00000000000000-----BEGIN CERTIFICATE----- MIIDYjCCAkqgAwIBAgIVAIZQH0fe5U+bGQ6m1JUBO/AQkQ/9MA0GCSqGSIb3DQEB CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu ZXJhdGVkIENBMB4XDTIwMDMyNzE5MTcxMVoXDTIzMDMyNzE5MTcxMVowEzERMA8G A1UEAxMIaW5zdGFuY2UwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDB fco1t1+sE1gTwTVGcXKZqJTP2GjMHM0cfJE5KKfwC5B+pHADRT6FZxvepgKjEBDt CK+2Rmotyeb15XXMSKguNhyT+2PuKvT5r05L7P91XRYXrwxG2swJPtct7A87xdFa Ek+YRpqGGmTaux2jOELMiAmqEzoj6w/xFq+LF4SolTW4wOL2eLFkEFHBX2oCwU5T Q+B+7E9zL45nFWlkeRGJ+ZQTnRNZ/1r4N9A9Gtj4x/H1/y4inWndikdxAb5QiEYJ T+vbQWzHYWjz13ttHJsz+6T8rvA1jK+buHgVh4K8lV13X9k54soBqHB8va7/KIJP g8gvd6vusEI7Bmfl1as7AgMBAAGjgYswgYgwHQYDVR0OBBYEFKnnpvuVYwtFSUis WwN9OHLyExzJMB8GA1UdIwQYMBaAFJYCWKn16g+acbing4Vl45QGUBs0MDsGA1Ud EQQ0MDKCCWxvY2FsaG9zdIIIaW5zdGFuY2WHBH8AAAGHEAAAAAAAAAAAAAAAAAAA AAGCA2VzMTAJBgNVHRMEAjAAMA0GCSqGSIb3DQEBCwUAA4IBAQAPNsIoD4GBrTgR jfvBuHS6eU16P95m16O8Mdpr4SMQgWLQUhs8aoVgfwpg2TkbCWxOe6khJOyNm7bf fW4aFQ/OHcQV4Czz3c7eOHTWSyMlCOv+nRXd4giJZ5TOHw1zKGmKXOIvhvE6RfdF uBBfrusk164H4iykm0Bbr/wo4d6wuebp3ZYLPw5zV0D08rsaR+3VJ9VxWuFpdm/r 2onYOohyuX9DRjAczasC+CRRQN4eHJlRfSQB8WfTKw3EloRJJDAg6SJyGiAJ++BF hnqfNcEyKes2AWagFF9aTbEJMrzMhH+YB5F+S/PWvMUlFzcoocVKqc4pIrjKUNWO 6nbTxeAB -----END CERTIFICATE----- elasticsearch-py-7.17.6/.ci/certs/testnode.key000077500000000000000000000032131426163262700212170ustar00rootroot00000000000000-----BEGIN RSA PRIVATE KEY----- MIIEowIBAAKCAQEAwX3KNbdfrBNYE8E1RnFymaiUz9hozBzNHHyROSin8AuQfqRw A0U+hWcb3qYCoxAQ7QivtkZqLcnm9eV1zEioLjYck/tj7ir0+a9OS+z/dV0WF68M RtrMCT7XLewPO8XRWhJPmEaahhpk2rsdozhCzIgJqhM6I+sP8RavixeEqJU1uMDi 9nixZBBRwV9qAsFOU0PgfuxPcy+OZxVpZHkRifmUE50TWf9a+DfQPRrY+Mfx9f8u Ip1p3YpHcQG+UIhGCU/r20Fsx2Fo89d7bRybM/uk/K7wNYyvm7h4FYeCvJVdd1/Z OeLKAahwfL2u/yiCT4PIL3er7rBCOwZn5dWrOwIDAQABAoIBAFcm4ICnculf4Sks umFbUiISA81GjZV6V4zAMu1K+bGuk8vnJyjh9JJD6hK0NbXa07TgV7zDJKoxKd2S GCgGhfIin2asMcuh/6vDIYIjYsErR3stdlsnzAVSD7v4ergSlwR6AO32xz0mAE1h QK029yeHEstPU72/7/NIo5MD6dXAbut1MzgijZD8RQo1z21D6qmLcPTVTfkn7a3W MY3y7XUIkA1TOyIRsH3k6F6NBWkvtXbwOUeLCJ14EvS8T9BqhIhPDZv8mQTRLDOD tQRyC4Cnw+UhYmnMFJhj6N2jpTBv/AdoKcRC56uBJyPW+dxj6i4e7n3pQuxqRvpI LLJJsskCgYEA4QQxzuJizLKV75rE+Qxg0Ej0Gid1aj3H5eeTZOUhm9KC8KDfPdpk msKaNzJq/VDcqHPluGS1jYZVgZlal1nk5xKBcbQ4n297VPVd+sLtlf0bj4atlDUO +iOVo0H7k5yWvj+TzVRlc5zjDLcnQh8i+22o3+65hIrb2zpzg/cCZJ8CgYEA3CJX bjmWPQ0uZVIa8Wz8cJFtKT9uVl7Z3/f6HjN9I0b/9MmVlNxQVAilVwhDkzR/UawG QeRFBJ6XWRwX0aoMq+O9VSNu/R2rtEMpIYt3LwbI3yw6GRoCdB5qeL820O+KX5Fl /z+ZNgrHgA1yKPVf+8ke2ZtLEqPHMN+BMuq8t+UCgYEAy0MfvzQPbbuw55WWcyb0 WZJdNzcHwKX4ajzrj4vP9VOPRtD7eINMt+QsrMnVjei6u0yeahhHTIXZvc2K4Qeq V/YGinDzaUqqTU+synXFauUOPXO6XxQi6GC2rphPKsOcBFWoLSYc0vgYvgbA5uD7 l8Yyc77RROKuwfWmHcJHHh8CgYBurGFSjGdJWHgr/oSHPqkIG0VLiJV7nQJjBPRd /Lr8YnTK6BJpHf7Q0Ov3frMirjEYqakXtaExel5TMbmT8q+eN8h3pnHlleY+oclr EQghv4J8GWs4NYhoQuZ6wH/ZuaTS+XHTS3FG51J3wcrUZtET8ICvHNE4lNjPbH8z 
TysENQKBgHER1RtDFdz+O7mlWibrHk8JDgcVdZV/pBF+9cb7r/orkH9RLAHDlsAO tuSVaQmm5eqgaAxMamBXSyw1lir07byemyuEDg0mJ1rNUGsAY8P+LWr579gvKMme 5gvrJr99JkBTV3z+TiL7dZa52eW00Ijqg2qcbHGpq3kXWWkbd8Tn -----END RSA PRIVATE KEY----- elasticsearch-py-7.17.6/.ci/functions/000077500000000000000000000000001426163262700175465ustar00rootroot00000000000000elasticsearch-py-7.17.6/.ci/functions/cleanup.sh000077500000000000000000000036401426163262700215370ustar00rootroot00000000000000#!/usr/bin/env bash # # Shared cleanup routines between different steps # # Please source .ci/functions/imports.sh as a whole not just this file # # Version 1.0.0 # - Initial version after refactor function cleanup_volume { if [[ "$(docker volume ls -q -f name=$1)" ]]; then echo -e "\033[34;1mINFO:\033[0m Removing volume $1\033[0m" (docker volume rm "$1") || true fi } function container_running { if [[ "$(docker ps -q -f name=$1)" ]]; then return 0; else return 1; fi } function cleanup_node { if container_running "$1"; then echo -e "\033[34;1mINFO:\033[0m Removing container $1\033[0m" (docker container rm --force --volumes "$1") || true fi if [[ -n "$1" ]]; then echo -e "\033[34;1mINFO:\033[0m Removing volume $1-${suffix}-data\033[0m" cleanup_volume "$1-${suffix}-data" fi } function cleanup_network { if [[ "$(docker network ls -q -f name=$1)" ]]; then echo -e "\033[34;1mINFO:\033[0m Removing network $1\033[0m" (docker network rm "$1") || true fi } function cleanup_trap { status=$? set +x if [[ "$DETACH" != "true" ]]; then echo -e "\033[34;1mINFO:\033[0m clean the network if not detached (start and exit)\033[0m" cleanup_all_in_network "$1" fi # status is 0 or SIGINT if [[ "$status" == "0" || "$status" == "130" ]]; then echo -e "\n\033[32;1mSUCCESS run-tests\033[0m" exit 0 else echo -e "\n\033[31;1mFAILURE during run-tests\033[0m" exit ${status} fi }; function cleanup_all_in_network { if [[ -z "$(docker network ls -q -f name="^$1\$")" ]]; then echo -e "\033[34;1mINFO:\033[0m $1 is already deleted\033[0m" return 0 fi containers=$(docker network inspect -f '{{ range $key, $value := .Containers }}{{ printf "%s\n" .Name}}{{ end }}' $1) while read -r container; do cleanup_node "$container" done <<< "$containers" cleanup_network $1 echo -e "\033[32;1mSUCCESS:\033[0m Cleaned up and exiting\033[0m" }; elasticsearch-py-7.17.6/.ci/functions/imports.sh000077500000000000000000000034631426163262700216100ustar00rootroot00000000000000#!/usr/bin/env bash # # Sets up all the common variables and imports relevant functions # # Version 1.0.1 # - Initial version after refactor # - Validate STACK_VERSION asap function require_stack_version() { if [[ -z $STACK_VERSION ]]; then echo -e "\033[31;1mERROR:\033[0m Required environment variable [STACK_VERSION] not set\033[0m" exit 1 fi } require_stack_version if [[ -z $es_node_name ]]; then # only set these once set -euo pipefail export TEST_SUITE=${TEST_SUITE-platinum} export RUNSCRIPTS=${RUNSCRIPTS-} export DETACH=${DETACH-false} export CLEANUP=${CLEANUP-false} export es_node_name=instance export elastic_password=changeme export elasticsearch_image=elasticsearch export elasticsearch_url=https://elastic:${elastic_password}@${es_node_name}:9200 if [[ $TEST_SUITE != "platinum" ]]; then export elasticsearch_url=http://${es_node_name}:9200 fi export external_elasticsearch_url=${elasticsearch_url/$es_node_name/localhost} export elasticsearch_container="${elasticsearch_image}:${STACK_VERSION}" export suffix=rest-test export moniker=$(echo "$elasticsearch_container" | tr -C "[:alnum:]" '-') export 
network_name=${moniker}${suffix}
  export ssl_cert="${script_path}/certs/testnode.crt"
  export ssl_key="${script_path}/certs/testnode.key"
  export ssl_ca="${script_path}/certs/ca.crt"
fi

export script_path=$(dirname $(realpath -s $0))
source $script_path/functions/cleanup.sh
source $script_path/functions/wait-for-container.sh
trap "cleanup_trap ${network_name}" EXIT

if [[ "$CLEANUP" == "true" ]]; then
  cleanup_all_in_network $network_name
  exit 0
fi

echo -e "\033[34;1mINFO:\033[0m Creating network $network_name if it does not exist already \033[0m"
docker network inspect "$network_name" > /dev/null 2>&1 || docker network create "$network_name"
elasticsearch-py-7.17.6/.ci/functions/wait-for-container.sh000077500000000000000000000024651426163262700236240ustar00rootroot00000000000000#!/usr/bin/env bash
#
# Exposes a routine scripts can call to wait for a container if that container has set up a health command
#
# Please source .ci/functions/imports.sh as a whole not just this file
#
# Version 1.0.1
# - Initial version after refactor
# - Make sure wait_for_container is silent

function wait_for_container {
  set +x
  until ! container_running "$1" || (container_running "$1" && [[ "$(docker inspect -f "{{.State.Health.Status}}" ${1})" != "starting" ]]); do
    echo ""
    docker inspect -f "{{range .State.Health.Log}}{{.Output}}{{end}}" ${1}
    echo -e "\033[34;1mINFO:\033[0m waiting for node $1 to be up\033[0m"
    sleep 2;
  done;

  # Always show logs if the container is running, this is very useful both on CI and while developing
  if container_running $1; then
    docker logs $1
  fi

  if ! container_running $1 || [[ "$(docker inspect -f "{{.State.Health.Status}}" ${1})" != "healthy" ]]; then
    cleanup_all_in_network $2
    echo
    echo -e "\033[31;1mERROR:\033[0m Failed to start $1 in detached mode beyond health checks\033[0m"
    echo -e "\033[31;1mERROR:\033[0m dumped the docker log before shutting the node down\033[0m"
    return 1
  else
    echo
    echo -e "\033[32;1mSUCCESS:\033[0m Detached and healthy: ${1} on docker network: ${network_name}\033[0m"
    return 0
  fi
}
elasticsearch-py-7.17.6/.ci/make.sh000077500000000000000000000120011426163262700170060ustar00rootroot00000000000000#!/usr/bin/env bash
# ------------------------------------------------------- #
#
# Skeleton for common build entry script for all elastic
# clients. Needs to be adapted to individual client usage.
#
# Must be called: ./.ci/make.sh <target> <version>
#
# Version: 1.1.0
#
# Targets:
# ---------------------------
# assemble <version> : build client artefacts with version
# bump <version>     : bump client internals to version
# codegen <version>  : generate endpoints
# docsgen <version>  : generate documentation
# examplesgen        : generate the doc examples
# clean              : clean workspace
#
# ------------------------------------------------------- #

# ------------------------------------------------------- #
# Bootstrap
# ------------------------------------------------------- #

script_path=$(dirname "$(realpath -s "$0")")
repo=$(realpath "$script_path/../")

# shellcheck disable=SC1090
CMD=$1
TASK=$1
TASK_ARGS=()
VERSION=$2
STACK_VERSION=$VERSION
set -euo pipefail

product="elastic/elasticsearch-py"
output_folder=".ci/output"
codegen_folder=".ci/output"
OUTPUT_DIR="$repo/${output_folder}"
REPO_BINDING="${OUTPUT_DIR}:/sln/${output_folder}"
WORKFLOW="${WORKFLOW-staging}"
mkdir -p "$OUTPUT_DIR"

echo -e "\033[34;1mINFO:\033[0m PRODUCT ${product}\033[0m"
echo -e "\033[34;1mINFO:\033[0m VERSION ${STACK_VERSION}\033[0m"
echo -e "\033[34;1mINFO:\033[0m OUTPUT_DIR ${OUTPUT_DIR}\033[0m"

# ------------------------------------------------------- #
# Parse Command
# ------------------------------------------------------- #

case $CMD in
  clean)
    echo -e "\033[36;1mTARGET: clean workspace $output_folder\033[0m"
    rm -rf "$output_folder"
    echo -e "\033[32;1mdone.\033[0m"
    exit 0
    ;;
  assemble)
    if [ -z "$VERSION" ]; then
      echo -e "\033[31;1mTARGET: assemble -> missing version parameter\033[0m"
      exit 1
    fi
    echo -e "\033[36;1mTARGET: assemble artefact $VERSION\033[0m"
    TASK=release
    TASK_ARGS=("$VERSION" "$output_folder")
    ;;
  codegen)
    if [ -z "$VERSION" ]; then
      echo -e "\033[31;1mTARGET: codegen -> missing version parameter\033[0m"
      exit 1
    fi
    echo -e "\033[36;1mTARGET: codegen API v$VERSION\033[0m"
    TASK=codegen
    # VERSION is BRANCH here for now
    TASK_ARGS=("$VERSION" "$codegen_folder")
    ;;
  docsgen)
    if [ -z "$VERSION" ]; then
      echo -e "\033[31;1mTARGET: docsgen -> missing version parameter\033[0m"
      exit 1
    fi
    echo -e "\033[36;1mTARGET: generate docs for $VERSION\033[0m"
    TASK=codegen
    # VERSION is BRANCH here for now
    TASK_ARGS=("$VERSION" "$codegen_folder")
    ;;
  examplesgen)
    echo -e "\033[36;1mTARGET: generate examples\033[0m"
    TASK=codegen
    # VERSION is BRANCH here for now
    TASK_ARGS=("$VERSION" "$codegen_folder")
    ;;
  bump)
    if [ -z "$VERSION" ]; then
      echo -e "\033[31;1mTARGET: bump -> missing version parameter\033[0m"
      exit 1
    fi
    echo -e "\033[36;1mTARGET: bump to version $VERSION\033[0m"
    TASK=bump
    # VERSION is BRANCH here for now
    TASK_ARGS=("$VERSION")
    ;;
  *)
    echo -e "\nUsage:\n\t $CMD is not supported right now\n"
    exit 1
esac

# ------------------------------------------------------- #
# Build Container
# ------------------------------------------------------- #

echo -e "\033[34;1mINFO: building $product container\033[0m"

docker build \
  --build-arg BUILDER_UID="$(id -u)" \
  --file $repo/.ci/Dockerfile \
  --tag ${product} \
  .
# ------------------------------------------------------- #
# Run the Container
# ------------------------------------------------------- #

echo -e "\033[34;1mINFO: running $product container\033[0m"

if [[ "$CMD" == "assemble" ]]; then
  # Build dists into .ci/output
  docker run \
    -u "$(id -u)" \
    --rm -v $repo/.ci/output:/code/elasticsearch-py/dist \
    $product \
    /bin/bash -c "python /code/elasticsearch-py/utils/build-dists.py $VERSION"

  # Verify that there are dists in .ci/output
  if compgen -G ".ci/output/*" > /dev/null; then
    # Tarball everything up in .ci/output
    if [[ "$WORKFLOW" == 'snapshot' ]]; then
      cd $repo/.ci/output && tar -czvf elasticsearch-py-$VERSION-SNAPSHOT.tar.gz * && cd -
    else
      cd $repo/.ci/output && tar -czvf elasticsearch-py-$VERSION.tar.gz * && cd -
    fi

    echo -e "\033[32;1mTARGET: successfully assembled client v$VERSION\033[0m"
    exit 0
  else
    echo -e "\033[31;1mTARGET: assemble failed, empty workspace!\033[0m"
    exit 1
  fi
fi

if [[ "$CMD" == "bump" ]]; then
  docker run \
    --rm -v $repo:/code/elasticsearch-py \
    $product \
    /bin/bash -c "python /code/elasticsearch-py/utils/bump-version.py $VERSION"
  exit 0
fi

if [[ "$CMD" == "codegen" ]]; then
  echo "TODO"
fi

if [[ "$CMD" == "docsgen" ]]; then
  echo "TODO"
fi

if [[ "$CMD" == "examplesgen" ]]; then
  echo "TODO"
fi

echo "Must be called with '.ci/make.sh [command]'"
exit 1
elasticsearch-py-7.17.6/.ci/run-elasticsearch.sh000077500000000000000000000124451426163262700215170ustar00rootroot00000000000000#!/usr/bin/env bash
#
# Launch one or more Elasticsearch nodes via the Docker image,
# to form a cluster suitable for running the REST API tests.
#
# Export the STACK_VERSION variable, e.g. '8.0.0-SNAPSHOT'.
# Export the TEST_SUITE variable, e.g. 'free' or 'platinum'; defaults to 'free'.
# Export the NUMBER_OF_NODES variable to start more than 1 node

# Version 1.6.0
# - Initial version of the run-elasticsearch.sh script
# - Deleting the volume should not depend on the container still running
# - Fixed `ES_JAVA_OPTS` config
# - Moved to STACK_VERSION and TEST_VERSION
# - Refactored into functions and imports
# - Support NUMBER_OF_NODES
# - Added 5 retries on docker pull for fixing transient network errors
# - Added flags to make local CCR configurations work
# - Added action.destructive_requires_name=false as the default will be true in v8
# - Added ingest.geoip.downloader.enabled=false as it causes false positives in testing
# - Moved ELASTIC_PASSWORD and xpack.security.enabled to the base arguments for "Security On by default"
# - Use https only when TEST_SUITE is "platinum", when "free" use http

script_path=$(dirname $(realpath -s $0))
source $script_path/functions/imports.sh
set -euo pipefail

echo -e "\033[34;1mINFO:\033[0m Take down node if called twice with the same arguments (DETACH=true) or on separate terminals \033[0m"
cleanup_node $es_node_name

master_node_name=${es_node_name}
cluster_name=${moniker}${suffix}

declare -a volumes
environment=($(cat <<-END
  --env ELASTIC_PASSWORD=$elastic_password
  --env xpack.security.enabled=true
  --env node.name=$es_node_name
  --env cluster.name=$cluster_name
  --env cluster.initial_master_nodes=$master_node_name
  --env discovery.seed_hosts=$master_node_name
  --env cluster.routing.allocation.disk.threshold_enabled=false
  --env bootstrap.memory_lock=true
  --env node.attr.testattr=test
  --env path.repo=/tmp
  --env repositories.url.allowed_urls=http://snapshot.test*
  --env action.destructive_requires_name=false
  --env ingest.geoip.downloader.enabled=false
  --env cluster.deprecation_indexing.enabled=false
END
))

if [[
"$TEST_SUITE" == "platinum" ]]; then environment+=($(cat <<-END --env xpack.license.self_generated.type=trial --env xpack.security.http.ssl.enabled=true --env xpack.security.http.ssl.verification_mode=certificate --env xpack.security.http.ssl.key=certs/testnode.key --env xpack.security.http.ssl.certificate=certs/testnode.crt --env xpack.security.http.ssl.certificate_authorities=certs/ca.crt --env xpack.security.transport.ssl.enabled=true --env xpack.security.transport.ssl.verification_mode=certificate --env xpack.security.transport.ssl.key=certs/testnode.key --env xpack.security.transport.ssl.certificate=certs/testnode.crt --env xpack.security.transport.ssl.certificate_authorities=certs/ca.crt END )) volumes+=($(cat <<-END --volume $ssl_cert:/usr/share/elasticsearch/config/certs/testnode.crt --volume $ssl_key:/usr/share/elasticsearch/config/certs/testnode.key --volume $ssl_ca:/usr/share/elasticsearch/config/certs/ca.crt END )) else environment+=($(cat <<-END --env xpack.security.http.ssl.enabled=false END )) fi cert_validation_flags="" if [[ "$TEST_SUITE" == "platinum" ]]; then cert_validation_flags="--insecure --cacert /usr/share/elasticsearch/config/certs/ca.crt --resolve ${es_node_name}:443:127.0.0.1" fi # Pull the container, retry on failures up to 5 times with # short delays between each attempt. Fixes most transient network errors. docker_pull_attempts=0 until [ "$docker_pull_attempts" -ge 5 ] do docker pull docker.elastic.co/elasticsearch/"$elasticsearch_container" && break docker_pull_attempts=$((docker_pull_attempts+1)) echo "Failed to pull image, retrying in 10 seconds (retry $docker_pull_attempts/5)..." sleep 10 done NUMBER_OF_NODES=${NUMBER_OF_NODES-1} http_port=9200 for (( i=0; i<$NUMBER_OF_NODES; i++, http_port++ )); do node_name=${es_node_name}$i node_url=${external_elasticsearch_url/9200/${http_port}}$i if [[ "$i" == "0" ]]; then node_name=$es_node_name; fi environment+=($(cat <<-END --env node.name=$node_name END )) echo "$i: $http_port $node_url " volume_name=${node_name}-${suffix}-data volumes+=($(cat <<-END --volume $volume_name:/usr/share/elasticsearch/data${i} END )) # make sure we detach for all but the last node if DETACH=false (default) so all nodes are started local_detach="true" if [[ "$i" == "$((NUMBER_OF_NODES-1))" ]]; then local_detach=$DETACH; fi echo -e "\033[34;1mINFO:\033[0m Starting container $node_name \033[0m" set -x docker run \ -u "$(id -u)" \ --name "$node_name" \ --network "$network_name" \ --env "ES_JAVA_OPTS=-Xms1g -Xmx1g -da:org.elasticsearch.xpack.ccr.index.engine.FollowingEngineAssertions" \ "${environment[@]}" \ "${volumes[@]}" \ --publish "$http_port":9200 \ --ulimit nofile=65536:65536 \ --ulimit memlock=-1:-1 \ --detach="$local_detach" \ --health-cmd="curl $cert_validation_flags --fail $elasticsearch_url/_cluster/health || exit 1" \ --health-interval=2s \ --health-retries=20 \ --health-timeout=2s \ --rm \ docker.elastic.co/elasticsearch/"$elasticsearch_container"; set +x if wait_for_container "$es_node_name" "$network_name"; then echo -e "\033[32;1mSUCCESS:\033[0m Running on: $node_url\033[0m" fi done elasticsearch-py-7.17.6/.ci/run-repository.sh000077500000000000000000000041031426163262700211140ustar00rootroot00000000000000#!/usr/bin/env bash # Called by entry point `run-test` use this script to add your repository specific test commands # Once called Elasticsearch is up and running and the following parameters are available to this script # ELASTICSEARCH_VERSION -- version e.g Major.Minor.Patch(-Prelease) # ELASTICSEARCH_CONTAINER -- the 
docker moniker as a reference to know which docker image distribution is used
# ELASTICSEARCH_URL -- The URL at which Elasticsearch is reachable
# NETWORK_NAME -- The docker network name
# NODE_NAME -- The docker container name also used as Elasticsearch node name

# When run in CI, the test-matrix is used to define additional variables
# TEST_SUITE -- either `free` or `platinum`, defaults to `platinum` in `run-tests`

set -e

echo -e "\033[34;1mINFO:\033[0m URL ${ELASTICSEARCH_URL}\033[0m"
echo -e "\033[34;1mINFO:\033[0m VERSION ${ELASTICSEARCH_VERSION}\033[0m"
echo -e "\033[34;1mINFO:\033[0m CONTAINER ${ELASTICSEARCH_CONTAINER}\033[0m"
echo -e "\033[34;1mINFO:\033[0m TEST_SUITE ${TEST_SUITE}\033[0m"
echo -e "\033[34;1mINFO:\033[0m PYTHON_VERSION ${PYTHON_VERSION}\033[0m"
echo -e "\033[34;1mINFO:\033[0m PYTHON_CONNECTION_CLASS ${PYTHON_CONNECTION_CLASS}\033[0m"

echo -e "\033[1m>>>>> Build [elastic/elasticsearch-py container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"

docker build \
  --file .ci/Dockerfile \
  --tag elastic/elasticsearch-py \
  --build-arg PYTHON_VERSION=${PYTHON_VERSION} \
  .

echo -e "\033[1m>>>>> Run [elastic/elasticsearch-py container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"

if [[ "$STACK_VERSION" == "8.0.0-SNAPSHOT" ]]; then
  export ELASTIC_CLIENT_APIVERSIONING="true"
fi

mkdir -p junit
docker run \
  -u "$(id -u)" \
  --network=${network_name} \
  --env "STACK_VERSION=${STACK_VERSION}" \
  --env "ELASTICSEARCH_URL=${elasticsearch_url}" \
  --env "TEST_SUITE=${TEST_SUITE}" \
  --env "PYTHON_CONNECTION_CLASS=${PYTHON_CONNECTION_CLASS}" \
  --env "TEST_TYPE=server" \
  --env "ELASTIC_CLIENT_APIVERSIONING=${ELASTIC_CLIENT_APIVERSIONING:-false}" \
  --name elasticsearch-py \
  --rm \
  elastic/elasticsearch-py \
  python setup.py test
elasticsearch-py-7.17.6/.ci/run-tests000077500000000000000000000020341426163262700174270ustar00rootroot00000000000000#!/usr/bin/env bash
#
# Version 1.1
# - Moved to .ci folder and separated out `run-repository.sh`
# - Add `$RUNSCRIPTS` env var for running Elasticsearch-dependent products

# Default environment variables
export STACK_VERSION="${STACK_VERSION:=8.0.0-SNAPSHOT}"
export TEST_SUITE="${TEST_SUITE:=platinum}"
export PYTHON_VERSION="${PYTHON_VERSION:=3.9}"
export PYTHON_CONNECTION_CLASS="${PYTHON_CONNECTION_CLASS:=Urllib3HttpConnection}"

script_path=$(dirname $(realpath -s $0))
source $script_path/functions/imports.sh

set -euo pipefail

echo -e "\033[1m>>>>> Start [$STACK_VERSION container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
DETACH=true bash .ci/run-elasticsearch.sh

if [[ -n "$RUNSCRIPTS" ]]; then
  for RUNSCRIPT in ${RUNSCRIPTS//,/ } ; do
    echo -e "\033[1m>>>>> Running run-$RUNSCRIPT.sh >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
    CONTAINER_NAME=${RUNSCRIPT} \
      DETACH=true \
      bash .ci/run-${RUNSCRIPT}.sh
  done
fi

echo -e "\033[1m>>>>> Repository specific tests >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
bash .ci/run-repository.sh
elasticsearch-py-7.17.6/.ci/test-matrix.yml000077500000000000000000000011331426163262700205430ustar00rootroot00000000000000STACK_VERSION:
  - "7.17.0-SNAPSHOT"
  - "8.0.0-SNAPSHOT"

TEST_SUITE:
  - platinum

PYTHON_VERSION:
  - "2.7"
  - "3.4"
  - "3.5"
  - "3.6"
  - "3.7"
  - "3.8"
  - "3.9"

PYTHON_CONNECTION_CLASS:
  - Urllib3HttpConnection
  - RequestsHttpConnection

exclude:
  # TODO: Remove for 7.16 branch
  - STACK_VERSION: "8.0.0-SNAPSHOT"
    PYTHON_VERSION: "3.4"
  - STACK_VERSION: "8.0.0-SNAPSHOT"
    PYTHON_VERSION: "3.5"
  - STACK_VERSION: "8.0.0-SNAPSHOT"
    PYTHON_VERSION: "3.6"
  - STACK_VERSION: "8.0.0-SNAPSHOT"
    PYTHON_VERSION: "3.7"
  - STACK_VERSION: "8.0.0-SNAPSHOT"
    PYTHON_VERSION: "3.8"
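# Note (descriptive comment, derived from the lists above): with these
# excludes, 8.0.0-SNAPSHOT is only exercised on Python 2.7 and 3.9, while
# 7.17.0-SNAPSHOT runs on every listed Python version; each remaining
# combination is run once per connection class.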
elasticsearch-py-7.17.6/.coveragerc000066400000000000000000000003211426163262700172020ustar00rootroot00000000000000[run] omit = */python?.?/* */lib-python/?.?/*.py */lib_pypy/* */site-packages/* *.egg/* test_elasticsearch/* [report] show_missing = True exclude_lines= raise NotImplementedError* elasticsearch-py-7.17.6/.dockerignore000066400000000000000000000000451426163262700175400ustar00rootroot00000000000000docs example venv .tox .nox .*_cache elasticsearch-py-7.17.6/.github/000077500000000000000000000000001426163262700164255ustar00rootroot00000000000000elasticsearch-py-7.17.6/.github/ISSUE_TEMPLATE.md000066400000000000000000000021121426163262700211260ustar00rootroot00000000000000 **Describe the feature**: **Elasticsearch version** (`bin/elasticsearch --version`): **`elasticsearch-py` version (`elasticsearch.__versionstr__`)**: Please make sure the major version matches the Elasticsearch server you are running. **Description of the problem including expected versus actual behavior**: **Steps to reproduce**: elasticsearch-py-7.17.6/.github/workflows/000077500000000000000000000000001426163262700204625ustar00rootroot00000000000000elasticsearch-py-7.17.6/.github/workflows/ci.yml000066400000000000000000000030371426163262700216030ustar00rootroot00000000000000--- name: CI on: [push, pull_request] jobs: lint: runs-on: ubuntu-latest steps: - name: Checkout Repository uses: actions/checkout@v2 - name: Set up Python 3.7 uses: actions/setup-python@v2 with: python-version: 3.7 - name: Install dependencies run: | python3.7 -m pip install nox - name: Lint the code run: nox -s lint docs: runs-on: ubuntu-latest steps: - name: Checkout Repository uses: actions/checkout@v2 - name: Set up Python 3.7 uses: actions/setup-python@v2 with: python-version: 3.7 - name: Install dependencies run: | python3.7 -m pip install nox - name: Build the docs run: nox -s docs test-linux: strategy: fail-fast: false matrix: python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9] experimental: [false] include: - python-version: 3.10-dev experimental: true runs-on: ubuntu-latest name: test-${{ matrix.python-version }} continue-on-error: ${{ matrix.experimental }} steps: - name: Checkout Repository uses: actions/checkout@v2 - name: Set Up Python - ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Install Dependencies run: | python -m pip install -r dev-requirements.txt - name: Run Tests run: | python setup.py test elasticsearch-py-7.17.6/.github/workflows/unified-release.yml000066400000000000000000000007701426163262700242520ustar00rootroot00000000000000name: Unified Release on: pull_request: paths-ignore: - 'README.md' push: paths-ignore: - 'README.md' branches: - main - master - '[0-9]+.[0-9]+' - '[0-9]+.x' jobs: assemble: name: Assemble runs-on: ubuntu-latest env: STACK_VERSION: "7.17-SNAPSHOT" steps: - name: Checkout uses: actions/checkout@v2 - name: "Assemble ${{ env.STACK_VERSION }}" run: "./.ci/make.sh assemble ${{ env.STACK_VERSION }}" elasticsearch-py-7.17.6/.gitignore000066400000000000000000000041631426163262700170610ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject 
date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover *.py,cover .hypothesis/ .pytest_cache/ cover/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 db.sqlite3-journal # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/sphinx/_build/ # PyBuilder .pybuilder/ target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv # For a library or package, you might want to ignore these files since the code is # intended to run in multiple environments; otherwise, check them in: # .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. #Pipfile.lock # PEP 582; used by e.g. github.com/David-OConnor/pyflow __pypackages__/ # Celery stuff celerybeat-schedule celerybeat.pid # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ # pytype static type analyzer .pytype/ # Cython debug symbols cython_debug/ # Pycharm project settings .idea # elasticsearch files test_elasticsearch/cover test_elasticsearch/local.py .ci/output elasticsearch-py-7.17.6/.readthedocs.yml000066400000000000000000000002441426163262700201530ustar00rootroot00000000000000version: 2 sphinx: configuration: docs/sphinx/conf.py python: version: 3.7 install: - method: pip path: . - requirements: dev-requirements.txt elasticsearch-py-7.17.6/CHANGELOG.md000066400000000000000000000001371426163262700166770ustar00rootroot00000000000000See: https://www.elastic.co/guide/en/elasticsearch/client/python-api/master/release-notes.html elasticsearch-py-7.17.6/CODE_OF_CONDUCT.md000066400000000000000000000000631426163262700176630ustar00rootroot00000000000000See: https://www.elastic.co/community/codeofconductelasticsearch-py-7.17.6/CONTRIBUTING.md000066400000000000000000000070111426163262700173150ustar00rootroot00000000000000# Contributing to the Python Elasticsearch Client If you have a bugfix or new feature that you would like to contribute to elasticsearch-py, please find or open an issue about it first. Talk about what you would like to do. It may be that somebody is already working on it, or that there are particular issues that you should know about before implementing the change. We enjoy working with contributors to get their code accepted. There are many approaches to fixing a problem and it is important to find the best approach before writing too much code. ## Running Elasticsearch locally We've provided a script to start an Elasticsearch cluster of a certain version found at `.ci/run-elasticsearch.sh`. There are several environment variables that control integration tests: - `PYTHON_VERSION`: Version of Python to use, defaults to `3.9` - `PYTHON_CONNECTION_CLASS`: Connection class to use, defaults to `Urllib3HttpConnection` - `STACK_VERSION`: Version of Elasticsearch to use. 
These should be the same as tags of `docker.elastic.co/elasticsearch/elasticsearch` such as `8.0.0-SNAPSHOT`, `7.x-SNAPSHOT`, etc. Defaults to the same `*-SNAPSHOT` version as the branch.

**NOTE: You don't need to run the live integration tests for all changes. If you don't have Elasticsearch running locally the integration tests will be skipped.**

## API Code Generation

All the API methods (any method in `elasticsearch.client` classes decorated with `@query_params`) are actually auto-generated from the [rest-api-spec](https://github.com/elastic/elasticsearch/tree/master/rest-api-spec/src/main/resources/rest-api-spec/api) found in the `Elasticsearch` repository or from the [Elasticsearch specification](https://github.com/elastic/elasticsearch-specification) repository. Any changes to those methods should be made by submitting a PR to one of these repositories rather than directly to the Python client; otherwise your change will be overwritten the next time the APIs are generated.

## Contributing Code Changes

The process for contributing to any of the Elasticsearch repositories is similar.

1. Please make sure you have signed the [Contributor License Agreement](http://www.elastic.co/contributor-agreement/). We are not asking you to assign copyright to us, but to give us the right to distribute your code without restriction. We ask this of all contributors in order to assure our users of the origin and continuing existence of the code. You only need to sign the CLA once.

2. Run the linter and test suite to ensure your changes do not break existing code:

   ```
   # Install Nox for task management
   $ python -m pip install nox

   # Auto-format and lint your changes
   $ nox -rs format

   # Run the test suite
   $ nox -rs test
   ```

3. Rebase your changes. Update your local repository with the most recent code from the main elasticsearch-py repository, and rebase your branch on top of the latest master branch. We prefer your changes to be squashed into a single commit for easier backporting.

4. Submit a pull request. Push your local changes to your forked copy of the repository and submit a pull request. In the pull request, describe what your changes do and mention the number of the issue where discussion has taken place, e.g. "Closes #123". Please consider adding or modifying tests related to your changes.

Then sit back and wait. There will probably be a discussion about the pull request and, if any changes are needed, we would love to work with you to get your pull request merged into elasticsearch-py.
elasticsearch-py-7.17.6/LICENSE000066400000000000000000000236371426163262700161050ustar00rootroot00000000000000
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. elasticsearch-py-7.17.6/MANIFEST.in000066400000000000000000000005041426163262700166220ustar00rootroot00000000000000include CONTRIBUTING.md include LICENSE include MANIFEST.in include README.rst include NOTICE include CHANGELOG.md include setup.py recursive-include elasticsearch* py.typed *.pyi recursive-include docs/sphinx * prune docs/sphinx/_build prune test_elasticsearch recursive-exclude * __pycache__ recursive-exclude * *.py[co] elasticsearch-py-7.17.6/NOTICE000066400000000000000000000000751426163262700157730ustar00rootroot00000000000000Elasticsearch Python Client Copyright 2022 Elasticsearch B.V.elasticsearch-py-7.17.6/README.rst000066400000000000000000000065741426163262700165700ustar00rootroot00000000000000.. raw:: html Elasticsearch Python Client =========================== .. image:: https://img.shields.io/pypi/v/elasticsearch :target: https://pypi.org/project/elasticsearch .. image:: https://img.shields.io/conda/vn/conda-forge/elasticsearch?color=blue :target: https://anaconda.org/conda-forge/elasticsearch .. image:: https://pepy.tech/badge/elasticsearch :target: https://pepy.tech/project/elasticsearch?versions=* .. image:: https://clients-ci.elastic.co/job/elastic+elasticsearch-py+master/badge/icon :target: https://clients-ci.elastic.co/job/elastic+elasticsearch-py+master .. 
image:: https://readthedocs.org/projects/elasticsearch-py/badge/?version=latest&style=flat
   :target: https://elasticsearch-py.readthedocs.io

*The official Python client for Elasticsearch.*

Features
--------

* Translating basic Python data types to and from JSON
* Configurable automatic discovery of cluster nodes
* Persistent connections
* Load balancing (with pluggable selection strategy) across available nodes
* Failed connection penalization (time based - failed connections won't be retried until a timeout is reached)
* Support for TLS and HTTP authentication
* Thread safety across requests
* Pluggable architecture
* Helper functions for idiomatically using APIs together

Installation
------------

Install the ``elasticsearch`` package with `pip <https://pypi.org/project/elasticsearch>`_::

    $ python -m pip install elasticsearch

If your application uses async/await in Python you can install with the ``async`` extra::

    $ python -m pip install elasticsearch[async]

Read more about `how to use asyncio with this project <https://elasticsearch-py.readthedocs.io/en/master/async.html>`_.

Compatibility
-------------

Language clients are forward compatible, meaning that clients support communicating with greater or equal minor versions of Elasticsearch. Elasticsearch language clients are only backwards compatible with default distributions, and without guarantees. If you need multiple versions installed at the same time, older versions are also released as ``elasticsearch2`` and ``elasticsearch5``.

Documentation
-------------

Documentation for the client is `available on elastic.co`_ and `Read the Docs`_.

.. _available on elastic.co: https://www.elastic.co/guide/en/elasticsearch/client/python-api/current/index.html
.. _Read the Docs: https://elasticsearch-py.readthedocs.io

Quick Start
-----------

.. code-block:: python

    # Import the client from the 'elasticsearch' module
    >>> from elasticsearch import Elasticsearch

    # Instantiate a client instance
    >>> client = Elasticsearch("http://localhost:9200")

    # Call an API, in this example `info()`
    >>> resp = client.info()

    # View the result
    >>> resp
    {
      "name" : "instance-name",
      "cluster_name" : "cluster-name",
      "cluster_uuid" : "cluster-uuid",
      "version" : {
        "number" : "7.14.0",
        ...
      },
      "tagline" : "You know, for Search"
    }

You can read more about `configuring the client`_ in the documentation.

.. _configuring the client: https://www.elastic.co/guide/en/elasticsearch/client/python-api/current/connecting.html

License
-------

Copyright 2021 Elasticsearch B.V. Licensed under the Apache License, Version 2.0.
elasticsearch-py-7.17.6/dev-requirements.txt000066400000000000000000000013761426163262700211300ustar00rootroot00000000000000requests>=2, <3
pytest
pytest-cov
coverage
mock
sphinx
jinja2
python-dateutil

# Testing the 'search_mvt' API response
mapbox-vector-tile; python_version!="3.4.*"
# For mapbox-vector-tile, the package broke Python 2 support without an annotation.
# See: protocolbuffers/protobuf#8984
protobuf<3.18; python_version!="3.4.*"

# No wheels for Python 3.10 yet!
numpy; python_version<"3.10"
pandas; python_version<"3.10"

# PyYAML 5.3 dropped support for Python 3.4 while
# not amending that requirement to the package.
# :(
pyyaml>=5.4; python_version>="3.6"
pyyaml<5.3; python_version<"3.6"

isort
black; python_version>="3.6"
twine

# Requirements for testing [async] extra
aiohttp; python_version>="3.6"
pytest-asyncio; python_version>="3.6"
unasync; python_version>="3.6"
elasticsearch-py-7.17.6/docs/000077500000000000000000000000001426163262700160155ustar00rootroot00000000000000elasticsearch-py-7.17.6/docs/guide/000077500000000000000000000000001426163262700171125ustar00rootroot00000000000000elasticsearch-py-7.17.6/docs/guide/configuration.asciidoc000066400000000000000000000004051426163262700234600ustar00rootroot00000000000000[[config]]
== Configuration

This page contains information about the most important configuration options of the Python {es} client.

* <<connection-pool>>
* <<connection-selector>>

include::connection-pool.asciidoc[]
include::connection-selector.asciidoc[]
elasticsearch-py-7.17.6/docs/guide/connecting.asciidoc000066400000000000000000000111331426163262700227400ustar00rootroot00000000000000[[connecting]]
== Connecting

This page contains the information you need to connect the Client with {es}.

[discrete]
[[authentication]]
=== Authentication

This section contains code snippets to show you how to connect to various {es} providers.

[discrete]
[[auth-ec]]
==== Elastic Cloud

Cloud ID is an easy way to configure your client to work with your Elastic Cloud deployment. Combine the `cloud_id` with either `http_auth` or `api_key` to authenticate with your Elastic Cloud deployment.

Using `cloud_id` enables TLS verification and HTTP compression by default and sets the port to 443 unless otherwise overwritten via the port parameter or the port value encoded within `cloud_id`. Using Cloud ID also disables sniffing.

[source,py]
----------------------------
from elasticsearch import Elasticsearch

es = Elasticsearch(
    cloud_id="cluster-1:dXMa5Fx..."
)
----------------------------

[discrete]
[[auth-http]]
==== HTTP Authentication

HTTP authentication uses the `http_auth` parameter by passing in a username and password within a tuple:

[source,py]
----------------------------
from elasticsearch import Elasticsearch

es = Elasticsearch(
    http_auth=("username", "password")
)
----------------------------

[discrete]
[[auth-apikey]]
==== ApiKey authentication

You can configure the client to use {es}'s API Key for connecting to your cluster.

[source,py]
----------------------------
from elasticsearch import Elasticsearch

es = Elasticsearch(
    api_key=("api_key_id", "api_key_secret")
)
----------------------------

[discrete]
[[compatibility-mode]]
=== Enabling the Compatibility Mode

The Elasticsearch server version 8.0 introduces a new compatibility mode that allows you a smoother upgrade experience from 7 to 8. In a nutshell, you can use the latest 7.x Python Elasticsearch client with an 8.x Elasticsearch server, giving more room to coordinate the upgrade of your codebase to the next major version.

If you want to leverage this functionality, please make sure that you are using the latest 7.x Python Elasticsearch client and set the environment variable `ELASTIC_CLIENT_APIVERSIONING` to `true`. The client handles the rest internally. For every Python Elasticsearch client 8.0 and beyond, you're all set: the compatibility mode is enabled by default.

[discrete]
[[connecting-faas]]
=== Using the Client in a Function-as-a-Service Environment

This section illustrates the best practices for leveraging the {es} client in a Function-as-a-Service (FaaS) environment.
The most influential optimization is to initialize the client outside of the function, in the global scope. This practice not only improves performance but also enables background functionality such as, for example, https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[sniffing]. The following examples provide a skeleton for the best practices.

[discrete]
[[connecting-faas-gcp]]
==== GCP Cloud Functions

[source,py]
----------------------------
from elasticsearch import Elasticsearch

client = Elasticsearch(
    ... # Client initialization
)

def main(request):
    ... # Use the client

----------------------------

[discrete]
[[connecting-faas-aws]]
==== AWS Lambda

[source,py]
----------------------------
from elasticsearch import Elasticsearch

client = Elasticsearch(
    ... # Client initialization
)

def main(event, context):
    ... # Use the client

----------------------------

[discrete]
[[connecting-faas-azure]]
==== Azure Functions

[source,py]
----------------------------
import azure.functions as func
from elasticsearch import Elasticsearch

client = Elasticsearch(
    ... # Client initialization
)

def main(request: func.HttpRequest) -> func.HttpResponse:
    ... # Use the client

----------------------------

IMPORTANT: The async client shouldn't be used within Function-as-a-Service as a new event loop must be started for each invocation. Instead, the synchronous `Elasticsearch` client is recommended.

Resources used to assess these recommendations:

* https://cloud.google.com/functions/docs/bestpractices/tips#use_global_variables_to_reuse_objects_in_future_invocations[GCP Cloud Functions: Tips & Tricks]
* https://docs.aws.amazon.com/lambda/latest/dg/best-practices.html[Best practices for working with AWS Lambda functions]
* https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python?tabs=azurecli-linux%2Capplication-level#global-variables[Azure Functions Python developer guide]
* https://docs.aws.amazon.com/lambda/latest/operatorguide/global-scope.html[AWS Lambda: Comparing the effect of global scope]
elasticsearch-py-7.17.6/docs/guide/connection-pool.asciidoc000066400000000000000000000016171426163262700237250ustar00rootroot00000000000000[[connection-pool]]
=== Connection pool

The connection pool is a container that holds the `Connection` instances, manages the selection process (via a `ConnectionSelector`) and dead connections.

Initially, connections are stored in the class as a list and, along with the connection options, get passed to the `ConnectionSelector` instance for future reference.

Upon each request, the `Transport` asks for a `Connection` via the `get_connection` method. If the connection fails, it is marked as dead (via `mark_dead`) and put on a timeout. When the timeout is over, the connection is resurrected and returned to the live pool. A connection that has been previously marked as dead and then succeeds is marked as live (its fail count is deleted).

For reference information, refer to the https://elasticsearch-py.readthedocs.io/en/latest/connection.html#connection-pool[full {es} Python documentation].
elasticsearch-py-7.17.6/docs/guide/connection-selector.asciidoc000066400000000000000000000020471426163262700245720ustar00rootroot00000000000000[[connection-selector]]
=== Connection selector

A connection selector is a simple class used to select a connection from a list of currently live connection instances. Initially, it is passed a dictionary containing the options for all connections, which it can then use during the selection process.
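As an illustration, here is a minimal sketch of a custom selector. The `ZoneAwareSelector` name, the `zone` host key, and the zone values are hypothetical and shown only to demonstrate the mechanism; any extra keys you add to a host dictionary are passed through to the connection options, where a selector can read them back.

[source,py]
----------------------------
import random

from elasticsearch import Elasticsearch
from elasticsearch.connection_pool import ConnectionSelector

class ZoneAwareSelector(ConnectionSelector):
    # Hypothetical zone of the current process.
    my_zone = "zone-a"

    def select(self, connections):
        # Prefer connections whose options carry our own zone,
        # fall back to any live connection otherwise.
        local = [
            c for c in connections
            if self.connection_opts[c].get("zone") == self.my_zone
        ]
        return random.choice(local or connections)

es = Elasticsearch(
    hosts=[
        {"host": "es1", "zone": "zone-a"},
        {"host": "es2", "zone": "zone-b"},
    ],
    selector_class=ZoneAwareSelector,
)
----------------------------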
When the _select_ method is called, it is given a list of currently live connections to choose from. The options dictionary is the one that was passed to `Transport` (as the `hosts` parameter) and the same one used to construct the connection object itself. When the connection was created based on information retrieved from the cluster via the sniffing process, it is the dictionary returned by the `host_info_callback`.

An example of where this might be useful is a zone-aware selector like the sketch above, which selects only connections from its own zone and falls back to connections in other zones only when none are available in its own.

For reference information, refer to the https://elasticsearch-py.readthedocs.io/en/latest/connection.html#connection-selector[full {es} Python documentation].
elasticsearch-py-7.17.6/docs/guide/examples.asciidoc000066400000000000000000000044351426163262700224360ustar00rootroot00000000000000[[examples]]
== Examples

Below you can find examples of how to use the most frequently called APIs with the Python client.

* <<ex-index>>
* <<ex-get>>
* <<ex-refresh>>
* <<ex-search>>
* <<ex-update>>
* <<ex-delete>>

[discrete]
[[ex-index]]
=== Indexing a document

To index a document, you need to specify three pieces of information: `index`, `id`, and a `body`:

[source,py]
----------------------------
from datetime import datetime
from elasticsearch import Elasticsearch
es = Elasticsearch()

doc = {
    'author': 'author_name',
    'text': 'Interesting content...',
    'timestamp': datetime.now(),
}
res = es.index(index="test-index", id=1, body=doc)
print(res['result'])
----------------------------

[discrete]
[[ex-get]]
=== Getting a document

To get a document, you need to specify its `index` and `id`:

[source,py]
----------------------------
res = es.get(index="test-index", id=1)
print(res['_source'])
----------------------------

[discrete]
[[ex-refresh]]
=== Refreshing an index

You can perform the refresh operation on an index:

[source,py]
----------------------------
es.indices.refresh(index="test-index")
----------------------------

[discrete]
[[ex-search]]
=== Searching for a document

The `search()` method returns results that match a query:

[source,py]
----------------------------
res = es.search(index="test-index", query={"match_all": {}})
print("Got %d Hits:" % res['hits']['total']['value'])
for hit in res['hits']['hits']:
    print("%(timestamp)s %(author)s: %(text)s" % hit["_source"])
----------------------------

[discrete]
[[ex-update]]
=== Updating a document

To update a document, you need to specify three pieces of information: `index`, `id`, and a `body`:

[source,py]
----------------------------
from datetime import datetime
from elasticsearch import Elasticsearch
es = Elasticsearch()

doc = {
    'author': 'author_name',
    'text': 'Interesting modified content...',
    'timestamp': datetime.now(),
}
res = es.update(index="test-index", id=1, body=doc)
print(res['result'])
----------------------------

[discrete]
[[ex-delete]]
=== Deleting a document

You can delete a document by specifying its `index`, and `id` in the `delete()` method:

[source,py]
----------------------------
es.delete(index="test-index", id=1)
----------------------------
elasticsearch-py-7.17.6/docs/guide/helpers.asciidoc000066400000000000000000000047271426163262700222660ustar00rootroot00000000000000[[client-helpers]]
== Client helpers

Here you can find a collection of simple helper functions that abstract some specifics of the raw API. For detailed examples, refer to https://elasticsearch-py.readthedocs.io/en/stable/helpers.html[this page].
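For instance, the `bulk()` helper described in the next section can ingest the output of a generator in a single call. Here is a minimal sketch; the index name and document fields are illustrative only:

[source,py]
----------------------------
from datetime import datetime

from elasticsearch import Elasticsearch, helpers

es = Elasticsearch()

def generate_actions():
    # Any iterable works; a generator avoids loading
    # the whole dataset into memory.
    for i in range(100):
        yield {
            "_index": "my-index",
            "_id": i,
            "_source": {
                "title": "Document %d" % i,
                "timestamp": datetime.now(),
            },
        }

# Returns the number of successfully executed actions
# and a list of errors.
success, errors = helpers.bulk(es, generate_actions())
----------------------------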
[discrete]
[[bulk-helpers]]
=== Bulk helpers

There are several helpers for the bulk API since its requirement for specific formatting and other considerations can make it cumbersome if used directly.

All bulk helpers accept an instance of the `{es}` class and an iterable `actions` (any iterable, can also be a generator, which is ideal in most cases since it allows you to index large datasets without the need to load them into memory).

The items in the `actions` iterable should be the documents we wish to index, and they can take several formats. The most common one is the same as returned by `search()`, for example:

[source,yml]
----------------------------
{
  '_index': 'index-name',
  '_type': 'document',
  '_id': 42,
  '_routing': 5,
  'pipeline': 'my-ingest-pipeline',
  '_source': {
    "title": "Hello World!",
    "body": "..."
  }
}
----------------------------

Alternatively, if `_source` is not present, the helper pops all metadata fields from the document and uses the rest as the document data:

[source,yml]
----------------------------
{
  "_id": 42,
  "_routing": 5,
  "title": "Hello World!",
  "body": "..."
}
----------------------------

The `bulk()` API accepts `index`, `create`, `delete`, and `update` actions. Use the `_op_type` field to specify an action (`_op_type` defaults to `index`):

[source,yml]
----------------------------
{
  '_op_type': 'delete',
  '_index': 'index-name',
  '_type': 'document',
  '_id': 42,
}
{
  '_op_type': 'update',
  '_index': 'index-name',
  '_type': 'document',
  '_id': 42,
  'doc': {'question': 'The life, universe and everything.'}
}
----------------------------

[discrete]
[[scan]]
=== Scan

`scan()` is a simple abstraction on top of the `scroll()` API: an iterator that yields all hits as returned by the underlying scroll requests.

By default, scan does not return results in any pre-determined order. To have a standard order in the returned documents (either by score or explicit sort definition) when scrolling, use `preserve_order=True`. This may be an expensive operation and will negate the performance benefits of using `scan`.
[source,py]
----------------------------
scan(es,
    query={"query": {"match": {"title": "python"}}},
    index="orders-*",
    doc_type="books"
)
----------------------------
elasticsearch-py-7.17.6/docs/guide/index.asciidoc000066400000000000000000000005661426163262700217300ustar00rootroot00000000000000= Elasticsearch Python Client

:doctype: book

include::{asciidoc-dir}/../../shared/attributes.asciidoc[]

include::overview.asciidoc[]

include::installation.asciidoc[]

include::connecting.asciidoc[]

include::configuration.asciidoc[]

include::integrations.asciidoc[]

include::examples.asciidoc[]

include::helpers.asciidoc[]

include::release-notes.asciidoc[]
elasticsearch-py-7.17.6/docs/guide/installation.asciidoc000066400000000000000000000010711426163262700233120ustar00rootroot00000000000000[[installation]]
== Installation

The Python client for {es} can be installed with pip:

[source,sh]
-------------------------------------
$ python -m pip install elasticsearch
-------------------------------------

If your application uses async/await in Python, you can install the client with the `async` extra:

[source,sh]
--------------------------------------------
$ python -m pip install elasticsearch[async]
--------------------------------------------

Read more about https://elasticsearch-py.readthedocs.io/en/master/async.html[how to use Asyncio with this project].
elasticsearch-py-7.17.6/docs/guide/integrations.asciidoc000066400000000000000000000016041426163262700233210ustar00rootroot00000000000000[[integrations]]
== Integrations

You can find integration options and information on this page.

[discrete]
[[transport]]
=== Transport

The `Transport` class is part of the https://elasticsearch-py.readthedocs.io/en/latest/connection.html[Connection Layer API], which contains all the classes that are responsible for handling the connection to the {es} cluster. The `Transport` class is an encapsulation of the transport-related logic of the Python client. For the exhaustive list of parameters, refer to the https://elasticsearch-py.readthedocs.io/en/latest/connection.html#transport[documentation].

[discrete]
[[transport-classes]]
==== Transport classes

The `Transport` classes can be used to maintain a connection with an {es} cluster. For the reference information of these classes, refer to the https://elasticsearch-py.readthedocs.io/en/latest/transports.html[documentation].
elasticsearch-py-7.17.6/docs/guide/overview.asciidoc000066400000000000000000000065611426163262700224670ustar00rootroot00000000000000[[overview]]
== Overview

This is the official low-level Python client for {es}. Its goal is to provide common ground for all {es}-related code in Python. For this reason, the client is designed to be unopinionated and extendable. Full documentation is available on https://elasticsearch-py.readthedocs.io[Read the Docs].

[discrete]
=== Compatibility

Language clients are forward compatible, meaning that clients support communicating with greater or equal minor versions of Elasticsearch. Elasticsearch language clients are only backwards compatible with default distributions, and no guarantees are made. If you need to have multiple versions installed at the same time, older versions are also released as `elasticsearch2` and `elasticsearch5`.

[discrete]
=== Example use

A simple use case:

[source,python]
------------------------------------
>>> from datetime import datetime
>>> from elasticsearch import Elasticsearch

# By default we connect to localhost:9200
>>> es = Elasticsearch()

# Datetimes will be serialized...
>>> es.index(index="my-index-000001", doc_type="test-type", id=42, body={"any": "data", "timestamp": datetime.now()})
{'_id': '42', '_index': 'my-index-000001', '_type': 'test-type', '_version': 1, 'ok': True}

# ...but not deserialized
>>> es.get(index="my-index-000001", doc_type="test-type", id=42)['_source']
{'any': 'data', 'timestamp': '2013-05-12T19:45:31.804229'}
------------------------------------

[NOTE]
All the API calls map the raw REST API as closely as possible, including the distinction between required and optional arguments to the calls. This means that the code makes a distinction between positional and keyword arguments; we, however, recommend that people use keyword arguments for all calls for consistency and safety.

TIP: For an elaborate example of how to ingest data into Elastic Cloud, refer to {cloud}/ec-getting-started-python.html[this page].

[discrete]
=== Features

The client's features include:

* Translating basic Python data types to and from JSON
* Configurable automatic discovery of cluster nodes
* Persistent connections
* Load balancing (with pluggable selection strategy) across all available nodes
* Failed connection penalization (time-based - failed connections won't be retried until a timeout is reached)
* Thread safety
* Pluggable architecture

The client also contains a convenient set of https://elasticsearch-py.readthedocs.org/en/master/helpers.html[helpers] for some of the more engaging tasks like bulk indexing and reindexing.

[discrete]
=== Elasticsearch DSL

For a higher-level client library with a more limited scope, have a look at https://elasticsearch-dsl.readthedocs.org/[elasticsearch-dsl] - a more Pythonic library sitting on top of `elasticsearch-py`. It provides a more convenient and idiomatic way to write and manipulate https://elasticsearch-dsl.readthedocs.org/en/latest/search_dsl.html[queries]. It stays close to the Elasticsearch JSON DSL, mirroring its terminology and structure while exposing the whole range of the DSL from Python, either directly using defined classes or queryset-like expressions.

It also provides an optional https://elasticsearch-dsl.readthedocs.org/en/latest/persistence.html#doctype[persistence layer] for working with documents as Python objects in an ORM-like fashion: defining mappings, retrieving and saving documents, wrapping the document data in user-defined classes.
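As a quick illustration of the DSL's flavor, a match query could be written like the following minimal sketch (it assumes `elasticsearch-dsl` is installed and that an index named `my-index` exists):

[source,py]
----------------------------
from elasticsearch import Elasticsearch
from elasticsearch_dsl import Search

client = Elasticsearch()

# Build the query with keyword arguments instead of raw JSON.
s = Search(using=client, index="my-index").query("match", title="python")

response = s.execute()
for hit in response:
    # Hit fields are available as attributes.
    print(hit.meta.score, hit.title)
----------------------------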
elasticsearch-py-7.17.6/docs/guide/release-notes.asciidoc000066400000000000000000000453231426163262700233670ustar00rootroot00000000000000[[release-notes]] == Release notes * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> * <> [discrete] [[rn-7-17-4]] === 7.17.4 (2022-06-01) * Client is compatible with Elasticsearch 7.17.4 [discrete] [[rn-7-17-3]] === 7.17.3 (2022-04-26) * Client is compatible with Elasticsearch 7.17.3 [discrete] [[rn-7-17-2]] === 7.17.2 (2022-03-30) * Client is compatible with Elasticsearch 7.17.2 [discrete] [[rn-7-17-1]] === 7.17.1 (2022-02-28) * Fixed `AiohttpHttpConnection`` to not leak TLS connections when the socket isn't explicitly shutdown by the peer * Fixed the `from` parameter to be rewritten to `from_` when used with the `scan` and `async_scan` helpers [discrete] [[rn-7-17-0]] === 7.17.0 (2022-02-01) * Client is compatible with Elasticsearch 7.17.0 [discrete] [[rn-7-16-3]] === 7.16.3 (2022-01-13) * Client is compatible with Elasticsearch 7.16.3 [discrete] [[rn-7-16-2]] === 7.16.2 (2021-12-27) * Client is compatible with Elasticsearch 7.16.2 [discrete] [[rn-7-16-1]] === 7.16.1 (2021-12-13) [discrete] ==== Fixed * Fixed issue where the `AIOHttpConnection` wouldn't log query parameters for URLs. [discrete] [[rn-7-16-0]] === 7.16.0 (2021-12-07) [discrete] ==== Deprecated * Deprecated the `send_get_body_as` parameter. This parameter is no longer necessary as APIs all use non-GET HTTP methods when using a body. * Removal of `body`, `params`, and other per-request parameters has been delayed beyond 8.0.0. Changed deprecation warnings to mention "future version" instead of 8.0.0. [discrete] ==== Fixed * Fixed an issue with `unicode` HTTP headers with the urllib3 HTTP client * Fixed an issue with the `scan` helper to always set the `sort` and `scroll` parameters [discrete] ==== Search * Changed the `keep_alive` parameter of the `open_point_in_time` API to be required to reflect its required status within Elasticsearch * Added the `track_total_hits` parameter to the `search_mvt` API [discrete] ==== Fleet * Changed the `fleet.global_checkpoints` API from **experimental** to **stable** * Added the `fleet.search` **experimental** API * Added the `fleet.msearch` **experimental** API [discrete] ==== Indices * Added the `indices.modify_data_stream` API [discrete] ==== Ingest * Added the `if_version` parameter to the `ingest.put_pipeline` API [discrete] ==== Migration * Added the `migration.get_feature_upgrade_status` API * Added the `migration.post_feature_upgrade` API [discrete] ==== Machine Learning * Added the `defer_definition_decompression` parameter to the `ml.put_trained_model` API [discrete] ==== Transforms * Added the `transform.upgrade_transforms` API [discrete] [[rn-7-15-2]] === 7.15.2 (2021-11-10) [discrete] ==== Nodes * Documented additional options the `metric` parameter of the `nodes.info` API. [discrete] [[rn-7-15-1]] === 7.15.1 (2021-10-14) [discrete] ==== Client * Fixed a performance regression in `JSONSerializer.default()` when `numpy` and `pandas` weren't installed. * Changed the `DeprecationWarning` for the `body` parameter to be a "removed in a future version" instead of "removed in 8.0" in line with the 8.0 roadmap. [discrete] ==== Search * The `index` parameter of the `open_point_in_time` API is now required, was optional. 
[discrete] [[rn-7-15-0]] === 7.15.0 (2021-09-22) [discrete] ==== Client * Added more precise type hints to many API parameters * Added explicit parameters to `AsyncTransport` and `AIOHttpConnection` * Added `MapboxVectorTileSerializer` for handling the `application/vnd.mapbox-vector-tile` mimetype. Because this mimetype is binary rather than text the raw response `bytes` are forwarded from the serializer without decoding * Reduced amount of time to import the `elasticsearch` module by delaying imports of `pandas` and `numpy` until later in the JSON serialization stage if necessary * Deprecated positional arguments for APIs, instead use keyword arguments exclusively. [discrete] ==== Search * Added the `search_mvt` **experimental** API * Added body field parameters to the `search`, `scroll`, and `clear_scroll` APIs * Deprecated the `body` parameter of the `search`, `scroll`, and `clear_scroll` APIs [discrete] ==== Documents * Added body field parameters to the `update` API * Added the `document` parameter to the `create` and `index` APIs * Deprecated the `body` parameter of the `create`, `index`, and `update` APIs [discrete] ==== Indices * Added the `indices.disk_usage` **experimental** API * Added the `indices.fields_usage_stats` **experimental** API * Added body field parameters to the `indices.create` API * Deprecated the `body` parameter of the `indices.create` API [discrete] ==== Machine Learning * Added the `ignore_unavailable`, `allow_no_indices`, `ignore_throttled`, and `expand_wildcards` parameters to the `ml.put_job` API [discrete] ==== Nodes * Added the `nodes.clear_repositories_metering_archive` **experimental** API * Added the `nodes.get_repositories_metering_info` **experimental** API * Added the `shards` option to the `index_metric` parameter of the `nodes.stats` API * Deprecated the `doc_type` parameter of the `nodes.hot_threads` API, instead use the `type` parameter [discrete] ==== Security * Added the `security.query_api_keys` API [discrete] ==== License * Deprecated the `doc_type` parameter of the `license.post_start_trial` API, instead use the `type` parameter [discrete] [[rn-7-14-0]] === 7.14.0 (2021-08-02) * Added check that client is connected to an Elasticsearch cluster. If the client isn't connected to a supported Elasticsearch cluster the `UnsupportedProductError` exception will be raised. 
[discrete]
==== Search

* Added the `terms_enum` **beta** API
* Removed the `query_and_fetch` and `dfs_query_and_fetch` options in the `search_type` parameter to the `msearch`, `msearch_template` and `search_template` APIs

[discrete]
==== Index Lifecycle Management

* Added the `ilm.migrate_to_data_tiers` API

[discrete]
==== Machine Learning

* Added the `ml.reset_job` API

[discrete]
==== Security

* Added the `security.saml_authenticate` API
* Added the `security.saml_complete_logout` API
* Added the `security.saml_invalidate` API
* Added the `security.saml_logout` API
* Added the `security.saml_prepare_authentication` API
* Added the `security.saml_service_provider_metadata` API

[discrete]
==== SQL

* Added the `sql.delete_async` API
* Added the `sql.get_async` API
* Added the `sql.get_async_status` API

[discrete]
==== Snapshots

* Added the `include_repository` parameter to the `snapshot.get` API
* Added the `rarely_abort_writes` parameter to the `snapshot.repository_analyze` API

[discrete]
[[rn-7-13-4]]
=== 7.13.4 (2021-07-20)

* Client is compatible with Elasticsearch 7.13.4

[discrete]
[[rn-7-13-3]]
=== 7.13.3 (2021-07-07)

* Fixed an issue where a `NameError` would be raised on Python 2.7 and 3.4 when a connection error would have otherwise been raised.

[discrete]
[[rn-7-13-2]]
=== 7.13.2 (2021-06-21)

* Fixed `Transport.perform_request()` to properly reraise `RecursionError`
* Fixed `AIOHttpConnection` to no longer send `Accept-Encoding: gzip, deflate` when `http_compress=None`. Instead it now sends no `Accept-Encoding` header in the default case

[discrete]
==== Snapshot

* Added the `snapshot.repository_analyze` API

[discrete]
[[rn-7-13-1]]
=== 7.13.1 (2021-06-02)

* Client is compatible with Elasticsearch 7.13.1

[discrete]
[[rn-7-13-0]]
=== 7.13.0 (2021-05-25)

* Added support for the compatibility header for Elasticsearch. If the environment variable `ELASTIC_CLIENT_APIVERSIONING=1` is set, the client will send the Accept and Content-Type headers with the following value: `application/vnd.elasticsearch+json;compatible-with=7`.

[discrete]
==== Cat

* Added the `include_unloaded_segments` parameter to the `cat.nodes` API
* Added the `features.reset_features` **experimental** API
* Added the `fleet.global_checkpoints` **experimental** API
* Added the `ingest.geo_ip_stats` API

[discrete]
==== Machine Learning

* Added the `ml.delete_trained_model_alias` API
* Added the `ml.preview_data_frame_analytics` API
* Added the `ml.put_trained_model_alias` API
* Changed the `ml.delete_data_frame_analytics`, `ml.delete_trained_model`, `ml.explain_data_frame_analytics`, `ml.get_data_frame_analytics`, `ml.get_data_frame_analytics_stats`, `ml.get_trained_models`, `ml.get_trained_models_stats`, `ml.put_trained_model`, `ml.start_data_frame_analytics`, `ml.stop_data_frame_analytics`, `ml.update_data_frame_analytics` APIs from **beta** to **stable**.
[discrete]
==== Nodes

* Added the `include_unloaded_segments` parameter to the `node.stats` API

[discrete]
==== Searchable Snapshots

* Added the `searchable_snapshots.cache_stats` **experimental** API

[discrete]
==== Security

* Added the `security.clear_cached_service_tokens` **beta** API
* Added the `security.create_service_token` **beta** API
* Added the `security.delete_service_token` **beta** API
* Added the `security.get_service_accounts` **beta** API
* Added the `security.get_service_credentials` **beta** API

[discrete]
==== Shutdown

* Added the `shutdown.delete_node` **experimental** API
* Added the `shutdown.get_node` **experimental** API
* Added the `shutdown.put_node` **experimental** API

[discrete]
==== Snapshots

* Added the `index_details` parameter to the `snapshot.get` API

[discrete]
==== Text Structure

* Changed the `text_structure.find_structure` API from **experimental** to **stable**

[discrete]
[[rn-7-12-1]]
=== 7.12.1 (2021-04-27)

[discrete]
==== Text Structure

* Changed the `text_structure.find_text_structure` API from **experimental** to **stable**

[discrete]
[[rn-7-12-0]]
=== 7.12.0 (2021-03-23)

[discrete]
==== Autoscaling

* Changed the `autoscaling.delete_autoscaling_policy`, `autoscaling.get_autoscaling_policy`, and `autoscaling.put_autoscaling_policy` APIs from **experimental** to **stable**

[discrete]
==== EQL

* Added the `eql.get_status` API

[discrete]
==== Logstash

* Added the `logstash.delete_pipeline`, `logstash.get_pipeline`, and `logstash.put_pipeline` APIs

[discrete]
==== Machine Learning

* Removed the **experimental** `ml.find_text_structure` API

[discrete]
==== Searchable Snapshots

* Added the `storage` parameter to the `searchable_snapshots.mount` API
* Added the `level` parameter to the `searchable_snapshots.stats` API

[discrete]
==== Search

* Added the `min_compatible_shard_node` parameter to `search()`

[discrete]
==== Text Structure

* Added the **experimental** `text_structure.find_text_structure` API

[discrete]
[[rn-7-11-0]]
=== 7.11.0 (2021-02-10)

* Added support for 7.11 APIs.
* Added the `X-Elastic-Client-Meta` HTTP header and the `meta_header` parameter for controlling the header (https://github.com/elastic/elasticsearch-py/pull/1473[#1473]).
* Added `ElasticsearchWarning` which is raised when the `Warning` HTTP header is returned from {es}. `ElasticsearchDeprecationWarning` is now an alias for this warning type (https://github.com/elastic/elasticsearch-py/pull/1495[#1495]).

[discrete]
[[rn-7-10-1]]
=== 7.10.1 (2020-12-09)

* Fixed an issue where the Scan helper would fail if a `scroll` response returned without a value for `_shards.skipped` (https://github.com/elastic/elasticsearch-py/pull/1451[#1451]).
* Fixed handling of IPv6 hosts with a port in the computed `Connection.host` property (https://github.com/elastic/elasticsearch-py/pull/1460[#1460]).
* Fixed the documented task management API stability, which should have been "experimental" (https://github.com/elastic/elasticsearch-py/pull/1471[#1471]).
* Changed the deprecated `collections.Mapping` in favor of `collections.abc.Mapping` for Python 3.9 (https://github.com/elastic/elasticsearch-py/pull/1443[#1443]).

[discrete]
[[rn-7-10-0]]
=== 7.10.0 (2020-11-11)

* Added support for {es} 7.10 APIs.
* Added basic type stubs for static type checking and IDE auto-complete of API parameters (https://github.com/elastic/elasticsearch-py/pull/1297[#1297], https://github.com/elastic/elasticsearch-py/pull/1406[#1406]).
* Added support for https://www.elastic.co/guide/en/elasticsearch/reference/current/optimistic-concurrency-control.html[`Optimistic Concurrency Control options`] (`_if_seq_no`/`_if_primary_term`) to bulk helpers (https://github.com/elastic/elasticsearch-py/pull/1387[#1387]). * Added support for passing `_source` with `"_op_type": "update"` bulk helpers (https://github.com/elastic/elasticsearch-py/pull/1387[#1387]). * Fixed bug where `Connection.log_request_failure()` call would receive the compressed HTTP body rather than uncompressed when an error is raised for `RequestsHttpConnection` (https://github.com/elastic/elasticsearch-py/pull/1394[#1394]). * Fix a typo in AsyncTransport where `sniff_timeout` was used instead of `sniffer_timeout` (https://github.com/elastic/elasticsearch-py/pull/1431[#1431]). * Removed explicit `yarl` dependency from `[async]` extra to avoid issue where pip would override `aiohttp`'s pin of `yarl`. This is not a problem if you install with `--use-feature=2020-resolver`. Users should see no changes (https://github.com/elastic/elasticsearch-py/pull/1401[#1401]). [discrete] [[rn-7-9-1]] === 7.9.1 (2020-08-19) * Fixed the import of async helpers which were not available in 7.9.0 (https://github.com/elastic/elasticsearch-py/pull/1353[#1353]). * Added support for `url_prefix` when using `AIOHttpConnection` (https://github.com/elastic/elasticsearch-py/pull/1357[#1357]). [discrete] [[rn-7-9-0]] === 7.9.0 (2020-08-18) * Added support for ES 7.9 APIs. * Fixed retries to not raise an error when `sniff_on_connection_error=True` and a `TransportError` is raised during the sniff step. Instead the retry will continue or the error that triggered the retry will be raised (https://github.com/elastic/elasticsearch-py/pull/1279[#1279], https://github.com/elastic/elasticsearch-py/pull/1326[#1326]). [discrete] [[rn-7-8-1]] === 7.8.1 (2020-07-30) * Added the `accept_enterprise` parameter to `xpack.info` API (https://github.com/elastic/elasticsearch-py/pull/1337[#1337]). [discrete] [[rn-7-8-0]] === 7.8.0 (2020-06-18) * Added support for ES 7.8 APIs. * Added support for async/await with asyncio via `AsyncElasticsearch`. See https://elasticsearch-py.readthedocs.io/en/master/async.html[documentation] on `using Asyncio with {es} (https://github.com/elastic/elasticsearch-py/pull/1232[#1232], https://github.com/elastic/elasticsearch-py/pull/1235[#1235], https://github.com/elastic/elasticsearch-py/pull/1236[#1236]). * Added async helpers `async_bulk`, `async_streaming_bulk`, `async_scan`, and `async_reindex` (https://github.com/elastic/elasticsearch-py/pull/1260[#1260]). * Updated `exists_source` API to use non-deprecated {es} API routes when `doc_type` is not specified to suppress deprecation warnings (https://github.com/elastic/elasticsearch-py/pull/1272[#1272]). [discrete] [[rn-7-7-1]] === 7.7.1 (2020-05-26) * Updated `create`, `update`, `explain`, `get_source`, and `termvectors` APIs to use non-deprecated {es} API routes when `doc_type` is not specified to suppress deprecation warnings (https://github.com/elastic/elasticsearch-py/pull/1253[#1253]). [discrete] [[rn-7-7-0]] === 7.7.0 (2020-05-13) * Added support for ES 7.7 APIs (https://github.com/elastic/elasticsearch-py/pull/1182[#1182]). * Added `ElasticsearchDeprecationWarning` which is raised when a `Warning` HTTP header is sent by {es} (https://github.com/elastic/elasticsearch-py/pull/1179[#1179]). 
* Added support for serializing `numpy` and `pandas` data types to `JSONSerializer` (https://github.com/elastic/elasticsearch-py/pull/1180[#1180]). * Added `certifi` as a dependency so HTTPS connections work automatically. * Fixed duplicated parameters in some API docstrings (https://github.com/elastic/elasticsearch-py/pull/1169[#1169], thanks to https://github.com/mortenhauberg[Morten Hauberg]). [discrete] [[rn-7-6-0]] === 7.6.0 (2020-03-19) * Added support for ES 7.6 APIs. * Added support for https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html#_identifying_running_tasks[`X-Opaque-Id`] to identify long-running tasks. * Added support for HTTP compression to `RequestsHttpConnection`. * Updated default setting of `http_compress` when using `cloud_id` to `True`. * Updated default setting of `sniffing` when using `cloud_id` to `False`. * Updated default port to `443` if `cloud_id` and no other port is defined on the client or within `cloud_id`. * Updated `GET` HTTP requests that contain a body to `POST` where the API allows this to fix proxies rejecting these requests. * Fix regression of `client.cluster.state()` where the default `metric` should be set to `"_all"` if an index is given (https://github.com/elastic/elasticsearch-py/pull/1143[#1143]). * Fix regression of `client.tasks.get()` without a `task_id` having similar functionality to `client.tasks.list()` This will be removed in `v8.0` of `elasticsearch-py` (https://github.com/elastic/elasticsearch-py/pull/1157[#1157]). [discrete] [[rn-7-5-1]] === 7.5.1 (2020-01-19) * All API is now auto generated. * Deprecated the `.xpack` namespace. * Update client to support ES 7.5 APIs. [discrete] [[rn-7-1-0]] === 7.1.0 (2019-11-14) * Fix sniffing with `http.publish_host`. * Fix `request_timeout` for `indices` APIs. * Allow access to `x-pack` features without `xpack` namespace. * Fix mark dead. [discrete] [[rn-7-0-5]] === 7.0.5 (2019-10-01) * Fix `verify_certs=False`. [discrete] [[rn-7-0-4]] === 7.0.4 (2019-08-22) * Fix wheel distribution. [discrete] [[rn-7-0-3]] === 7.0.3 (2019-08-21) * Remove sleep in retries. * Pass `scroll_id` through body in `scroll`. * Add `user-agent`. [discrete] [[rn-7-0-2]] === 7.0.2 (2019-05-29) * Add connection parameter for Elastic Cloud cloud_id. * ML client uses client object for _bulk_body requests. [discrete] [[rn-7-0-1]] === 7.0.1 (2019-05-19) * Use black to format the code. * Update the test matrix to only use current pythons and 7.x ES. * Blocking pool must fit thread_count. * Update client to support missing ES 7 API's and query params. [discrete] [[rn-7-0-0]] === 7.0.0 (2019-04-11) * Removed deprecated option `update_all_types`. * Using insecure SSL configuration (`verify_cert=False`) raises a warning, this can be not showed with `ssl_show_warn=False`. * Add support for 7.x APIs in {es} both xpack and oss flavors. * Ordering of parameters may have changed for some APIs compared to 6.8. Use keyword arguments instead of positional arguments to work-around this change. elasticsearch-py-7.17.6/docs/sphinx/000077500000000000000000000000001426163262700173265ustar00rootroot00000000000000elasticsearch-py-7.17.6/docs/sphinx/Makefile000066400000000000000000000152061426163262700207720ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. 
SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." 
qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Elasticsearch.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Elasticsearch.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Elasticsearch" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Elasticsearch" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." elasticsearch-py-7.17.6/docs/sphinx/api.rst000066400000000000000000000135341426163262700206370ustar00rootroot00000000000000.. _api: API Documentation ================= All the API calls map the raw REST api as closely as possible, including the distinction between required and optional arguments to the calls. 
This means that the code makes distinction between positional and keyword arguments; we, however, recommend that people **use keyword arguments for all calls for consistency and safety**. .. note:: for compatibility with the Python ecosystem we use ``from_`` instead of ``from`` and ``doc_type`` instead of ``type`` as parameter names. Global Options -------------- Some parameters are added by the client itself and can be used in all API calls. Ignore ~~~~~~ An API call is considered successful (and will return a response) if elasticsearch returns a 2XX response. Otherwise an instance of :class:`~elasticsearch.TransportError` (or a more specific subclass) will be raised. You can see other exception and error states in :ref:`exceptions`. If you do not wish an exception to be raised you can always pass in an ``ignore`` parameter with either a single status code that should be ignored or a list of them: .. code-block:: python from elasticsearch import Elasticsearch es = Elasticsearch() # ignore 400 cause by IndexAlreadyExistsException when creating an index es.indices.create(index='test-index', ignore=400) # ignore 404 and 400 es.indices.delete(index='test-index', ignore=[400, 404]) Timeout ~~~~~~~ Global timeout can be set when constructing the client (see :class:`~elasticsearch.Connection`'s ``timeout`` parameter) or on a per-request basis using ``request_timeout`` (float value in seconds) as part of any API call, this value will get passed to the ``perform_request`` method of the connection class: .. code-block:: python # only wait for 1 second, regardless of the client's default es.cluster.health(wait_for_status='yellow', request_timeout=1) .. note:: Some API calls also accept a ``timeout`` parameter that is passed to Elasticsearch server. This timeout is internal and doesn't guarantee that the request will end in the specified time. Tracking Requests with Opaque ID ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ You can enrich your requests against Elasticsearch with an identifier string, that allows you to discover this identifier in `deprecation logs `_, to support you with `identifying search slow log origin `_ or to help with `identifying running tasks `_. .. code-block:: python from elasticsearch import Elasticsearch client = Elasticsearch() # You can apply X-Opaque-Id in any API request via 'opaque_id': resp = client.get(index="test", id="1", opaque_id="request-1") .. py:module:: elasticsearch Response Filtering ~~~~~~~~~~~~~~~~~~ The ``filter_path`` parameter is used to reduce the response returned by elasticsearch. For example, to only return ``_id`` and ``_type``, do: .. code-block:: python es.search(index='test-index', filter_path=['hits.hits._id', 'hits.hits._type']) It also supports the ``*`` wildcard character to match any field or part of a field's name: .. code-block:: python es.search(index='test-index', filter_path=['hits.hits._*']) Elasticsearch ------------- .. autoclass:: Elasticsearch :members: .. py:module:: elasticsearch.client Async Search ------------ .. autoclass:: AsyncSearchClient :members: Autoscaling ----------- .. autoclass:: AutoscalingClient :members: Cat --- .. autoclass:: CatClient :members: Cross-Cluster Replication (CCR) ------------------------------- .. autoclass:: CcrClient :members: Cluster ------- .. autoclass:: ClusterClient :members: Dangling Indices ---------------- .. autoclass:: DanglingIndicesClient :members: Enrich Policies --------------- .. autoclass:: EnrichClient :members: Event Query Language (EQL) -------------------------- .. 
autoclass:: EqlClient :members: Snapshottable Features ---------------------- .. autoclass:: FeaturesClient :members: Fleet ----- .. autoclass:: FleetClient :members: Graph Explore ------------- .. autoclass:: GraphClient :members: Index Lifecycle Management (ILM) -------------------------------- .. autoclass:: IlmClient :members: Indices ------- .. autoclass:: IndicesClient :members: Ingest Pipelines ---------------- .. autoclass:: IngestClient :members: License ------- .. autoclass:: LicenseClient :members: Logstash -------- .. autoclass:: LogstashClient :members: Migration --------- .. autoclass:: MigrationClient :members: Machine Learning (ML) --------------------- .. autoclass:: MlClient :members: Monitoring ---------- .. autoclass:: MonitoringClient :members: Nodes ----- .. autoclass:: NodesClient :members: Rollup Indices -------------- .. autoclass:: RollupClient :members: Searchable Snapshots -------------------- .. autoclass:: SearchableSnapshotsClient :members: Security -------- .. autoclass:: SecurityClient :members: Shutdown -------- .. autoclass:: ShutdownClient :members: Snapshot Lifecycle Management (SLM) ----------------------------------- .. autoclass:: SlmClient :members: Snapshots --------- .. autoclass:: SnapshotClient :members: SQL --- .. autoclass:: SqlClient :members: TLS/SSL ------- .. autoclass:: SslClient :members: Tasks ----- .. autoclass:: TasksClient :members: Text Structure -------------- .. autoclass:: TextStructureClient :members: Transforms ---------- .. autoclass:: TransformClient :members: Watcher ------- .. autoclass:: WatcherClient :members: X-Pack ------ .. autoclass:: XPackClient :members: elasticsearch-py-7.17.6/docs/sphinx/async.rst000066400000000000000000000163251426163262700212040ustar00rootroot00000000000000Using Asyncio with Elasticsearch ================================ .. py:module:: elasticsearch Starting in ``elasticsearch-py`` v7.8.0 for Python 3.6+ the ``elasticsearch`` package supports async/await with `Asyncio `_ and `Aiohttp `_. You can either install ``aiohttp`` directly or use the ``[async]`` extra: .. code-block:: bash $ python -m pip install elasticsearch>=7.8.0 aiohttp # - OR - $ python -m pip install elasticsearch[async]>=7.8.0 .. note:: Async functionality is a new feature of this library in v7.8.0+ so `please open an issue `_ if you find an issue or have a question about async support. Getting Started with Async -------------------------- After installation all async API endpoints are available via :class:`~elasticsearch.AsyncElasticsearch` and are used in the same way as other APIs, just with an extra ``await``: .. code-block:: python import asyncio from elasticsearch import AsyncElasticsearch es = AsyncElasticsearch() async def main(): resp = await es.search( index="documents", query={"match_all": {}}, size=20, ) print(resp) loop = asyncio.get_event_loop() loop.run_until_complete(main()) All APIs that are available under the sync client are also available under the async client. ASGI Applications and Elastic APM --------------------------------- `ASGI `_ (Asynchronous Server Gateway Interface) is a new way to serve Python web applications making use of async I/O to achieve better performance. Some examples of ASGI frameworks include FastAPI, Django 3.0+, and Starlette. If you're using one of these frameworks along with Elasticsearch then you should be using :py:class:`~elasticsearch.AsyncElasticsearch` to avoid blocking the event loop with synchronous network calls for optimal performance. 
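As a minimal sketch of what that looks like, a FastAPI endpoint backed by the
async client might be written as follows (the ``documents`` index and its
``title`` field are illustrative assumptions):

.. code-block:: python

    from fastapi import FastAPI
    from elasticsearch import AsyncElasticsearch

    app = FastAPI()
    es = AsyncElasticsearch()

    @app.get("/search")
    async def search(q: str):
        # Awaiting the query yields control back to the event loop while
        # the request is in flight, so other requests can be served.
        resp = await es.search(index="documents", query={"match": {"title": q}})
        return resp["hits"]["hits"]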
`Elastic APM `_ also supports tracing of async Elasticsearch queries just the same as synchronous queries. For an example of how to configure ``AsyncElasticsearch`` with a popular ASGI framework, `FastAPI `_, and APM tracing, there is a `pre-built example `_ in the ``examples/fastapi-apm`` directory.

Frequently Asked Questions
--------------------------

NameError / ImportError when importing ``AsyncElasticsearch``?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

If you receive a ``NameError`` or ``ImportError`` when trying to use
``AsyncElasticsearch``, you should ensure that you're running Python 3.6+
(check with ``$ python --version``) and that you have ``aiohttp`` installed
in your environment (check with ``$ python -m pip freeze | grep aiohttp``).
If either of the above conditions is not met then async support won't be available.

What about the ``elasticsearch-async`` package?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Previously asyncio was supported separately via the `elasticsearch-async `_
package. The ``elasticsearch-async`` package has been deprecated in favor of
``AsyncElasticsearch`` provided by the ``elasticsearch`` package
in v7.8 and onwards.

Receiving 'Unclosed client session / connector' warning?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

This warning is created by ``aiohttp`` when an open HTTP connection is
garbage collected. You'll typically run into this when closing your application.
To resolve the issue ensure that :meth:`~elasticsearch.AsyncElasticsearch.close`
is called before the :py:class:`~elasticsearch.AsyncElasticsearch` instance is garbage collected.

For example, if using FastAPI, that might look like this:

.. code-block:: python

    from fastapi import FastAPI
    from elasticsearch import AsyncElasticsearch

    app = FastAPI()
    es = AsyncElasticsearch()

    # This gets called once the app is shutting down.
    @app.on_event("shutdown")
    async def app_shutdown():
        await es.close()


Async Helpers
-------------

Async variants of all helpers are available in ``elasticsearch.helpers``
and are all prefixed with ``async_*``. You'll notice that these APIs
are identical to the ones in the sync :ref:`helpers` documentation.

All async helpers that accept an iterator or generator also accept async iterators
and async generators.

.. py:module:: elasticsearch.helpers

Bulk and Streaming Bulk
~~~~~~~~~~~~~~~~~~~~~~~

.. autofunction:: async_bulk

.. code-block:: python

    import asyncio
    from elasticsearch import AsyncElasticsearch
    from elasticsearch.helpers import async_bulk

    es = AsyncElasticsearch()

    async def gendata():
        mywords = ['foo', 'bar', 'baz']
        for word in mywords:
            yield {
                "_index": "mywords",
                "word": word,
            }

    async def main():
        await async_bulk(es, gendata())

    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())

.. autofunction:: async_streaming_bulk

.. code-block:: python

    import asyncio
    from elasticsearch import AsyncElasticsearch
    from elasticsearch.helpers import async_streaming_bulk

    es = AsyncElasticsearch()

    async def gendata():
        mywords = ['foo', 'bar', 'baz']
        for word in mywords:
            yield {
                "_index": "mywords",
                "word": word,
            }

    async def main():
        async for ok, result in async_streaming_bulk(es, gendata()):
            action, result = result.popitem()
            if not ok:
                print("failed to %s document %s" % (action, result))

    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())

Scan
~~~~

.. autofunction:: async_scan

..
code-block:: python import asyncio from elasticsearch import AsyncElasticsearch from elasticsearch.helpers import async_scan es = AsyncElasticsearch() async def main(): async for doc in async_scan( client=es, query={"query": {"match": {"title": "python"}}}, index="orders-*" ): print(doc) loop = asyncio.get_event_loop() loop.run_until_complete(main()) Reindex ~~~~~~~ .. autofunction:: async_reindex API Reference ------------- .. py:module:: elasticsearch The API of :class:`~elasticsearch.AsyncElasticsearch` is nearly identical to the API of :class:`~elasticsearch.Elasticsearch` with the exception that every API call like :py:func:`~elasticsearch.AsyncElasticsearch.search` is an ``async`` function and requires an ``await`` to properly return the response body. AsyncElasticsearch ~~~~~~~~~~~~~~~~~~ .. note:: To reference Elasticsearch APIs that are namespaced like ``.indices.create()`` refer to the sync API reference. These APIs are identical between sync and async. .. autoclass:: AsyncElasticsearch :members: AsyncTransport ~~~~~~~~~~~~~~ .. autoclass:: AsyncTransport :members: AsyncConnection ~~~~~~~~~~~~~~~~~ .. autoclass:: AsyncConnection :members: AIOHttpConnection ~~~~~~~~~~~~~~~~~ .. autoclass:: AIOHttpConnection :members: elasticsearch-py-7.17.6/docs/sphinx/conf.py000066400000000000000000000204741426163262700206340ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V under one or more agreements. # Elasticsearch B.V licenses this file to you under the Apache 2.0 License. # See the LICENSE file in the project root for more information import os import datetime import elasticsearch # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ["sphinx.ext.autodoc", "sphinx.ext.doctest"] autoclass_content = "both" # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix of source filenames. source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = "index" # General information about the project. project = u"Elasticsearch" copyright = u"%d, Elasticsearch B.V" % datetime.date.today().year # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # The short X.Y version. version = elasticsearch.__versionstr__ # The full version, including alpha/beta/rc tags. release = version # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. on_rtd = os.environ.get("READTHEDOCS", None) == "True" if not on_rtd: # only import and set the theme if we're building docs locally import sphinx_rtd_theme html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = "Elasticsearchdoc" # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ( "index", "Elasticsearch.tex", u"Elasticsearch Documentation", u"Honza Král", "manual", ) ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ("index", "elasticsearch-py", u"Elasticsearch Documentation", [u"Honza Král"], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( "index", "Elasticsearch", u"Elasticsearch Documentation", u"Honza Král", "Elasticsearch", "One line description of project.", "Miscellaneous", ) ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False elasticsearch-py-7.17.6/docs/sphinx/connection.rst000066400000000000000000000066001426163262700222210ustar00rootroot00000000000000.. _connection_api: Connection Layer API ==================== All of the classes responsible for handling the connection to the Elasticsearch cluster. The default subclasses used can be overridden by passing parameters to the :class:`~elasticsearch.Elasticsearch` class. All of the arguments to the client will be passed on to :class:`~elasticsearch.Transport`, :class:`~elasticsearch.ConnectionPool` and :class:`~elasticsearch.Connection`. For example if you wanted to use your own implementation of the :class:`~elasticsearch.ConnectionSelector` class you can just pass in the ``selector_class`` parameter. .. note:: :class:`~elasticsearch.ConnectionPool` and related options (like ``selector_class``) will only be used if more than one connection is defined. 
Either directly or via the :ref:`sniffing` mechanism.

.. note::
    Known binary format mimetypes like ``application/mapbox-vector-tile`` will return the
    response body as ``bytes`` instead of the usual UTF-8 encoded text.

.. py:module:: elasticsearch

Transport
---------

.. autoclass:: Transport(hosts, connection_class=Urllib3HttpConnection, connection_pool_class=ConnectionPool, host_info_callback=construct_hosts_list, sniff_on_start=False, sniffer_timeout=None, sniff_on_connection_fail=False, serializer=JSONSerializer(), max_retries=3, ** kwargs)
   :members:

Connection Pool
---------------

.. autoclass:: ConnectionPool(connections, dead_timeout=60, selector_class=RoundRobinSelector, randomize_hosts=True, ** kwargs)
   :members:

Connection Selector
-------------------

.. autoclass:: ConnectionSelector(opts)
   :members:

Urllib3HttpConnection (default connection_class)
------------------------------------------------

If you have complex SSL logic for connecting to Elasticsearch, using an `SSLContext` object might be more helpful. You can create one natively using the Python SSL library with the `create_default_context` (https://docs.python.org/3/library/ssl.html#ssl.create_default_context) method.

To create an `SSLContext` object you only need to use one of ``cafile``, ``capath`` or ``cadata``:

.. code-block:: python

    >>> from ssl import create_default_context
    >>> context = create_default_context(cafile=None, capath=None, cadata=None)

* ``cafile`` is the path to your CA file
* ``capath`` is the directory containing a collection of CA certificates
* ``cadata`` is either an ASCII string of one or more PEM-encoded certificates or a bytes-like object of DER-encoded certificates.

Please note that the use of SSLContext is only available for urllib3.

.. autoclass:: Urllib3HttpConnection
   :members:

API Compatibility HTTP Header
-----------------------------

The Python client can be configured to emit an HTTP header
``Accept: application/vnd.elasticsearch+json; compatible-with=7``
which signals to Elasticsearch that the client is requesting
the ``7.x`` version of request and response bodies. This allows for
upgrading from 7.x to 8.x versions of Elasticsearch without upgrading
everything at once. Elasticsearch should be upgraded first, after
the compatibility header is configured, and clients should be upgraded
second.

.. code-block:: python

    from elasticsearch import Elasticsearch

    client = Elasticsearch("http://...", headers={"accept": "application/vnd.elasticsearch+json; compatible-with=7"})

If you'd like to have the client emit the header without configuring ``headers``, you can use the environment variable ``ELASTIC_CLIENT_APIVERSIONING=1``.
elasticsearch-py-7.17.6/docs/sphinx/exceptions.rst000066400000000000000000000012661426163262700222460ustar00rootroot00000000000000.. _exceptions:

Exceptions
==========

.. py:module:: elasticsearch

.. autoclass:: ImproperlyConfigured

.. autoclass:: ElasticsearchException

.. autoclass:: SerializationError(ElasticsearchException)

.. autoclass:: TransportError(ElasticsearchException)
   :members:

.. autoclass:: ConnectionError(TransportError)

.. autoclass:: ConnectionTimeout(ConnectionError)

.. autoclass:: SSLError(ConnectionError)

.. autoclass:: NotFoundError(TransportError)

.. autoclass:: ConflictError(TransportError)

.. autoclass:: RequestError(TransportError)

.. autoclass:: AuthenticationException(TransportError)

.. autoclass:: AuthorizationException(TransportError)
elasticsearch-py-7.17.6/docs/sphinx/helpers.rst000066400000000000000000000067441426163262700215330ustar00rootroot00000000000000.. _helpers:

Helpers
=======

Collection of simple helper functions that abstract some specifics of the raw
API.

Bulk helpers
------------

There are several helpers for the ``bulk`` API since its requirement for
specific formatting and other considerations can make it cumbersome if used
directly.

All bulk helpers accept an instance of the ``Elasticsearch`` class and an
iterable ``actions`` (any iterable, can also be a generator, which is ideal
in most cases since it will allow you to index large datasets without the
need of loading them into memory).

The items in the ``actions`` iterable should be the documents we wish to
index, expressed in one of several formats. The most common one is the same
as returned by :meth:`~elasticsearch.Elasticsearch.search`, for example:

.. code:: python

    {
        '_index': 'index-name',
        '_type': 'document',
        '_id': 42,
        '_routing': 5,
        'pipeline': 'my-ingest-pipeline',
        '_source': {
            "title": "Hello World!",
            "body": "..."
        }
    }

Alternatively, if `_source` is not present, it will pop all metadata fields
from the doc and use the rest as the document data:

.. code:: python

    {
        "_id": 42,
        "_routing": 5,
        "title": "Hello World!",
        "body": "..."
    }

The :meth:`~elasticsearch.Elasticsearch.bulk` api accepts ``index``, ``create``,
``delete``, and ``update`` actions. Use the ``_op_type`` field to specify an
action (``_op_type`` defaults to ``index``):

.. code:: python

    {
        '_op_type': 'delete',
        '_index': 'index-name',
        '_type': 'document',
        '_id': 42,
    }
    {
        '_op_type': 'update',
        '_index': 'index-name',
        '_type': 'document',
        '_id': 42,
        'doc': {'question': 'The life, universe and everything.'}
    }

Example:
~~~~~~~~

Let's say we have an iterable of data, such as a list of words called
``mywords``, and we want to index those words into individual documents where
the structure of the document is like ``{"word": ""}``.

.. code:: python

    def gendata():
        mywords = ['foo', 'bar', 'baz']
        for word in mywords:
            yield {
                "_index": "mywords",
                "word": word,
            }

    bulk(es, gendata())

For a more complete and complex example please take a look at
https://github.com/elastic/elasticsearch-py/blob/master/examples/bulk-ingest

The :meth:`~elasticsearch.Elasticsearch.parallel_bulk` api is a wrapper around
the :meth:`~elasticsearch.Elasticsearch.bulk` api to provide threading.
:meth:`~elasticsearch.Elasticsearch.parallel_bulk` returns a generator which
must be consumed to produce results.

To see the results use:

.. code:: python

    for success, info in parallel_bulk(...):
        if not success:
            print('A document failed:', info)

If you don't care about the results, you can use deque from collections:

.. code:: python

    from collections import deque
    deque(parallel_bulk(...), maxlen=0)

.. note::

    When reading raw json strings from a file, you can also pass them in
    directly (without decoding to dicts first). In that case, however, you lose
    the ability to specify anything (index, type, even id) on a per-record
    basis; all documents will just be sent to Elasticsearch to be indexed
    as-is.

.. py:module:: elasticsearch.helpers

.. autofunction:: streaming_bulk

.. autofunction:: parallel_bulk

.. autofunction:: bulk

Scan
----

.. autofunction:: scan

Reindex
-------

.. autofunction:: reindex
elasticsearch-py-7.17.6/docs/sphinx/index.rst000066400000000000000000000337121426163262700211750ustar00rootroot00000000000000Python Elasticsearch Client
===========================

Official low-level client for Elasticsearch.
Its goal is to provide common ground for all Elasticsearch-related code in Python; because of this it tries to be opinion-free and very extendable. Installation ------------ Install the ``elasticsearch`` package with `pip `_: .. code-block:: console $ python -m pip install elasticsearch If your application uses async/await in Python you can install with the ``async`` extra: .. code-block:: console $ python -m pip install elasticsearch[async] Read more about `how to use asyncio with this project `_. Compatibility ------------- Language clients are forward compatible; meaning that clients support communicating with greater or equal minor versions of Elasticsearch. Elasticsearch language clients are only backwards compatible with default distributions and without guarantees made. If you have a need to have multiple versions installed at the same time older versions are also released as ``elasticsearch2``, ``elasticsearch5`` and ``elasticsearch6``. Example Usage ------------- .. code-block:: python from datetime import datetime from elasticsearch import Elasticsearch es = Elasticsearch() doc = { 'author': 'kimchy', 'text': 'Elasticsearch: cool. bonsai cool.', 'timestamp': datetime.now(), } res = es.index(index="test-index", id=1, document=doc) print(res['result']) res = es.get(index="test-index", id=1) print(res['_source']) es.indices.refresh(index="test-index") res = es.search(index="test-index", query={"match_all": {}}) print("Got %d Hits:" % res['hits']['total']['value']) for hit in res['hits']['hits']: print("%(timestamp)s %(author)s: %(text)s" % hit["_source"]) Features -------- This client was designed as very thin wrapper around Elasticsearch's REST API to allow for maximum flexibility. This means that there are no opinions in this client; it also means that some of the APIs are a little cumbersome to use from Python. We have created some :ref:`helpers` to help with this issue as well as a more high level library (`elasticsearch-dsl`_) on top of this one to provide a more convenient way of working with Elasticsearch. .. _elasticsearch-dsl: https://elasticsearch-dsl.readthedocs.io/ Persistent Connections ~~~~~~~~~~~~~~~~~~~~~~ ``elasticsearch-py`` uses persistent connections inside of individual connection pools (one per each configured or sniffed node). Out of the box you can choose between two ``http`` protocol implementations. See :ref:`transports` for more information. The transport layer will create an instance of the selected connection class per node and keep track of the health of individual nodes - if a node becomes unresponsive (throwing exceptions while connecting to it) it's put on a timeout by the :class:`~elasticsearch.ConnectionPool` class and only returned to the circulation after the timeout is over (or when no live nodes are left). By default nodes are randomized before being passed into the pool and round-robin strategy is used for load balancing. You can customize this behavior by passing parameters to the :ref:`connection_api` (all keyword arguments to the :class:`~elasticsearch.Elasticsearch` class will be passed through). If what you want to accomplish is not supported you should be able to create a subclass of the relevant component and pass it in as a parameter to be used instead of the default implementation. Automatic Retries ~~~~~~~~~~~~~~~~~ If a connection to a node fails due to connection issues (raises :class:`~elasticsearch.ConnectionError`) it is considered in faulty state. 
It will be placed on hold for ``dead_timeout`` seconds and the request will be retried on another node. If a connection fails multiple times in a row the timeout will get progressively larger to avoid hitting a node that's, by all indication, down. If no live connection is available, the connection that has the smallest timeout will be used. By default retries are not triggered by a timeout (:class:`~elasticsearch.ConnectionTimeout`), set ``retry_on_timeout`` to ``True`` to also retry on timeouts. .. _sniffing: Sniffing ~~~~~~~~ The client can be configured to inspect the cluster state to get a list of nodes upon startup, periodically and/or on failure. See :class:`~elasticsearch.Transport` parameters for details. Some example configurations: .. code-block:: python from elasticsearch import Elasticsearch # by default we don't sniff, ever es = Elasticsearch() # you can specify to sniff on startup to inspect the cluster and load # balance across all nodes es = Elasticsearch(["seed1", "seed2"], sniff_on_start=True) # you can also sniff periodically and/or after failure: es = Elasticsearch(["seed1", "seed2"], sniff_on_start=True, sniff_on_connection_fail=True, sniffer_timeout=60) Thread safety ~~~~~~~~~~~~~ The client is thread safe and can be used in a multi threaded environment. Best practice is to create a single global instance of the client and use it throughout your application. If your application is long-running consider turning on :ref:`sniffing` to make sure the client is up to date on the cluster location. By default we allow ``urllib3`` to open up to 10 connections to each node, if your application calls for more parallelism, use the ``maxsize`` parameter to raise the limit: .. code-block:: python # allow up to 25 connections to each node es = Elasticsearch(["host1", "host2"], maxsize=25) .. note:: Since we use persistent connections throughout the client it means that the client doesn't tolerate ``fork`` very well. If your application calls for multiple processes make sure you create a fresh client after call to ``fork``. Note that Python's ``multiprocessing`` module uses ``fork`` to create new processes on POSIX systems. TLS/SSL and Authentication ~~~~~~~~~~~~~~~~~~~~~~~~~~ You can configure the client to use ``SSL`` for connecting to your elasticsearch cluster, including certificate verification and HTTP auth: .. code-block:: python from elasticsearch import Elasticsearch # you can use RFC-1738 to specify the url es = Elasticsearch(['https://user:secret@localhost:443']) # ... or specify common parameters as kwargs es = Elasticsearch( ['localhost', 'otherhost'], http_auth=('user', 'secret'), scheme="https", port=443, ) # SSL client authentication using client_cert and client_key from ssl import create_default_context context = create_default_context(cafile="path/to/cert.pem") es = Elasticsearch( ['localhost', 'otherhost'], http_auth=('user', 'secret'), scheme="https", port=443, ssl_context=context, ) .. warning:: ``elasticsearch-py`` doesn't ship with default set of root certificates. To have working SSL certificate validation you need to either specify your own as ``cafile`` or ``capath`` or ``cadata`` or install `certifi`_ which will be picked up automatically. See class :class:`~elasticsearch.Urllib3HttpConnection` for detailed description of the options. .. _certifi: http://certifiio.readthedocs.io/en/latest/ Connecting via Cloud ID ~~~~~~~~~~~~~~~~~~~~~~~ Cloud ID is an easy way to configure your client to work with your Elastic Cloud deployment. 
Combine the ``cloud_id`` with either ``http_auth`` or ``api_key`` to authenticate with your Elastic Cloud deployment. Using ``cloud_id`` enables TLS verification and HTTP compression by default and sets the port to ``443`` unless otherwise overwritten via the ``port`` parameter or the port value encoded within ``cloud_id``. Using Cloud ID also disables sniffing. .. code-block:: python from elasticsearch import Elasticsearch es = Elasticsearch( cloud_id="cluster-1:dXMa5Fx...", http_auth=("elastic", ""), ) API Key Authentication ~~~~~~~~~~~~~~~~~~~~~~ You can configure the client to use Elasticsearch's `API Key`_ for connecting to your cluster. Please note this authentication method has been introduced with release of Elasticsearch ``6.7.0``. .. code-block:: python from elasticsearch import Elasticsearch # you can use the api key tuple es = Elasticsearch( ['node-1', 'node-2', 'node-3'], api_key=('id', 'api_key'), ) # or you pass the base 64 encoded token es = Elasticsearch( ['node-1', 'node-2', 'node-3'], api_key='base64encoded tuple', ) .. _API Key: https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html Logging ~~~~~~~ ``elasticsearch-py`` uses the standard `logging library`_ from python to define two loggers: ``elasticsearch`` and ``elasticsearch.trace``. ``elasticsearch`` is used by the client to log standard activity, depending on the log level. ``elasticsearch.trace`` can be used to log requests to the server in the form of ``curl`` commands using pretty-printed json that can then be executed from command line. Because it is designed to be shared (for example to demonstrate an issue) it also just uses ``localhost:9200`` as the address instead of the actual address of the host. If the trace logger has not been configured already it is set to `propagate=False` so it needs to be activated separately. .. _logging library: http://docs.python.org/3/library/logging.html Type Hints ~~~~~~~~~~ Starting in ``elasticsearch-py`` v7.10.0 the library now ships with `type hints`_ and supports basic static type analysis with tools like `Mypy`_ and `Pyright`_. If we write a script that has a type error like using ``request_timeout`` with a ``str`` argument instead of ``float`` and then run Mypy on the script: .. code-block:: python # script.py from elasticsearch import Elasticsearch es = Elasticsearch(...) es.search( index="test-index", request_timeout="5" # type error! ) # $ mypy script.py # script.py:5: error: Argument "request_timeout" to "search" of "Elasticsearch" has # incompatible type "str"; expected "Union[int, float, None]" # Found 1 error in 1 file (checked 1 source file) For now many parameter types for API methods aren't specific to a type (ie they are of type ``typing.Any``) but in the future they will be tightened for even better static type checking. Type hints also allow tools like your IDE to check types and provide better auto-complete functionality. .. warning:: The type hints for API methods like ``search`` don't match the function signature that can be found in the source code. Type hints represent optimal usage of the API methods. Using keyword arguments is highly recommended so all optional parameters and ``body`` are keyword-only in type hints. JetBrains PyCharm will use the warning ``Unexpected argument`` to denote that the parameter may be keyword-only. .. _type hints: https://www.python.org/dev/peps/pep-0484 .. _mypy: http://mypy-lang.org .. 
_pyright: https://github.com/microsoft/pyright Environment considerations -------------------------- When using the client there are several limitations of your environment that could come into play. When using an HTTP load balancer you cannot use the :ref:`sniffing` functionality - the cluster would supply the client with IP addresses to directly connect to the cluster, circumventing the load balancer. Depending on your configuration this might be something you don't want or break completely. Compression ~~~~~~~~~~~ When using capacity-constrained networks (low throughput), it may be handy to enable compression. This is especially useful when doing bulk loads or inserting large documents. This will configure compression. .. code-block:: python from elasticsearch import Elasticsearch es = Elasticsearch(hosts, http_compress=True) Compression is enabled by default when connecting to Elastic Cloud via ``cloud_id``. Customization ------------- Custom serializers ~~~~~~~~~~~~~~~~~~ By default, `JSONSerializer`_ is used to encode all outgoing requests. However, you can implement your own custom serializer .. code-block:: python from elasticsearch.serializer import JSONSerializer class SetEncoder(JSONSerializer): def default(self, obj): if isinstance(obj, set): return list(obj) if isinstance(obj, Something): return 'CustomSomethingRepresentation' return JSONSerializer.default(self, obj) es = Elasticsearch(serializer=SetEncoder()) .. _JSONSerializer: https://github.com/elastic/elasticsearch-py/blob/master/elasticsearch/serializer.py#L24 Elasticsearch-DSL ----------------- For a more high level client library with more limited scope, have a look at `elasticsearch-dsl`_ - a more pythonic library sitting on top of ``elasticsearch-py``. `elasticsearch-dsl`_ provides a more convenient and idiomatic way to write and manipulate `queries`_ by mirroring the terminology and structure of Elasticsearch JSON DSL while exposing the whole range of the DSL from Python either directly using defined classes or a queryset-like expressions. It also provides an optional `persistence layer`_ for working with documents as Python objects in an ORM-like fashion: defining mappings, retrieving and saving documents, wrapping the document data in user-defined classes. .. _elasticsearch-dsl: https://elasticsearch-dsl.readthedocs.io/ .. _queries: https://elasticsearch-dsl.readthedocs.io/en/latest/search_dsl.html .. _persistence layer: https://elasticsearch-dsl.readthedocs.io/en/latest/persistence.html#doctype Contents -------- .. toctree:: :maxdepth: 2 api exceptions async connection transports helpers Release Notes License ------- Copyright 2021 Elasticsearch B.V. Licensed under the Apache License, Version 2.0. Indices and tables ------------------ * :ref:`genindex` * :ref:`modindex` * :ref:`search` elasticsearch-py-7.17.6/docs/sphinx/transports.rst000066400000000000000000000041231426163262700222770ustar00rootroot00000000000000.. _transports: Transport classes ================= List of transport classes that can be used, simply import your choice and pass it to the constructor of :class:`~elasticsearch.Elasticsearch` as `connection_class`. Note that the :class:`~elasticsearch.connection.RequestsHttpConnection` requires ``requests`` to be installed. For example to use the ``requests``-based connection just import it and use it: .. 
code-block:: python from elasticsearch import Elasticsearch, RequestsHttpConnection es = Elasticsearch(connection_class=RequestsHttpConnection) The default connection class is based on ``urllib3`` which is more performant and lightweight than the optional ``requests``-based class. Only use ``RequestsHttpConnection`` if you have need of any of ``requests`` advanced features like custom auth plugins etc. Product check on first request ------------------------------ Starting in v7.14.0 the client performs a required product check before the first API call is executed. This product check allows the client to establish that it's communicating with a supported Elasticsearch cluster. The product check requires a single HTTP request to the ``info`` API. In most cases this request will succeed quickly and then no further product check HTTP requests will be sent. The product check will verify that the ``X-Elastic-Product: Elasticsearch`` HTTP header is being sent or if the ``info`` API indicates a supported distribution of Elasticsearch. If the client detects that it's not connected to a supported distribution of Elasticsearch the ``UnsupportedProductError`` exception will be raised. In previous versions of Elasticsearch the ``info`` API required additional permissions so if an authentication or authorization error is raised during the product check then an ``ElasticsearchWarning`` is raised and the client proceeds normally. .. py:module:: elasticsearch.connection Connection ---------- .. autoclass:: Connection Urllib3HttpConnection --------------------- .. autoclass:: Urllib3HttpConnection RequestsHttpConnection ---------------------- .. autoclass:: RequestsHttpConnection elasticsearch-py-7.17.6/elasticsearch/000077500000000000000000000000001426163262700176775ustar00rootroot00000000000000elasticsearch-py-7.17.6/elasticsearch/__init__.py000066400000000000000000000067471426163262700220260ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
# flake8: noqa from __future__ import absolute_import import logging import re import sys import warnings from ._version import __versionstr__ _major, _minor, _patch = [ int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups() ] VERSION = __version__ = (_major, _minor, _patch) logger = logging.getLogger("elasticsearch") logger.addHandler(logging.NullHandler()) from .client import Elasticsearch from .connection import Connection, RequestsHttpConnection, Urllib3HttpConnection from .connection_pool import ConnectionPool, ConnectionSelector, RoundRobinSelector from .exceptions import ( AuthenticationException, AuthorizationException, ConflictError, ConnectionError, ConnectionTimeout, ElasticsearchDeprecationWarning, ElasticsearchException, ElasticsearchWarning, ImproperlyConfigured, NotFoundError, RequestError, SerializationError, SSLError, TransportError, UnsupportedProductError, ) from .serializer import JSONSerializer from .transport import Transport # Only raise one warning per deprecation message so as not # to spam up the user if the same action is done multiple times. warnings.simplefilter("default", category=ElasticsearchDeprecationWarning, append=True) __all__ = [ "Elasticsearch", "Transport", "ConnectionPool", "ConnectionSelector", "RoundRobinSelector", "JSONSerializer", "Connection", "RequestsHttpConnection", "Urllib3HttpConnection", "ImproperlyConfigured", "ElasticsearchException", "SerializationError", "TransportError", "NotFoundError", "ConflictError", "RequestError", "ConnectionError", "SSLError", "ConnectionTimeout", "AuthenticationException", "AuthorizationException", "UnsupportedProductError", "ElasticsearchWarning", "ElasticsearchDeprecationWarning", ] try: # Asyncio only supported on Python 3.6+ if sys.version_info < (3, 6): raise ImportError from ._async.client import AsyncElasticsearch from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection from ._async.transport import AsyncTransport __all__ += [ "AIOHttpConnection", "AsyncConnection", "AsyncTransport", "AsyncElasticsearch", ] except (ImportError, SyntaxError): pass # Python earlier than 3.6 is deprecated and will be removed in 8.0.0 if sys.version_info < (3, 6): warnings.warn( "Support for Python 3.5 and earlier is deprecated and will be removed " "in v8.0.0 (current instance is Python %d.%d) See https://github.com/elastic" "/elasticsearch-py/issues/1696 for details." % sys.version_info[:2], category=DeprecationWarning, stacklevel=2, ) elasticsearch-py-7.17.6/elasticsearch/__init__.pyi000066400000000000000000000052161426163262700221650ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
import sys from typing import Tuple from .client import Elasticsearch as Elasticsearch from .connection import Connection as Connection from .connection import RequestsHttpConnection as RequestsHttpConnection from .connection import Urllib3HttpConnection as Urllib3HttpConnection from .connection_pool import ConnectionPool as ConnectionPool from .connection_pool import ConnectionSelector as ConnectionSelector from .connection_pool import RoundRobinSelector as RoundRobinSelector from .exceptions import AuthenticationException as AuthenticationException from .exceptions import AuthorizationException as AuthorizationException from .exceptions import ConflictError as ConflictError from .exceptions import ConnectionError as ConnectionError from .exceptions import ConnectionTimeout as ConnectionTimeout from .exceptions import ( ElasticsearchDeprecationWarning as ElasticsearchDeprecationWarning, ) from .exceptions import ElasticsearchException as ElasticsearchException from .exceptions import ImproperlyConfigured as ImproperlyConfigured from .exceptions import NotFoundError as NotFoundError from .exceptions import RequestError as RequestError from .exceptions import SerializationError as SerializationError from .exceptions import SSLError as SSLError from .exceptions import TransportError as TransportError from .exceptions import UnsupportedProductError as UnsupportedProductError from .serializer import JSONSerializer as JSONSerializer from .transport import Transport as Transport try: if sys.version_info < (3, 6): raise ImportError from ._async.client import AsyncElasticsearch as AsyncElasticsearch from ._async.http_aiohttp import AIOHttpConnection as AIOHttpConnection from ._async.transport import AsyncTransport as AsyncTransport except (ImportError, SyntaxError): pass VERSION: Tuple[int, int, int] __version__: Tuple[int, int, int] __versionstr__: str elasticsearch-py-7.17.6/elasticsearch/_async/000077500000000000000000000000001426163262700211535ustar00rootroot00000000000000elasticsearch-py-7.17.6/elasticsearch/_async/__init__.py000066400000000000000000000014231426163262700232640ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. elasticsearch-py-7.17.6/elasticsearch/_async/_extra_imports.py000066400000000000000000000032741426163262700245720ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# type: ignore

# This file exists for the sole reason of making mypy not
# complain about type issues to do with 'aiohttp' and 'yarl'.
# We're in a catch-22 situation:
# - If we use 'type: ignore' on 'import aiohttp' and it's not installed
#   mypy will complain that the annotation is unnecessary.
# - If we don't use 'type: ignore' on 'import aiohttp' and it's
#   not installed mypy will complain that it can't find
#   type hints for aiohttp.
# So to make mypy happy we move all our 'extra' imports here
# and add a global 'type: ignore' which mypy never complains
# about being unnecessary.

import aiohttp
import aiohttp.client_exceptions as aiohttp_exceptions

# We do this because we don't explicitly require 'yarl'
# within our [async] extra any more.
# See AIOHttpConnection.request() for more information why.
try:
    import yarl
except ImportError:
    yarl = False

__all__ = ["aiohttp", "aiohttp_exceptions", "yarl"]
elasticsearch-py-7.17.6/elasticsearch/_async/client/000077500000000000000000000000001426163262700224315ustar00rootroot00000000000000elasticsearch-py-7.17.6/elasticsearch/_async/client/__init__.py000066400000000000000000003137101426163262700245470ustar00rootroot00000000000000# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals import logging from ..transport import AsyncTransport, TransportError from .async_search import AsyncSearchClient from .autoscaling import AutoscalingClient from .cat import CatClient from .ccr import CcrClient from .cluster import ClusterClient from .dangling_indices import DanglingIndicesClient from .data_frame import Data_FrameClient from .deprecation import DeprecationClient from .enrich import EnrichClient from .eql import EqlClient from .features import FeaturesClient from .fleet import FleetClient from .graph import GraphClient from .ilm import IlmClient from .indices import IndicesClient from .ingest import IngestClient from .license import LicenseClient from .logstash import LogstashClient from .migration import MigrationClient from .ml import MlClient from .monitoring import MonitoringClient from .nodes import NodesClient from .remote import RemoteClient from .rollup import RollupClient from .searchable_snapshots import SearchableSnapshotsClient from .security import SecurityClient from .shutdown import ShutdownClient from .slm import SlmClient from .snapshot import SnapshotClient from .sql import SqlClient from .ssl import SslClient from .tasks import TasksClient from .text_structure import TextStructureClient from .transform import TransformClient from .utils import SKIP_IN_PATH, _bulk_body, _make_path, _normalize_hosts, query_params from .watcher import WatcherClient from .xpack import XPackClient logger = logging.getLogger("elasticsearch") class AsyncElasticsearch(object): """ Elasticsearch low-level client. Provides a straightforward mapping from Python to ES REST endpoints. The instance has attributes ``cat``, ``cluster``, ``indices``, ``ingest``, ``nodes``, ``snapshot`` and ``tasks`` that provide access to instances of :class:`~elasticsearch.client.CatClient`, :class:`~elasticsearch.client.ClusterClient`, :class:`~elasticsearch.client.IndicesClient`, :class:`~elasticsearch.client.IngestClient`, :class:`~elasticsearch.client.NodesClient`, :class:`~elasticsearch.client.SnapshotClient` and :class:`~elasticsearch.client.TasksClient` respectively. This is the preferred (and only supported) way to get access to those classes and their methods. You can specify your own connection class which should be used by providing the ``connection_class`` parameter:: # create connection to localhost using the ThriftConnection es = Elasticsearch(connection_class=ThriftConnection) If you want to turn on :ref:`sniffing` you have several options (described in :class:`~elasticsearch.Transport`):: # create connection that will automatically inspect the cluster to get # the list of active nodes. Start with nodes running on 'esnode1' and # 'esnode2' es = Elasticsearch( ['esnode1', 'esnode2'], # sniff before doing anything sniff_on_start=True, # refresh nodes after a node fails to respond sniff_on_connection_fail=True, # and also every 60 seconds sniffer_timeout=60 ) Different hosts can have different parameters, use a dictionary per node to specify those:: # connect to localhost directly and another node using SSL on port 443 # and an url_prefix. Note that ``port`` needs to be an int. 
        es = Elasticsearch([
            {'host': 'localhost'},
            {'host': 'othernode', 'port': 443, 'url_prefix': 'es', 'use_ssl': True},
        ])

    If using SSL, there are several parameters that control how we deal with
    certificates (see :class:`~elasticsearch.Urllib3HttpConnection` for
    detailed description of the options)::

        es = Elasticsearch(
            ['localhost:443', 'other_host:443'],
            # turn on SSL
            use_ssl=True,
            # make sure we verify SSL certificates
            verify_certs=True,
            # provide a path to CA certs on disk
            ca_certs='/path/to/CA_certs'
        )

    If using SSL but not verifying the certs, a warning message is shown
    optionally (see :class:`~elasticsearch.Urllib3HttpConnection` for
    detailed description of the options)::

        es = Elasticsearch(
            ['localhost:443', 'other_host:443'],
            # turn on SSL
            use_ssl=True,
            # don't verify SSL certificates
            verify_certs=False,
            # don't show warnings about ssl certs verification
            ssl_show_warn=False
        )

    SSL client authentication is supported
    (see :class:`~elasticsearch.Urllib3HttpConnection` for
    detailed description of the options)::

        es = Elasticsearch(
            ['localhost:443', 'other_host:443'],
            # turn on SSL
            use_ssl=True,
            # make sure we verify SSL certificates
            verify_certs=True,
            # provide a path to CA certs on disk
            ca_certs='/path/to/CA_certs',
            # PEM formatted SSL client certificate
            client_cert='/path/to/clientcert.pem',
            # PEM formatted SSL client key
            client_key='/path/to/clientkey.pem'
        )

    Alternatively you can use RFC-1738 formatted URLs, as long as they are not
    in conflict with other options::

        es = Elasticsearch(
            [
                'http://user:secret@localhost:9200/',
                'https://user:secret@other_host:443/production'
            ],
            verify_certs=True
        )

    By default, `JSONSerializer `_ is used to encode all outgoing requests.
    However, you can implement your own custom serializer::

        from elasticsearch.serializer import JSONSerializer

        class SetEncoder(JSONSerializer):
            def default(self, obj):
                if isinstance(obj, set):
                    return list(obj)
                if isinstance(obj, Something):
                    return 'CustomSomethingRepresentation'
                return JSONSerializer.default(self, obj)

        es = Elasticsearch(serializer=SetEncoder())
    """

    def __init__(self, hosts=None, transport_class=AsyncTransport, **kwargs):
        """
        :arg hosts: list of nodes, or a single node, we should connect to.
            Node should be a dictionary ({"host": "localhost", "port": 9200}),
            the entire dictionary will be passed to the
            :class:`~elasticsearch.Connection` class as kwargs, or a string in
            the format of ``host[:port]`` which will be translated to a
            dictionary automatically.  If no value is given the
            :class:`~elasticsearch.Connection` class defaults will be used.

        :arg transport_class: :class:`~elasticsearch.Transport` subclass to use.

        :arg kwargs: any additional arguments will be passed on to the
            :class:`~elasticsearch.Transport` class and, subsequently, to the
            :class:`~elasticsearch.Connection` instances.
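
        For example, a minimal create-and-close sketch (the host list is
        illustrative; ``await`` requires an async context)::

            es = AsyncElasticsearch(["localhost:9200"])
            try:
                await es.info()
            finally:
                await es.close()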
""" self.transport = transport_class(_normalize_hosts(hosts), **kwargs) # namespaced clients for compatibility with API names self.async_search = AsyncSearchClient(self) self.autoscaling = AutoscalingClient(self) self.cat = CatClient(self) self.ccr = CcrClient(self) self.cluster = ClusterClient(self) self.dangling_indices = DanglingIndicesClient(self) self.data_frame = Data_FrameClient(self) self.deprecation = DeprecationClient(self) self.enrich = EnrichClient(self) self.eql = EqlClient(self) self.features = FeaturesClient(self) self.fleet = FleetClient(self) self.graph = GraphClient(self) self.ilm = IlmClient(self) self.indices = IndicesClient(self) self.ingest = IngestClient(self) self.license = LicenseClient(self) self.logstash = LogstashClient(self) self.migration = MigrationClient(self) self.ml = MlClient(self) self.monitoring = MonitoringClient(self) self.nodes = NodesClient(self) self.remote = RemoteClient(self) self.rollup = RollupClient(self) self.searchable_snapshots = SearchableSnapshotsClient(self) self.security = SecurityClient(self) self.shutdown = ShutdownClient(self) self.slm = SlmClient(self) self.snapshot = SnapshotClient(self) self.sql = SqlClient(self) self.ssl = SslClient(self) self.tasks = TasksClient(self) self.text_structure = TextStructureClient(self) self.transform = TransformClient(self) self.watcher = WatcherClient(self) self.xpack = XPackClient(self) def __repr__(self): try: # get a list of all connections cons = self.transport.hosts # truncate to 5 if there are too many if len(cons) > 5: cons = cons[:5] + ["..."] return "<{cls}({cons})>".format(cls=self.__class__.__name__, cons=cons) except Exception: # probably operating on custom transport and connection_pool, ignore return super(AsyncElasticsearch, self).__repr__() async def __aenter__(self): if hasattr(self.transport, "_async_call"): await self.transport._async_call() return self async def __aexit__(self, *_): await self.close() async def close(self): """Closes the Transport and all internal connections""" await self.transport.close() # AUTO-GENERATED-API-DEFINITIONS # @query_params( response_mimetypes=["application/json"], ) async def ping(self, params=None, headers=None): """ Returns whether the cluster is running. ``_ """ try: return await self.transport.perform_request( "HEAD", "/", params=params, headers=headers ) except TransportError: return False @query_params( response_mimetypes=["application/json"], ) async def info(self, params=None, headers=None): """ Returns basic information about the cluster. ``_ """ return await self.transport.perform_request( "GET", "/", params=params, headers=headers ) @query_params( "pipeline", "refresh", "routing", "timeout", "version", "version_type", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], body_name="document", ) async def create(self, index, id, body, doc_type=None, params=None, headers=None): """ Creates a new document in the index. Returns a 409 response when a document with a same ID already exists in the index. ``_ :arg index: The name of the index :arg id: Document ID :arg document: The document :arg doc_type: The type of the document :arg pipeline: The pipeline id to preprocess incoming documents with :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes. 
        """
        for param in (index, id, body):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        if doc_type in SKIP_IN_PATH:
            path = _make_path(index, "_create", id)
        else:
            path = _make_path(index, doc_type, id, "_create")

        return await self.transport.perform_request(
            "PUT", path, params=params, headers=headers, body=body
        )

    @query_params(
        "if_primary_term",
        "if_seq_no",
        "op_type",
        "pipeline",
        "refresh",
        "require_alias",
        "routing",
        "timeout",
        "version",
        "version_type",
        "wait_for_active_shards",
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
        body_name="document",
    )
    async def index(
        self, index, body, doc_type=None, id=None, params=None, headers=None
    ):
        """
        Creates or updates a document in an index.

        ``_

        :arg index: The name of the index
        :arg document: The document
        :arg doc_type: The type of the document
        :arg id: Document ID
        :arg if_primary_term: only perform the index operation if the
            last operation that has changed the document has the specified primary
            term
        :arg if_seq_no: only perform the index operation if the last
            operation that has changed the document has the specified sequence
            number
        :arg op_type: Explicit operation type. Defaults to `index` for
            requests with an explicit document ID, and to `create` for requests
            without an explicit document ID  Valid choices: index, create
        :arg pipeline: The pipeline id to preprocess incoming documents
            with
        :arg refresh: If `true` then refresh the affected shards to make
            this operation visible to search, if `wait_for` then wait for a
            refresh to make this operation visible to search, if `false` (the
            default) then do nothing with refreshes.  Valid choices: true, false,
            wait_for
        :arg require_alias: When true, requires destination to be an
            alias. Default is false
        :arg routing: Specific routing value
        :arg timeout: Explicit operation timeout
        :arg version: Explicit version number for concurrency control
        :arg version_type: Specific version type  Valid choices:
            internal, external, external_gte
        :arg wait_for_active_shards: Sets the number of shard copies
            that must be active before proceeding with the index operation.
            Defaults to 1, meaning the primary shard only.
Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) """ for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type is None: doc_type = "_doc" return await self.transport.perform_request( "POST" if id in SKIP_IN_PATH else "PUT", _make_path(index, doc_type, id), params=params, headers=headers, body=body, ) @query_params( "_source", "_source_excludes", "_source_includes", "pipeline", "refresh", "require_alias", "routing", "timeout", "wait_for_active_shards", request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) async def bulk(self, body, index=None, doc_type=None, params=None, headers=None): """ Allows to perform multiple index/update/delete operations in a single request. ``_ :arg body: The operation definition and data (action-data pairs), separated by newlines :arg index: Default index for items which don't provide one :arg doc_type: Default document type for items which don't provide one :arg _source: True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub- request :arg _source_excludes: Default list of fields to exclude from the returned _source field, can be overridden on each sub-request :arg _source_includes: Default list of fields to extract and return from the _source field, can be overridden on each sub-request :arg pipeline: The pipeline id to preprocess incoming documents with :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes. Valid choices: true, false, wait_for :arg require_alias: Sets require_alias for all incoming documents. Defaults to unset (false) :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Sets the number of shard copies that must be active before proceeding with the bulk operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return await self.transport.perform_request( "POST", _make_path(index, doc_type, "_bulk"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json", "text/plain"], response_mimetypes=["application/json"], body_params=["scroll_id"], ) async def clear_scroll(self, body=None, scroll_id=None, params=None, headers=None): """ Explicitly clears the search context for a scroll. 
``_ :arg body: A comma-separated list of scroll IDs to clear if none was specified via the scroll_id parameter :arg scroll_id: A comma-separated list of scroll IDs to clear """ if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'scroll_id'.") elif scroll_id and not body: body = {"scroll_id": [scroll_id]} elif scroll_id: params["scroll_id"] = scroll_id return await self.transport.perform_request( "DELETE", "/_search/scroll", params=params, headers=headers, body=body ) @query_params( "allow_no_indices", "analyze_wildcard", "analyzer", "default_operator", "df", "expand_wildcards", "ignore_throttled", "ignore_unavailable", "lenient", "min_score", "preference", "q", "routing", "terminate_after", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def count( self, body=None, index=None, doc_type=None, params=None, headers=None ): """ Returns number of documents matching a query. ``_ :arg body: A query to restrict the results specified with the Query DSL (optional) :arg index: A comma-separated list of indices to restrict the results :arg doc_type: A comma-separated list of types to restrict the results :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg default_operator: The default operator for query string query (AND or OR) Valid choices: AND, OR Default: OR :arg df: The field to use as default where no field prefix is given in the query string :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_throttled: Whether specified concrete, expanded or aliased indices should be ignored when throttled :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg min_score: Include only documents with a specific `_score` value in the result :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg q: Query in the Lucene query string syntax :arg routing: A comma-separated list of specific routing values :arg terminate_after: The maximum count for each shard, upon reaching which the query execution will terminate early """ return await self.transport.perform_request( "POST", _make_path(index, doc_type, "_count"), params=params, headers=headers, body=body, ) @query_params( "if_primary_term", "if_seq_no", "refresh", "routing", "timeout", "version", "version_type", "wait_for_active_shards", response_mimetypes=["application/json"], ) async def delete(self, index, id, doc_type=None, params=None, headers=None): """ Removes a document from the index. 
``_ :arg index: The name of the index :arg id: The document ID :arg doc_type: The type of the document :arg if_primary_term: only perform the delete operation if the last operation that has changed the document has the specified primary term :arg if_seq_no: only perform the delete operation if the last operation that has changed the document has the specified sequence number :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes. Valid choices: true, false, wait_for :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg version: Explicit version number for concurrency control :arg version_type: Specific version type Valid choices: internal, external, external_gte, force :arg wait_for_active_shards: Sets the number of shard copies that must be active before proceeding with the delete operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type in SKIP_IN_PATH: doc_type = "_doc" return await self.transport.perform_request( "DELETE", _make_path(index, doc_type, id), params=params, headers=headers ) @query_params( "allow_no_indices", "analyze_wildcard", "analyzer", "conflicts", "default_operator", "df", "expand_wildcards", "from_", "ignore_unavailable", "lenient", "max_docs", "preference", "q", "refresh", "request_cache", "requests_per_second", "routing", "scroll", "scroll_size", "search_timeout", "search_type", "size", "slices", "sort", "stats", "terminate_after", "timeout", "version", "wait_for_active_shards", "wait_for_completion", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def delete_by_query( self, index, body, doc_type=None, params=None, headers=None ): """ Deletes documents matching the provided query. ``_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg body: The search definition using the Query DSL :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg conflicts: What to do when the delete by query hits version conflicts? Valid choices: abort, proceed Default: abort :arg default_operator: The default operator for query string query (AND or OR) Valid choices: AND, OR Default: OR :arg df: The field to use as default where no field prefix is given in the query string :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
            Valid choices: open, closed, hidden, none, all  Default: open
        :arg from_: Starting offset (default: 0)
        :arg ignore_unavailable: Whether specified concrete indices
            should be ignored when unavailable (missing or closed)
        :arg lenient: Specify whether format-based query failures (such
            as providing text to a numeric field) should be ignored
        :arg max_docs: Maximum number of documents to process (default:
            all documents)
        :arg preference: Specify the node or shard the operation should
            be performed on (default: random)
        :arg q: Query in the Lucene query string syntax
        :arg refresh: Should the effected indexes be refreshed?
        :arg request_cache: Specify if request cache should be used for
            this request or not, defaults to index level setting
        :arg requests_per_second: The throttle for this request in sub-
            requests per second. -1 means no throttle.
        :arg routing: A comma-separated list of specific routing values
        :arg scroll: Specify how long a consistent view of the index
            should be maintained for scrolled search
        :arg scroll_size: Size on the scroll request powering the delete
            by query  Default: 100
        :arg search_timeout: Explicit timeout for each search request.
            Defaults to no timeout.
        :arg search_type: Search operation type  Valid choices:
            query_then_fetch, dfs_query_then_fetch
        :arg size: Deprecated, please use `max_docs` instead
        :arg slices: The number of slices this task should be divided
            into. Defaults to 1, meaning the task isn't sliced into subtasks. Can
            be set to `auto`.  Default: 1
        :arg sort: A comma-separated list of <field>:<direction> pairs
        :arg stats: Specific 'tag' of the request for logging and
            statistical purposes
        :arg terminate_after: The maximum number of documents to collect
            for each shard, upon reaching which the query execution will terminate
            early.
        :arg timeout: Time each individual bulk request should wait for
            shards that are unavailable.  Default: 1m
        :arg version: Specify whether to return document version as part
            of a hit
        :arg wait_for_active_shards: Sets the number of shard copies
            that must be active before proceeding with the delete by query
            operation. Defaults to 1, meaning the primary shard only. Set to `all`
            for all shard copies, otherwise set to any non-negative value less
            than or equal to the total number of copies for the shard (number of
            replicas + 1)
        :arg wait_for_completion: Whether the request should block until
            the delete by query is complete.  Default: True
        """
        if "from_" in params:
            params["from"] = params.pop("from_")

        for param in (index, body):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        return await self.transport.perform_request(
            "POST",
            _make_path(index, doc_type, "_delete_by_query"),
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        "requests_per_second",
        response_mimetypes=["application/json"],
    )
    async def delete_by_query_rethrottle(self, task_id, params=None, headers=None):
        """
        Changes the number of requests per second for a particular Delete By Query
        operation.

        ``_

        :arg task_id: The task id to rethrottle
        :arg requests_per_second: The throttle to set on this request in
            floating sub-requests per second. -1 means set no throttle.
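
        A minimal call sketch (``es`` is an ``AsyncElasticsearch`` instance;
        the task id is illustrative)::

            await es.delete_by_query_rethrottle(
                "r1A2WoRbTwKZ516z6NEs5A:36619", requests_per_second=10
            )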
""" if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") return await self.transport.perform_request( "POST", _make_path("_delete_by_query", task_id, "_rethrottle"), params=params, headers=headers, ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def delete_script(self, id, params=None, headers=None): """ Deletes a script. ``_ :arg id: Script ID :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "DELETE", _make_path("_scripts", id), params=params, headers=headers ) @query_params( "_source", "_source_excludes", "_source_includes", "preference", "realtime", "refresh", "routing", "stored_fields", "version", "version_type", response_mimetypes=["application/json"], ) async def exists(self, index, id, doc_type=None, params=None, headers=None): """ Returns information about whether a document exists in an index. ``_ :arg index: The name of the index :arg id: The document ID :arg doc_type: The type of the document (use `_all` to fetch the first document matching the ID across all types) :arg _source: True or false to return the _source field or not, or a list of fields to return :arg _source_excludes: A list of fields to exclude from the returned _source field :arg _source_includes: A list of fields to extract and return from the _source field :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg realtime: Specify whether to perform the operation in realtime or search mode :arg refresh: Refresh the shard containing the document before performing the operation :arg routing: Specific routing value :arg stored_fields: A comma-separated list of stored fields to return in the response :arg version: Explicit version number for concurrency control :arg version_type: Specific version type Valid choices: internal, external, external_gte, force """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type in SKIP_IN_PATH: doc_type = "_doc" return await self.transport.perform_request( "HEAD", _make_path(index, doc_type, id), params=params, headers=headers ) @query_params( "_source", "_source_excludes", "_source_includes", "preference", "realtime", "refresh", "routing", "version", "version_type", response_mimetypes=["application/json"], ) async def exists_source(self, index, id, doc_type=None, params=None, headers=None): """ Returns information about whether a document source exists in an index. 
        ``_

        :arg index: The name of the index
        :arg id: The document ID
        :arg doc_type: The type of the document; deprecated and optional
            starting with 7.0
        :arg _source: True or false to return the _source field or not,
            or a list of fields to return
        :arg _source_excludes: A list of fields to exclude from the
            returned _source field
        :arg _source_includes: A list of fields to extract and return
            from the _source field
        :arg preference: Specify the node or shard the operation should
            be performed on (default: random)
        :arg realtime: Specify whether to perform the operation in
            realtime or search mode
        :arg refresh: Refresh the shard containing the document before
            performing the operation
        :arg routing: Specific routing value
        :arg version: Explicit version number for concurrency control
        :arg version_type: Specific version type  Valid choices:
            internal, external, external_gte, force
        """
        for param in (index, id):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        if doc_type in SKIP_IN_PATH:
            path = _make_path(index, "_source", id)
        else:
            path = _make_path(index, doc_type, id, "_source")

        return await self.transport.perform_request(
            "HEAD", path, params=params, headers=headers
        )

    @query_params(
        "_source",
        "_source_excludes",
        "_source_includes",
        "analyze_wildcard",
        "analyzer",
        "default_operator",
        "df",
        "lenient",
        "preference",
        "q",
        "routing",
        "stored_fields",
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    async def explain(
        self, index, id, body=None, doc_type=None, params=None, headers=None
    ):
        """
        Returns information about why a specific document matches (or doesn't match) a
        query.

        ``_

        :arg index: The name of the index
        :arg id: The document ID
        :arg body: The query definition using the Query DSL
        :arg doc_type: The type of the document
        :arg _source: True or false to return the _source field or not,
            or a list of fields to return
        :arg _source_excludes: A list of fields to exclude from the
            returned _source field
        :arg _source_includes: A list of fields to extract and return
            from the _source field
        :arg analyze_wildcard: Specify whether wildcards and prefix
            queries in the query string query should be analyzed (default: false)
        :arg analyzer: The analyzer for the query string query
        :arg default_operator: The default operator for query string
            query (AND or OR)  Valid choices: AND, OR  Default: OR
        :arg df: The default field for query string query (default:
            _all)
        :arg lenient: Specify whether format-based query failures (such
            as providing text to a numeric field) should be ignored
        :arg preference: Specify the node or shard the operation should
            be performed on (default: random)
        :arg q: Query in the Lucene query string syntax
        :arg routing: Specific routing value
        :arg stored_fields: A comma-separated list of stored fields to
            return in the response
        """
        for param in (index, id):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        if doc_type in SKIP_IN_PATH:
            path = _make_path(index, "_explain", id)
        else:
            path = _make_path(index, doc_type, id, "_explain")

        return await self.transport.perform_request(
            "POST", path, params=params, headers=headers, body=body
        )

    @query_params(
        "allow_no_indices",
        "expand_wildcards",
        "fields",
        "ignore_unavailable",
        "include_unmapped",
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    async def field_caps(self, body=None, index=None, params=None, headers=None):
        """
        Returns the information about the capabilities of fields among multiple
        indices.

        ``_

        :arg body: An index filter specified with the Query DSL
        :arg index: A comma-separated list of index names; use `_all` or
            empty string to perform the operation on all indices
        :arg allow_no_indices: Whether to ignore if a wildcard indices
            expression resolves into no concrete indices. (This includes `_all`
            string or when no indices have been specified)
        :arg expand_wildcards: Whether to expand wildcard expression to
            concrete indices that are open, closed or both.  Valid choices: open,
            closed, hidden, none, all  Default: open
        :arg fields: A comma-separated list of field names
        :arg ignore_unavailable: Whether specified concrete indices
            should be ignored when unavailable (missing or closed)
        :arg include_unmapped: Indicates whether unmapped fields should
            be included in the response.
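
        A minimal call sketch (``es`` is an ``AsyncElasticsearch`` instance;
        index and field names are illustrative)::

            await es.field_caps(index="my-index", fields="title,body")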
``_ :arg body: An index filter specified with the Query DSL :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg fields: A comma-separated list of field names :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg include_unmapped: Indicates whether unmapped fields should be included in the response. """ return await self.transport.perform_request( "POST", _make_path(index, "_field_caps"), params=params, headers=headers, body=body, ) @query_params( "_source", "_source_excludes", "_source_includes", "preference", "realtime", "refresh", "routing", "stored_fields", "version", "version_type", response_mimetypes=["application/json"], ) async def get(self, index, id, doc_type=None, params=None, headers=None): """ Returns a document. ``_ :arg index: Name of the index that contains the document. :arg id: Unique identifier of the document. :arg doc_type: The type of the document (use `_all` to fetch the first document matching the ID across all types) :arg _source: True or false to return the _source field or not, or a list of fields to return. :arg _source_excludes: A comma-separated list of source fields to exclude in the response. :arg _source_includes: A comma-separated list of source fields to include in the response. :arg preference: Specifies the node or shard the operation should be performed on. Random by default. :arg realtime: (Boolean) If true, the request is real-time as opposed to near-real-time. Default: True :arg refresh: If true, Elasticsearch refreshes the affected shards to make this operation visible to search. If false, do nothing with refreshes. :arg routing: Target the specified primary shard. :arg stored_fields: A comma-separated list of stored fields to return in the response :arg version: Explicit version number for concurrency control. The specified version must match the current version of the document for the request to succeed. :arg version_type: Specific version type: internal, external, external_gte. Valid choices: internal, external, external_gte, force """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type in SKIP_IN_PATH: doc_type = "_doc" return await self.transport.perform_request( "GET", _make_path(index, doc_type, id), params=params, headers=headers ) @query_params( "master_timeout", response_mimetypes=["application/json"], ) async def get_script(self, id, params=None, headers=None): """ Returns a script. ``_ :arg id: Script ID :arg master_timeout: Specify timeout for connection to master """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "GET", _make_path("_scripts", id), params=params, headers=headers ) @query_params( "_source", "_source_excludes", "_source_includes", "preference", "realtime", "refresh", "routing", "version", "version_type", response_mimetypes=["application/json"], ) async def get_source(self, index, id, doc_type=None, params=None, headers=None): """ Returns the source of a document.
``_ :arg index: The name of the index :arg id: The document ID :arg doc_type: The type of the document; deprecated and optional starting with 7.0 :arg _source: True or false to return the _source field or not, or a list of fields to return :arg _source_excludes: A list of fields to exclude from the returned _source field :arg _source_includes: A list of fields to extract and return from the _source field :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg realtime: Specify whether to perform the operation in realtime or search mode :arg refresh: Refresh the shard containing the document before performing the operation :arg routing: Specific routing value :arg version: Explicit version number for concurrency control :arg version_type: Specific version type Valid choices: internal, external, external_gte, force """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type in SKIP_IN_PATH: path = _make_path(index, "_source", id) else: path = _make_path(index, doc_type, id, "_source") return await self.transport.perform_request( "GET", path, params=params, headers=headers ) @query_params( "_source", "_source_excludes", "_source_includes", "preference", "realtime", "refresh", "routing", "stored_fields", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def mget(self, body, index=None, doc_type=None, params=None, headers=None): """ Allows to get multiple documents in one request. ``_ :arg body: Document identifiers; can be either `docs` (containing full document information) or `ids` (when index and type are provided in the URL). :arg index: The name of the index :arg doc_type: The type of the document :arg _source: True or false to return the _source field or not, or a list of fields to return :arg _source_excludes: A list of fields to exclude from the returned _source field :arg _source_includes: A list of fields to extract and return from the _source field :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg realtime: Specify whether to perform the operation in realtime or search mode :arg refresh: Refresh the shard containing the document before performing the operation :arg routing: Specific routing value :arg stored_fields: A comma-separated list of stored fields to return in the response """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", _make_path(index, doc_type, "_mget"), params=params, headers=headers, body=body, ) @query_params( "ccs_minimize_roundtrips", "max_concurrent_searches", "max_concurrent_shard_requests", "pre_filter_shard_size", "rest_total_hits_as_int", "search_type", "typed_keys", request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) async def msearch(self, body, index=None, doc_type=None, params=None, headers=None): """ Allows to execute several search operations in one request.
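Example (an illustrative sketch; ``es`` and the index names are assumed). A list of alternating header/body dicts is serialized to NDJSON by ``_bulk_body``::

    body = [
        {"index": "my-index"},                      # header for search #1
        {"query": {"match_all": {}}},               # body for search #1
        {"index": "other-index"},                   # header for search #2
        {"query": {"match": {"title": "python"}}},  # body for search #2
    ]
    resp = await es.msearch(body=body)
    # resp["responses"] holds one entry per sub-search, in request order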
``_ :arg body: The request definitions (metadata-search request definition pairs), separated by newlines :arg index: A comma-separated list of index names to use as default :arg doc_type: A comma-separated list of document types to use as default :arg ccs_minimize_roundtrips: Indicates whether network round-trips should be minimized as part of cross-cluster search requests execution Default: true :arg max_concurrent_searches: Controls the maximum number of concurrent searches the multi search api will execute :arg max_concurrent_shard_requests: The number of concurrent shard requests each sub search executes concurrently per node. This value should be used to limit the impact of the search on the cluster in order to limit the number of concurrent shard requests Default: 5 :arg pre_filter_shard_size: A threshold that enforces a pre-filter roundtrip to prefilter search shards based on query rewriting if the number of shards the search request expands to exceeds the threshold. This filter roundtrip can limit the number of shards significantly if for instance a shard cannot match any documents based on its rewrite method, i.e. if date filters are mandatory to match but the shard bounds and the query are disjoint. :arg rest_total_hits_as_int: Indicates whether hits.total should be rendered as an integer or an object in the rest search response :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return await self.transport.perform_request( "POST", _make_path(index, doc_type, "_msearch"), params=params, headers=headers, body=body, ) @query_params( "ccs_minimize_roundtrips", "max_concurrent_searches", "rest_total_hits_as_int", "search_type", "typed_keys", request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) async def msearch_template( self, body, index=None, doc_type=None, params=None, headers=None ): """ Allows to execute several search template operations in one request.
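Example (an illustrative sketch; ``es``, the index name and the stored template id are assumed)::

    body = [
        {"index": "my-index"},
        {"id": "my-stored-template", "params": {"query_string": "python"}},
    ]
    resp = await es.msearch_template(body=body)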
``_ :arg body: The request definitions (metadata-search request definition pairs), separated by newlines :arg index: A comma-separated list of index names to use as default :arg doc_type: A comma-separated list of document types to use as default :arg ccs_minimize_roundtrips: Indicates whether network round-trips should be minimized as part of cross-cluster search requests execution Default: true :arg max_concurrent_searches: Controls the maximum number of concurrent searches the multi search api will execute :arg rest_total_hits_as_int: Indicates whether hits.total should be rendered as an integer or an object in the rest search response :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return await self.transport.perform_request( "POST", _make_path(index, doc_type, "_msearch", "template"), params=params, headers=headers, body=body, ) @query_params( "field_statistics", "fields", "ids", "offsets", "payloads", "positions", "preference", "realtime", "routing", "term_statistics", "version", "version_type", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def mtermvectors( self, body=None, index=None, doc_type=None, params=None, headers=None ): """ Returns multiple termvectors in one request. ``_ :arg body: Define ids, documents, parameters or a list of parameters per document here. You must at least provide a list of document ids. See documentation. :arg index: The index in which the document resides. :arg doc_type: The type of the document. :arg field_statistics: Specifies if document count, sum of document frequencies and sum of total term frequencies should be returned. Applies to all returned documents unless otherwise specified in body "params" or "docs". Default: True :arg fields: A comma-separated list of fields to return. Applies to all returned documents unless otherwise specified in body "params" or "docs". :arg ids: A comma-separated list of document ids. You must define ids as parameter or set "ids" or "docs" in the request body :arg offsets: Specifies if term offsets should be returned. Applies to all returned documents unless otherwise specified in body "params" or "docs". Default: True :arg payloads: Specifies if term payloads should be returned. Applies to all returned documents unless otherwise specified in body "params" or "docs". Default: True :arg positions: Specifies if term positions should be returned. Applies to all returned documents unless otherwise specified in body "params" or "docs". Default: True :arg preference: Specify the node or shard the operation should be performed on (default: random). Applies to all returned documents unless otherwise specified in body "params" or "docs". :arg realtime: Specifies if requests are real-time as opposed to near-real-time (default: true). :arg routing: Specific routing value. Applies to all returned documents unless otherwise specified in body "params" or "docs". :arg term_statistics: Specifies if total term frequency and document frequency should be returned. Applies to all returned documents unless otherwise specified in body "params" or "docs".
:arg version: Explicit version number for concurrency control :arg version_type: Specific version type Valid choices: internal, external, external_gte, force """ if doc_type in SKIP_IN_PATH: path = _make_path(index, "_mtermvectors") else: path = _make_path(index, doc_type, "_mtermvectors") return await self.transport.perform_request( "POST", path, params=params, headers=headers, body=body ) @query_params( "master_timeout", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_script(self, id, body, context=None, params=None, headers=None): """ Creates or updates a script. ``_ :arg id: Script ID :arg body: The document :arg context: Script context :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ for param in (id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_scripts", id, context), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "search_type", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def rank_eval(self, body, index=None, params=None, headers=None): """ Allows to evaluate the quality of ranked search results over a set of typical search queries ``_ :arg body: The ranking evaluation search definition, including search requests, document ratings and ranking metric definition. :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", _make_path(index, "_rank_eval"), params=params, headers=headers, body=body, ) @query_params( "max_docs", "refresh", "requests_per_second", "scroll", "slices", "timeout", "wait_for_active_shards", "wait_for_completion", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def reindex(self, body, params=None, headers=None): """ Allows to copy documents from one index to another, optionally filtering the source documents by a query, changing the destination index settings, or fetching the documents from a remote cluster. ``_ :arg body: The search definition using the Query DSL and the prototype for the index request. :arg max_docs: Maximum number of documents to process (default: all documents) :arg refresh: Should the affected indexes be refreshed? :arg requests_per_second: The throttle to set on this request in sub-requests per second. -1 means no throttle. :arg scroll: Control how long to keep the search context alive Default: 5m :arg slices: The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be set to `auto`. 
Default: 1 :arg timeout: Time each individual bulk request should wait for shards that are unavailable. Default: 1m :arg wait_for_active_shards: Sets the number of shard copies that must be active before proceeding with the reindex operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) :arg wait_for_completion: Should the request block until the reindex is complete. Default: True """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_reindex", params=params, headers=headers, body=body ) @query_params( "requests_per_second", response_mimetypes=["application/json"], ) async def reindex_rethrottle(self, task_id, params=None, headers=None): """ Changes the number of requests per second for a particular Reindex operation. ``_ :arg task_id: The task id to rethrottle :arg requests_per_second: The throttle to set on this request in floating sub-requests per second. -1 means set no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") return await self.transport.perform_request( "POST", _make_path("_reindex", task_id, "_rethrottle"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def render_search_template( self, body=None, id=None, params=None, headers=None ): """ Allows to use the Mustache language to pre-render a search definition. ``_ :arg body: The search definition template and its params :arg id: The id of the stored search template """ return await self.transport.perform_request( "POST", _make_path("_render", "template", id), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def scripts_painless_execute(self, body=None, params=None, headers=None): """ Allows an arbitrary script to be executed and a result to be returned ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg body: The script to execute """ return await self.transport.perform_request( "POST", "/_scripts/painless/_execute", params=params, headers=headers, body=body, ) @query_params( "rest_total_hits_as_int", "scroll", "scroll_id", request_mimetypes=["application/json"], response_mimetypes=["application/json"], body_params=["scroll", "scroll_id"], ) async def scroll(self, body=None, scroll_id=None, params=None, headers=None): """ Allows to retrieve a large number of results from a single search request. ``_ :arg body: The scroll ID if not passed by URL or query parameter. :arg scroll_id: The scroll ID :arg rest_total_hits_as_int: If true, the API response’s hit.total property is returned as an integer. If false, the API response’s hit.total property is returned as an object. :arg scroll: Period to retain the search context for scrolling.
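Example (an illustrative sketch; ``es`` and the index name are assumed)::

    resp = await es.search(index="my-index", body={"query": {"match_all": {}}}, scroll="2m")
    while resp["hits"]["hits"]:
        # keep paging with the id returned by the previous call
        resp = await es.scroll(body={"scroll_id": resp["_scroll_id"], "scroll": "2m"})
    # free the server-side search context when done
    await es.clear_scroll(body={"scroll_id": resp["_scroll_id"]})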
""" if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'scroll_id'.") elif scroll_id and not body: body = {"scroll_id": scroll_id} elif scroll_id: params["scroll_id"] = scroll_id return await self.transport.perform_request( "POST", "/_search/scroll", params=params, headers=headers, body=body ) @query_params( "_source", "_source_excludes", "_source_includes", "allow_no_indices", "allow_partial_search_results", "analyze_wildcard", "analyzer", "batched_reduce_size", "ccs_minimize_roundtrips", "default_operator", "df", "docvalue_fields", "expand_wildcards", "explain", "from_", "ignore_throttled", "ignore_unavailable", "lenient", "max_concurrent_shard_requests", "min_compatible_shard_node", "pre_filter_shard_size", "preference", "q", "request_cache", "rest_total_hits_as_int", "routing", "scroll", "search_type", "seq_no_primary_term", "size", "sort", "stats", "stored_fields", "suggest_field", "suggest_mode", "suggest_size", "suggest_text", "terminate_after", "timeout", "track_scores", "track_total_hits", "typed_keys", "version", request_mimetypes=["application/json"], response_mimetypes=["application/json"], body_params=[ "_source", "aggregations", "aggs", "collapse", "docvalue_fields", "explain", "fields", "from_", "highlight", "indices_boost", "min_score", "pit", "post_filter", "profile", "query", "rescore", "runtime_mappings", "script_fields", "search_after", "seq_no_primary_term", "size", "slice", "sort", "stats", "stored_fields", "suggest", "terminate_after", "timeout", "track_scores", "track_total_hits", "version", ], ) async def search( self, body=None, index=None, doc_type=None, params=None, headers=None ): """ Returns results matching a query. ``_ :arg body: The search definition using the Query DSL :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg _source: Indicates which source fields are returned for matching documents. These fields are returned in the hits._source property of the search response. :arg _source_excludes: A list of fields to exclude from the returned _source field :arg _source_includes: A list of fields to extract and return from the _source field :arg aggregations: :arg aggs: :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg allow_partial_search_results: Indicate if an error should be returned if there is a partial search failure or timeout Default: True :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg batched_reduce_size: The number of shard results that should be reduced at once on the coordinating node. This value should be used as a protection mechanism to reduce the memory overhead per search request if the potential number of shards in the request can be large. 
Default: 512 :arg ccs_minimize_roundtrips: Indicates whether network round-trips should be minimized as part of cross-cluster search requests execution Default: true :arg collapse: :arg default_operator: The default operator for query string query (AND or OR) Valid choices: AND, OR Default: OR :arg df: The field to use as default where no field prefix is given in the query string :arg docvalue_fields: Array of wildcard (*) patterns. The request returns doc values for field names matching these patterns in the hits.fields property of the response. :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg explain: If true, returns detailed information about score computation as part of a hit. :arg fields: Array of wildcard (*) patterns. The request returns values for field names matching these patterns in the hits.fields property of the response. :arg from_: Starting document offset. By default, you cannot page through more than 10,000 hits using the from and size parameters. To page through more hits, use the search_after parameter. :arg highlight: :arg ignore_throttled: Whether specified concrete, expanded or aliased indices should be ignored when throttled :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg indices_boost: Boosts the _score of documents from specified indices. :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg max_concurrent_shard_requests: The number of concurrent shard requests per node this search executes concurrently. This value should be used to limit the impact of the search on the cluster in order to limit the number of concurrent shard requests Default: 5 :arg min_compatible_shard_node: The minimum compatible version that all shards involved in search should have for this request to be successful :arg min_score: Minimum _score for matching documents. Documents with a lower _score are not included in the search results. :arg pit: Limits the search to a point in time (PIT). If you provide a PIT, you cannot specify an <index> in the request path. :arg post_filter: :arg pre_filter_shard_size: A threshold that enforces a pre-filter roundtrip to prefilter search shards based on query rewriting if the number of shards the search request expands to exceeds the threshold. This filter roundtrip can limit the number of shards significantly if for instance a shard cannot match any documents based on its rewrite method, i.e. if date filters are mandatory to match but the shard bounds and the query are disjoint. :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg profile: :arg q: Query in the Lucene query string syntax :arg query: Defines the search definition using the Query DSL. :arg request_cache: Specify if request cache should be used for this request or not, defaults to index level setting :arg rescore: :arg rest_total_hits_as_int: Indicates whether hits.total should be rendered as an integer or an object in the rest search response :arg routing: A comma-separated list of specific routing values :arg runtime_mappings: Defines one or more runtime fields in the search request. These fields take precedence over mapped fields with the same name. :arg script_fields: Retrieve a script evaluation (based on different fields) for each hit.
:arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg search_after: :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg seq_no_primary_term: If true, returns sequence number and primary term of the last modification of each hit. See Optimistic concurrency control. :arg size: The number of hits to return. By default, you cannot page through more than 10,000 hits using the from and size parameters. To page through more hits, use the search_after parameter. :arg slice: :arg sort: :arg stats: Stats groups to associate with the search. Each group maintains a statistics aggregation for its associated searches. You can retrieve these stats using the indices stats API. :arg stored_fields: List of stored fields to return as part of a hit. If no fields are specified, no stored fields are included in the response. If this field is specified, the _source parameter defaults to false. You can pass _source: true to return both source fields and stored fields in the search response. :arg suggest: :arg suggest_field: Specifies which field to use for suggestions. :arg suggest_mode: Specify suggest mode Valid choices: missing, popular, always Default: missing :arg suggest_size: How many suggestions to return in response :arg suggest_text: The source text for which the suggestions should be returned. :arg terminate_after: Maximum number of documents to collect for each shard. If a query reaches this limit, Elasticsearch terminates the query early. Elasticsearch collects documents before sorting. Defaults to 0, which does not terminate query execution early. :arg timeout: Specifies the period of time to wait for a response from each shard. If no response is received before the timeout expires, the request fails and returns an error. Defaults to no timeout. :arg track_scores: If true, calculate and return document scores, even if the scores are not used for sorting. :arg track_total_hits: Number of hits matching the query to count accurately. If true, the exact number of hits is returned at the cost of some performance. If false, the response does not include the total number of hits matching the query. Defaults to 10,000 hits. :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response :arg version: If true, returns document version as part of a hit. """ if "from_" in params: params["from"] = params.pop("from_") return await self.transport.perform_request( "POST", _make_path(index, doc_type, "_search"), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "local", "preference", "routing", response_mimetypes=["application/json"], ) async def search_shards(self, index=None, params=None, headers=None): """ Returns information about the indices and shards that a search request would be executed against. ``_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg routing: Specific routing value """ return await self.transport.perform_request( "GET", _make_path(index, "_search_shards"), params=params, headers=headers ) @query_params( "allow_no_indices", "ccs_minimize_roundtrips", "expand_wildcards", "explain", "ignore_throttled", "ignore_unavailable", "preference", "profile", "rest_total_hits_as_int", "routing", "scroll", "search_type", "typed_keys", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def search_template( self, body, index=None, doc_type=None, params=None, headers=None ): """ Allows to use the Mustache language to pre-render a search definition. ``_ :arg body: The search definition template and its params :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg ccs_minimize_roundtrips: Indicates whether network round-trips should be minimized as part of cross-cluster search requests execution Default: true :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg explain: Specify whether to return detailed information about score computation as part of a hit :arg ignore_throttled: Whether specified concrete, expanded or aliased indices should be ignored when throttled :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg profile: Specify whether to profile the query execution :arg rest_total_hits_as_int: Indicates whether hits.total should be rendered as an integer or an object in the rest search response :arg routing: A comma-separated list of specific routing values :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", _make_path(index, doc_type, "_search", "template"), params=params, headers=headers, body=body, ) @query_params( "field_statistics", "fields", "offsets", "payloads", "positions", "preference", "realtime", "routing", "term_statistics", "version", "version_type", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def termvectors( self, index, body=None, doc_type=None, id=None, params=None, headers=None ): """ Returns information and statistics about terms in the fields of a particular document.
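Example (an illustrative sketch; ``es`` and the index/document names are assumed)::

    resp = await es.termvectors(index="my-index", id="1", fields="title", term_statistics=True)
    # per-term stats live under resp["term_vectors"]["title"]["terms"]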
``_ :arg index: The index in which the document resides. :arg body: Define parameters and/or supply a document to get termvectors for. See documentation. :arg doc_type: The type of the document. :arg id: The id of the document; when not specified, a doc param should be supplied. :arg field_statistics: Specifies if document count, sum of document frequencies and sum of total term frequencies should be returned. Default: True :arg fields: A comma-separated list of fields to return. :arg offsets: Specifies if term offsets should be returned. Default: True :arg payloads: Specifies if term payloads should be returned. Default: True :arg positions: Specifies if term positions should be returned. Default: True :arg preference: Specify the node or shard the operation should be performed on (default: random). :arg realtime: Specifies if request is real-time as opposed to near-real-time (default: true). :arg routing: Specific routing value. :arg term_statistics: Specifies if total term frequency and document frequency should be returned. :arg version: Explicit version number for concurrency control :arg version_type: Specific version type Valid choices: internal, external, external_gte, force """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") if doc_type in SKIP_IN_PATH: path = _make_path(index, "_termvectors", id) else: path = _make_path(index, doc_type, id, "_termvectors") return await self.transport.perform_request( "POST", path, params=params, headers=headers, body=body ) @query_params( "_source", "_source_excludes", "_source_includes", "if_primary_term", "if_seq_no", "lang", "refresh", "require_alias", "retry_on_conflict", "routing", "timeout", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], body_params=[ "_source", "detect_noop", "doc", "doc_as_upsert", "script", "scripted_upsert", "upsert", ], body_required=True, ) async def update(self, index, id, body, doc_type=None, params=None, headers=None): """ Updates a document with a script or partial document. ``_ :arg index: The name of the index :arg id: Document ID :arg body: The request definition requires either `script` or partial `doc` :arg doc_type: The type of the document :arg _source: Set to false to disable source retrieval. You can also specify a comma-separated list of the fields you want to retrieve. :arg _source_excludes: Specify the source fields you want to exclude. :arg _source_includes: Specify the source fields you want to retrieve. :arg detect_noop: Set to false to disable setting 'result' in the response to 'noop' if no change to the document occurred. :arg doc: A partial update to an existing document. :arg doc_as_upsert: Set to true to use the contents of 'doc' as the value of 'upsert' :arg if_primary_term: Only perform the operation if the document has this primary term. :arg if_seq_no: Only perform the operation if the document has this sequence number. :arg lang: The script language. Default: painless :arg refresh: If 'true', Elasticsearch refreshes the affected shards to make this operation visible to search, if 'wait_for' then wait for a refresh to make this operation visible to search, if 'false' do nothing with refreshes. Valid choices: true, false, wait_for Default: false :arg require_alias: If true, the destination must be an index alias. :arg retry_on_conflict: Specify how many times the operation should be retried when a conflict occurs.
:arg routing: Custom value used to route operations to a specific shard. :arg script: Script to execute to update the document. :arg scripted_upsert: Set to true to execute the script whether or not the document exists. :arg timeout: Period to wait for dynamic mapping updates and active shards. This guarantees Elasticsearch waits for at least the timeout before failing. The actual wait time could be longer, particularly when multiple waits occur. Default: 1m :arg upsert: If the document does not already exist, the contents of 'upsert' are inserted as a new document. If the document exists, the 'script' is executed. :arg wait_for_active_shards: The number of shard copies that must be active before proceeding with the operations. Set to 'all' or any positive integer up to the total number of shards in the index (number_of_replicas+1). Defaults to 1 meaning the primary shard. Default: 1 """ for param in (index, id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type in SKIP_IN_PATH: path = _make_path(index, "_update", id) else: path = _make_path(index, doc_type, id, "_update") return await self.transport.perform_request( "POST", path, params=params, headers=headers, body=body ) @query_params( "allow_no_indices", "analyze_wildcard", "analyzer", "conflicts", "default_operator", "df", "expand_wildcards", "from_", "ignore_unavailable", "lenient", "max_docs", "pipeline", "preference", "q", "refresh", "request_cache", "requests_per_second", "routing", "scroll", "scroll_size", "search_timeout", "search_type", "size", "slices", "sort", "stats", "terminate_after", "timeout", "version", "version_type", "wait_for_active_shards", "wait_for_completion", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def update_by_query( self, index, body=None, doc_type=None, params=None, headers=None ): """ Performs an update on every document in the index without changing the source, for example to pick up a mapping change. ``_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg body: The search definition using the Query DSL :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg conflicts: What to do when the update by query hits version conflicts? Valid choices: abort, proceed Default: abort :arg default_operator: The default operator for query string query (AND or OR) Valid choices: AND, OR Default: OR :arg df: The field to use as default where no field prefix is given in the query string :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, hidden, none, all Default: open :arg from_: Starting offset (default: 0) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg max_docs: Maximum number of documents to process (default: all documents) :arg pipeline: Ingest pipeline to set on index requests made by this action. (default: none) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg q: Query in the Lucene query string syntax :arg refresh: Should the affected indexes be refreshed? :arg request_cache: Specify if request cache should be used for this request or not, defaults to index level setting :arg requests_per_second: The throttle to set on this request in sub-requests per second. -1 means no throttle. :arg routing: A comma-separated list of specific routing values :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg scroll_size: Size on the scroll request powering the update by query Default: 100 :arg search_timeout: Explicit timeout for each search request. Defaults to no timeout. :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg size: Deprecated, please use `max_docs` instead :arg slices: The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be set to `auto`. Default: 1 :arg sort: A comma-separated list of <field>:<direction> pairs :arg stats: Specific 'tag' of the request for logging and statistical purposes :arg terminate_after: The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early. :arg timeout: Time each individual bulk request should wait for shards that are unavailable. Default: 1m :arg version: Specify whether to return document version as part of a hit :arg version_type: Should the document increment the version number (internal) on hit or not (reindex) :arg wait_for_active_shards: Sets the number of shard copies that must be active before proceeding with the update by query operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) :arg wait_for_completion: Should the request block until the update by query operation is complete. Default: True """ if "from_" in params: params["from"] = params.pop("from_") if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, doc_type, "_update_by_query"), params=params, headers=headers, body=body, ) @query_params( "requests_per_second", response_mimetypes=["application/json"], ) async def update_by_query_rethrottle(self, task_id, params=None, headers=None): """ Changes the number of requests per second for a particular Update By Query operation. ``_ :arg task_id: The task id to rethrottle :arg requests_per_second: The throttle to set on this request in floating sub-requests per second. -1 means set no throttle.
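Example (an illustrative sketch; ``es``, the index name and the script are assumed)::

    # start an async update-by-query, then raise its throttle later
    resp = await es.update_by_query(
        index="my-index",
        body={"script": {"source": "ctx._source.views = 0"}},
        wait_for_completion=False,
        requests_per_second=10,
    )
    await es.update_by_query_rethrottle(task_id=resp["task"], requests_per_second=100)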
""" if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") return await self.transport.perform_request( "POST", _make_path("_update_by_query", task_id, "_rethrottle"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def get_script_context(self, params=None, headers=None): """ Returns all script contexts. ``_ """ return await self.transport.perform_request( "GET", "/_script_context", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_script_languages(self, params=None, headers=None): """ Returns available script types, languages and contexts ``_ """ return await self.transport.perform_request( "GET", "/_script_language", params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def close_point_in_time(self, body=None, params=None, headers=None): """ Close a point in time ``_ :arg body: a point-in-time id to close """ return await self.transport.perform_request( "DELETE", "/_pit", params=params, headers=headers, body=body ) @query_params( "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing", response_mimetypes=["application/json"], ) async def open_point_in_time(self, index, params=None, headers=None): """ Open a point in time that can be used in subsequent searches ``_ :arg index: A comma-separated list of index names to open point in time; use `_all` or empty string to perform the operation on all indices :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg keep_alive: Specific the time to live for the point in time :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg routing: Specific routing value """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_pit"), params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def terms_enum(self, index, body=None, params=None, headers=None): """ The terms enum API can be used to discover terms in the index that begin with the provided string. It is designed for low-latency look-ups used in auto- complete scenarios. 
``_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg body: field name, string which is the prefix expected in matching terms, timeout and size for max number of results """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_terms_enum"), params=params, headers=headers, body=body, ) @query_params( "exact_bounds", "extent", "grid_precision", "grid_type", "size", request_mimetypes=["application/json"], response_mimetypes=["application/vnd.mapbox-vector-tile"], body_params=[ "aggs", "exact_bounds", "extent", "fields", "grid_precision", "grid_type", "query", "runtime_mappings", "size", "sort", ], ) async def search_mvt( self, index, field, zoom, x, y, body=None, params=None, headers=None ): """ Searches a vector tile for geospatial values. Returns results as a binary Mapbox vector tile. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: Comma-separated list of data streams, indices, or aliases to search :arg field: Field containing geospatial data to return :arg zoom: Zoom level for the vector tile to search :arg x: X coordinate for the vector tile to search :arg y: Y coordinate for the vector tile to search :arg body: Search request body. :arg aggs: Sub-aggregations for the geotile_grid. Supports the following aggregation types: - avg - cardinality - max - min - sum :arg exact_bounds: If false, the meta layer’s feature is the bounding box of the tile. If true, the meta layer’s feature is a bounding box resulting from a geo_bounds aggregation. The aggregation runs on values that intersect the <zoom>/<x>/<y> tile with wrap_longitude set to false. The resulting bounding box may be larger than the vector tile. :arg extent: Size, in pixels, of a side of the tile. Vector tiles are square with equal sides. :arg fields: Fields to return in the `hits` layer. Supports wildcards (`*`). This parameter does not support fields with array values. Fields with array values may return inconsistent results. :arg grid_precision: Additional zoom levels available through the aggs layer. For example, if <zoom> is 7 and grid_precision is 8, you can zoom in up to level 15. Accepts 0-8. If 0, results don’t include the aggs layer. :arg grid_type: Determines the geometry type for features in the aggs layer. In the aggs layer, each feature represents a geotile_grid cell. If 'grid' each feature is a Polygon of the cell's bounding box. If 'point' each feature is a Point that is the centroid of the cell. :arg query: Query DSL used to filter documents for the search. :arg runtime_mappings: Defines one or more runtime fields in the search request. These fields take precedence over mapped fields with the same name. :arg size: Maximum number of features to return in the hits layer. Accepts 0-10000. If 0, results don’t include the hits layer. :arg sort: Sorts features in the hits layer. By default, the API calculates a bounding box for each feature. It sorts features based on this box’s diagonal length, from longest to shortest.
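Example (an illustrative sketch; ``es``, the index, the geo field and the tile coordinates are assumed)::

    tile = await es.search_mvt(index="museums", field="location", zoom=13, x=4207, y=2692)
    # the response mimetype is application/vnd.mapbox-vector-tile, so ``tile``
    # is the raw binary tile rather than a decoded JSON dict
    with open("tile.mvt", "wb") as f:
        f.write(tile)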
""" for param in (index, field, zoom, x, y): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path(index, "_mvt", field, zoom, x, y), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/__init__.pyi000066400000000000000000001500741426163262700247220ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import unicode_literals import logging from typing import ( Any, Collection, Dict, List, Mapping, MutableMapping, Optional, Sequence, Tuple, Type, Union, ) from typing_extensions import Literal from ..transport import AsyncTransport from .async_search import AsyncSearchClient from .autoscaling import AutoscalingClient from .cat import CatClient from .ccr import CcrClient from .cluster import ClusterClient from .dangling_indices import DanglingIndicesClient from .enrich import EnrichClient from .eql import EqlClient from .features import FeaturesClient from .graph import GraphClient from .ilm import IlmClient from .indices import IndicesClient from .ingest import IngestClient from .license import LicenseClient from .logstash import LogstashClient from .migration import MigrationClient from .ml import MlClient from .monitoring import MonitoringClient from .nodes import NodesClient from .remote import RemoteClient from .rollup import RollupClient from .searchable_snapshots import SearchableSnapshotsClient from .security import SecurityClient from .shutdown import ShutdownClient from .slm import SlmClient from .snapshot import SnapshotClient from .sql import SqlClient from .ssl import SslClient from .tasks import TasksClient from .text_structure import TextStructureClient from .transform import TransformClient from .watcher import WatcherClient from .xpack import XPackClient logger: logging.Logger class AsyncElasticsearch(object): transport: AsyncTransport async_search: AsyncSearchClient autoscaling: AutoscalingClient cat: CatClient ccr: CcrClient cluster: ClusterClient dangling_indices: DanglingIndicesClient enrich: EnrichClient eql: EqlClient features: FeaturesClient graph: GraphClient ilm: IlmClient indices: IndicesClient ingest: IngestClient license: LicenseClient logstash: LogstashClient migration: MigrationClient ml: MlClient monitoring: MonitoringClient nodes: NodesClient remote: RemoteClient rollup: RollupClient searchable_snapshots: SearchableSnapshotsClient security: SecurityClient shutdown: ShutdownClient slm: SlmClient snapshot: SnapshotClient sql: SqlClient ssl: SslClient tasks: TasksClient text_structure: TextStructureClient transform: TransformClient watcher: WatcherClient xpack: XPackClient def __init__( self, hosts: Any = ..., transport_class: Type[AsyncTransport] = 
..., **kwargs: Any, ) -> None: ... def __repr__(self) -> str: ... async def __aenter__(self) -> "AsyncElasticsearch": ... async def __aexit__(self, *_: Any) -> None: ... async def close(self) -> None: ... # AUTO-GENERATED-API-DEFINITIONS # async def ping( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... async def info( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def create( self, *, index: str, id: str, document: Any, doc_type: Optional[str] = ..., pipeline: Optional[str] = ..., refresh: Optional[Union[Literal["false", "true", "wait_for"], bool, str]] = ..., routing: Optional[str] = ..., timeout: Optional[Union[int, str]] = ..., version: Optional[int] = ..., version_type: Optional[ Union[Literal["external", "external_gte", "force", "internal"], str] ] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def index( self, *, index: str, document: Any, doc_type: Optional[str] = ..., id: Optional[str] = ..., if_primary_term: Optional[int] = ..., if_seq_no: Optional[int] = ..., op_type: Optional[Union[Literal["create", "index"], str]] = ..., pipeline: Optional[str] = ..., refresh: Optional[Union[Literal["false", "true", "wait_for"], bool, str]] = ..., require_alias: Optional[bool] = ..., routing: Optional[str] = ..., timeout: Optional[Union[int, str]] = ..., version: Optional[int] = ..., version_type: Optional[ Union[Literal["external", "external_gte", "force", "internal"], str] ] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def bulk( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], index: Optional[Any] = ..., doc_type: Optional[Any] = ..., _source: Optional[Any] = ..., _source_excludes: Optional[Any] = ..., _source_includes: Optional[Any] = ..., pipeline: Optional[Any] = ..., refresh: Optional[Any] = ..., require_alias: Optional[bool] = ..., routing: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def clear_scroll( self, *, body: Optional[Mapping[str, Any]] = ..., scroll_id: Optional[Union[List[str], str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def count( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Any] = ..., doc_type: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[Any] = ..., default_operator: Optional[Any] = ..., df: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., lenient: Optional[bool] = ..., min_score: Optional[Any] = ..., preference: Optional[Any] = ..., q: Optional[Any] = ..., routing: Optional[Any] = ..., terminate_after: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete( self, *, index: str, id: str, doc_type: Optional[str] = ..., if_primary_term: Optional[int] = ..., if_seq_no: Optional[int] = ..., refresh: Optional[Union[Literal["false", "true", "wait_for"], bool, str]] = ..., routing: Optional[str] = ..., timeout: Optional[Union[int, str]] = ..., version: Optional[int] = ..., version_type: Optional[ Union[Literal["external", "external_gte", "force", "internal"], str] ] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
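    # --- Editor's illustrative sketch (not part of the generated stub) ---
    # `count` takes an optional Query DSL body; `delete` addresses a single
    # document by index and id. Names below are assumptions.
    #
    #   total = await es.count(index="my-index", body={"query": {"match_all": {}}})
    #   print(total["count"])
    #   await es.delete(index="my-index", id="1", refresh="wait_for")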
async def delete_by_query( self, *, index: Any, body: Mapping[str, Any], doc_type: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[Any] = ..., conflicts: Optional[Any] = ..., default_operator: Optional[Any] = ..., df: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., from_: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., lenient: Optional[bool] = ..., max_docs: Optional[Any] = ..., preference: Optional[Any] = ..., q: Optional[Any] = ..., refresh: Optional[bool] = ..., request_cache: Optional[bool] = ..., requests_per_second: Optional[Any] = ..., routing: Optional[Any] = ..., scroll: Optional[Any] = ..., scroll_size: Optional[Any] = ..., search_timeout: Optional[Any] = ..., search_type: Optional[Any] = ..., size: Optional[Any] = ..., slices: Optional[Any] = ..., sort: Optional[Any] = ..., stats: Optional[Any] = ..., terminate_after: Optional[Any] = ..., timeout: Optional[Any] = ..., version: Optional[bool] = ..., wait_for_active_shards: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_by_query_rethrottle( self, *, task_id: Any, requests_per_second: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_script( self, *, id: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
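    # --- Editor's illustrative sketch (not part of the generated stub) ---
    # `delete_by_query` requires both `index` and a Query DSL `body`; the field
    # name used in the range query is hypothetical.
    #
    #   resp = await es.delete_by_query(
    #       index="my-index",
    #       body={"query": {"range": {"age": {"lt": 18}}}},
    #       conflicts="proceed",   # skip version conflicts instead of aborting
    #   )
    #   print(resp["deleted"])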
async def exists( self, *, index: str, id: str, doc_type: Optional[str] = ..., _source: Optional[Union[Union[List[str], str], bool]] = ..., _source_excludes: Optional[Union[List[str], str]] = ..., _source_includes: Optional[Union[List[str], str]] = ..., preference: Optional[str] = ..., realtime: Optional[bool] = ..., refresh: Optional[bool] = ..., routing: Optional[str] = ..., stored_fields: Optional[Union[List[str], str]] = ..., version: Optional[int] = ..., version_type: Optional[ Union[Literal["external", "external_gte", "force", "internal"], str] ] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... async def exists_source( self, *, index: Any, id: Any, doc_type: Optional[Any] = ..., _source: Optional[Any] = ..., _source_excludes: Optional[Any] = ..., _source_includes: Optional[Any] = ..., preference: Optional[Any] = ..., realtime: Optional[bool] = ..., refresh: Optional[bool] = ..., routing: Optional[Any] = ..., version: Optional[Any] = ..., version_type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... async def explain( self, *, index: Any, id: Any, body: Optional[Mapping[str, Any]] = ..., doc_type: Optional[Any] = ..., _source: Optional[Any] = ..., _source_excludes: Optional[Any] = ..., _source_includes: Optional[Any] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[Any] = ..., default_operator: Optional[Any] = ..., df: Optional[Any] = ..., lenient: Optional[bool] = ..., preference: Optional[Any] = ..., q: Optional[Any] = ..., routing: Optional[Any] = ..., stored_fields: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
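    # --- Editor's illustrative sketch (not part of the generated stub) ---
    # As the return annotations above show, `exists`/`exists_source` return a
    # bool rather than raising for a missing document. Names are assumptions.
    #
    #   if await es.exists(index="my-index", id="1"):
    #       has_source = await es.exists_source(index="my-index", id="1")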
async def field_caps( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., fields: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., include_unmapped: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get( self, *, index: str, id: str, doc_type: Optional[str] = ..., _source: Optional[Union[Union[List[str], str], bool]] = ..., _source_excludes: Optional[Union[List[str], str]] = ..., _source_includes: Optional[Union[List[str], str]] = ..., preference: Optional[str] = ..., realtime: Optional[bool] = ..., refresh: Optional[bool] = ..., routing: Optional[str] = ..., stored_fields: Optional[Union[List[str], str]] = ..., version: Optional[int] = ..., version_type: Optional[ Union[Literal["external", "external_gte", "force", "internal"], str] ] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_script( self, *, id: Any, master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_source( self, *, index: Any, id: Any, doc_type: Optional[Any] = ..., _source: Optional[Any] = ..., _source_excludes: Optional[Any] = ..., _source_includes: Optional[Any] = ..., preference: Optional[Any] = ..., realtime: Optional[bool] = ..., refresh: Optional[bool] = ..., routing: Optional[Any] = ..., version: Optional[Any] = ..., version_type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
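    # --- Editor's illustrative sketch (not part of the generated stub) ---
    # `get` fetches the whole hit while `get_source` returns only the `_source`
    # document; `_source_includes` filters the returned fields. Names are
    # assumptions.
    #
    #   doc = await es.get(index="my-index", id="1", _source_includes=["title"])
    #   print(doc["_source"])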
async def mget( self, *, body: Mapping[str, Any], index: Optional[Any] = ..., doc_type: Optional[Any] = ..., _source: Optional[Any] = ..., _source_excludes: Optional[Any] = ..., _source_includes: Optional[Any] = ..., preference: Optional[Any] = ..., realtime: Optional[bool] = ..., refresh: Optional[bool] = ..., routing: Optional[Any] = ..., stored_fields: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def msearch( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], index: Optional[Any] = ..., doc_type: Optional[Any] = ..., ccs_minimize_roundtrips: Optional[bool] = ..., max_concurrent_searches: Optional[Any] = ..., max_concurrent_shard_requests: Optional[Any] = ..., pre_filter_shard_size: Optional[Any] = ..., rest_total_hits_as_int: Optional[bool] = ..., search_type: Optional[Any] = ..., typed_keys: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def msearch_template( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], index: Optional[Any] = ..., doc_type: Optional[Any] = ..., ccs_minimize_roundtrips: Optional[bool] = ..., max_concurrent_searches: Optional[Any] = ..., rest_total_hits_as_int: Optional[bool] = ..., search_type: Optional[Any] = ..., typed_keys: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
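    # --- Editor's illustrative sketch (not part of the generated stub) ---
    # `msearch` interleaves header and body mappings, mirroring the ND-JSON
    # wire format; a str/bytes payload is also accepted per the signature
    # above. Names are assumptions.
    #
    #   body = [
    #       {"index": "my-index"},
    #       {"query": {"match_all": {}}, "size": 1},
    #   ]
    #   resp = await es.msearch(body=body)
    #   for item in resp["responses"]:
    #       print(item["hits"]["total"])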
async def mtermvectors( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Any] = ..., doc_type: Optional[Any] = ..., field_statistics: Optional[bool] = ..., fields: Optional[Any] = ..., ids: Optional[Any] = ..., offsets: Optional[bool] = ..., payloads: Optional[bool] = ..., positions: Optional[bool] = ..., preference: Optional[Any] = ..., realtime: Optional[bool] = ..., routing: Optional[Any] = ..., term_statistics: Optional[bool] = ..., version: Optional[Any] = ..., version_type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_script( self, *, id: Any, body: Mapping[str, Any], context: Optional[Any] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def rank_eval( self, *, body: Mapping[str, Any], index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., search_type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def reindex( self, *, body: Mapping[str, Any], max_docs: Optional[Any] = ..., refresh: Optional[bool] = ..., requests_per_second: Optional[Any] = ..., scroll: Optional[Any] = ..., slices: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
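    # --- Editor's illustrative sketch (not part of the generated stub) ---
    # `reindex` takes source/dest in the body; with wait_for_completion=False
    # it instead returns a task id to poll. Index names are assumptions.
    #
    #   resp = await es.reindex(
    #       body={"source": {"index": "old-index"}, "dest": {"index": "new-index"}},
    #       wait_for_completion=True,
    #   )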
async def reindex_rethrottle( self, *, task_id: Any, requests_per_second: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def render_search_template( self, *, body: Optional[Mapping[str, Any]] = ..., id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def scripts_painless_execute( self, *, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def scroll( self, *, body: Optional[Mapping[str, Any]] = ..., rest_total_hits_as_int: Optional[bool] = ..., scroll: Optional[Union[int, str]] = ..., scroll_id: Optional[str] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
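    # --- Editor's illustrative sketch (not part of the generated stub) ---
    # Typical scroll pagination over the `search`/`scroll`/`clear_scroll` trio
    # typed in this stub. The index name and `process` callback are
    # assumptions.
    #
    #   page = await es.search(index="my-index", scroll="2m", size=100)
    #   while page["hits"]["hits"]:
    #       process(page["hits"]["hits"])   # hypothetical per-page callback
    #       page = await es.scroll(scroll_id=page["_scroll_id"], scroll="2m")
    #   await es.clear_scroll(scroll_id=page["_scroll_id"])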
async def search( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Union[List[str], str]] = ..., doc_type: Optional[Union[List[str], str]] = ..., _source: Optional[Union[Union[List[str], str], bool]] = ..., _source_excludes: Optional[Union[List[str], str]] = ..., _source_includes: Optional[Union[List[str], str]] = ..., aggregations: Optional[Mapping[str, Mapping[str, Any]]] = ..., aggs: Optional[Mapping[str, Mapping[str, Any]]] = ..., allow_no_indices: Optional[bool] = ..., allow_partial_search_results: Optional[bool] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[str] = ..., batched_reduce_size: Optional[int] = ..., ccs_minimize_roundtrips: Optional[bool] = ..., collapse: Optional[Mapping[str, Any]] = ..., default_operator: Optional[Union[Literal["and", "or"], str]] = ..., df: Optional[str] = ..., docvalue_fields: Optional[Union[List[str], str]] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., explain: Optional[bool] = ..., fields: Optional[List[Mapping[str, Any]]] = ..., from_: Optional[int] = ..., highlight: Optional[Mapping[str, Any]] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., indices_boost: Optional[List[Mapping[str, float]]] = ..., lenient: Optional[bool] = ..., max_concurrent_shard_requests: Optional[int] = ..., min_compatible_shard_node: Optional[str] = ..., min_score: Optional[float] = ..., pit: Optional[Mapping[str, Any]] = ..., post_filter: Optional[Mapping[str, Any]] = ..., pre_filter_shard_size: Optional[int] = ..., preference: Optional[str] = ..., profile: Optional[bool] = ..., q: Optional[str] = ..., query: Optional[Mapping[str, Any]] = ..., request_cache: Optional[bool] = ..., rescore: Optional[Union[List[Mapping[str, Any]], Mapping[str, Any]]] = ..., rest_total_hits_as_int: Optional[bool] = ..., routing: Optional[str] = ..., runtime_mappings: Optional[Mapping[str, Mapping[str, Any]]] = ..., script_fields: Optional[Mapping[str, Mapping[str, Any]]] = ..., scroll: Optional[Union[int, str]] = ..., search_after: Optional[List[Union[None, float, int, str]]] = ..., search_type: Optional[ Union[Literal["dfs_query_then_fetch", "query_then_fetch"], str] ] = ..., seq_no_primary_term: Optional[bool] = ..., size: Optional[int] = ..., slice: Optional[Mapping[str, Any]] = ..., sort: Optional[Union[List[str], str]] = ..., stats: Optional[List[str]] = ..., stored_fields: Optional[Union[List[str], str]] = ..., suggest: Optional[Mapping[str, Any]] = ..., suggest_field: Optional[str] = ..., suggest_mode: Optional[ Union[Literal["always", "missing", "popular"], str] ] = ..., suggest_size: Optional[int] = ..., suggest_text: Optional[str] = ..., terminate_after: Optional[int] = ..., timeout: Optional[Union[int, str]] = ..., track_scores: Optional[bool] = ..., track_total_hits: Optional[Union[bool, int]] = ..., typed_keys: Optional[bool] = ..., version: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, 
str]] = ..., ) -> Dict[str, Any]: ... async def search_shards( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., local: Optional[bool] = ..., preference: Optional[Any] = ..., routing: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def search_template( self, *, body: Mapping[str, Any], index: Optional[Any] = ..., doc_type: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., ccs_minimize_roundtrips: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., explain: Optional[bool] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., preference: Optional[Any] = ..., profile: Optional[bool] = ..., rest_total_hits_as_int: Optional[bool] = ..., routing: Optional[Any] = ..., scroll: Optional[Any] = ..., search_type: Optional[Any] = ..., typed_keys: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def termvectors( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., doc_type: Optional[Any] = ..., id: Optional[Any] = ..., field_statistics: Optional[bool] = ..., fields: Optional[Any] = ..., offsets: Optional[bool] = ..., payloads: Optional[bool] = ..., positions: Optional[bool] = ..., preference: Optional[Any] = ..., realtime: Optional[bool] = ..., routing: Optional[Any] = ..., term_statistics: Optional[bool] = ..., version: Optional[Any] = ..., version_type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
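    # --- Editor's illustrative sketch (not part of the generated stub) ---
    # In this 7.x client the search request may be passed either as `body=` or
    # through the typed top-level parameters above (query, size, sort, ...).
    # Index and field names are assumptions.
    #
    #   resp = await es.search(
    #       index="my-index",
    #       query={"match": {"title": "hello"}},
    #       size=10,
    #       sort=["_score"],
    #   )
    #   for hit in resp["hits"]["hits"]:
    #       print(hit["_id"], hit["_score"])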
async def update( self, *, index: str, id: str, body: Mapping[str, Any], doc_type: Optional[str] = ..., _source: Optional[Union[Union[List[str], str], bool]] = ..., _source_excludes: Optional[Union[List[str], str]] = ..., _source_includes: Optional[Union[List[str], str]] = ..., detect_noop: Optional[bool] = ..., doc: Optional[Any] = ..., doc_as_upsert: Optional[bool] = ..., if_primary_term: Optional[int] = ..., if_seq_no: Optional[int] = ..., lang: Optional[str] = ..., refresh: Optional[Union[Literal["false", "true", "wait_for"], bool, str]] = ..., require_alias: Optional[bool] = ..., retry_on_conflict: Optional[int] = ..., routing: Optional[str] = ..., script: Optional[Mapping[str, Any]] = ..., scripted_upsert: Optional[bool] = ..., timeout: Optional[Union[int, str]] = ..., upsert: Optional[Any] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def update_by_query( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., doc_type: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[Any] = ..., conflicts: Optional[Any] = ..., default_operator: Optional[Any] = ..., df: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., from_: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., lenient: Optional[bool] = ..., max_docs: Optional[Any] = ..., pipeline: Optional[Any] = ..., preference: Optional[Any] = ..., q: Optional[Any] = ..., refresh: Optional[bool] = ..., request_cache: Optional[bool] = ..., requests_per_second: Optional[Any] = ..., routing: Optional[Any] = ..., scroll: Optional[Any] = ..., scroll_size: Optional[Any] = ..., search_timeout: Optional[Any] = ..., search_type: Optional[Any] = ..., size: Optional[Any] = ..., slices: Optional[Any] = ..., sort: Optional[Any] = ..., stats: Optional[Any] = ..., terminate_after: Optional[Any] = ..., timeout: Optional[Any] = ..., version: Optional[bool] = ..., version_type: Optional[bool] = ..., wait_for_active_shards: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
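    # --- Editor's illustrative sketch (not part of the generated stub) ---
    # `update` applies a partial document (or script) to an existing document;
    # `doc_as_upsert` inserts it when the id does not exist yet. Names are
    # assumptions.
    #
    #   await es.update(
    #       index="my-index",
    #       id="1",
    #       body={"doc": {"views": 1}, "doc_as_upsert": True},
    #   )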
async def update_by_query_rethrottle( self, *, task_id: Any, requests_per_second: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_script_context( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_script_languages( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def close_point_in_time( self, *, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def open_point_in_time( self, *, index: Any, expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., keep_alive: Optional[Any] = ..., preference: Optional[Any] = ..., routing: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
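    # --- Editor's illustrative sketch (not part of the generated stub) ---
    # A point-in-time pins a consistent view of an index for paging; it must be
    # closed explicitly. The index name is an assumption.
    #
    #   pit = await es.open_point_in_time(index="my-index", keep_alive="1m")
    #   resp = await es.search(
    #       body={"pit": {"id": pit["id"], "keep_alive": "1m"},
    #             "query": {"match_all": {}}},
    #   )
    #   await es.close_point_in_time(body={"id": pit["id"]})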
async def terms_enum( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def search_mvt( self, *, index: Union[List[str], str], field: str, zoom: int, x: int, y: int, body: Optional[Mapping[str, Any]] = ..., aggs: Optional[Mapping[str, Mapping[str, Any]]] = ..., exact_bounds: Optional[bool] = ..., extent: Optional[int] = ..., fields: Optional[Union[List[str], str]] = ..., grid_precision: Optional[int] = ..., grid_type: Optional[Union[Literal["centroid", "grid", "point"], str]] = ..., query: Optional[Mapping[str, Any]] = ..., runtime_mappings: Optional[Mapping[str, Mapping[str, Any]]] = ..., size: Optional[int] = ..., sort: Optional[ Union[List[Union[Mapping[str, Any], str]], Union[Mapping[str, Any], str]] ] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bytes: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/async_search.py000066400000000000000000000243521426163262700254530ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class AsyncSearchClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def delete(self, id, params=None, headers=None): """ Deletes an async search by ID. If the search is still running, the search request will be cancelled. Otherwise, the saved search results are deleted. 
``_ :arg id: The async search ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "DELETE", _make_path("_async_search", id), params=params, headers=headers ) @query_params( "keep_alive", "typed_keys", "wait_for_completion_timeout", response_mimetypes=["application/json"], ) async def get(self, id, params=None, headers=None): """ Retrieves the results of a previously submitted async search request given its ID. ``_ :arg id: The async search ID :arg keep_alive: Specify the time interval in which the results (partial or final) for this search will be available :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response :arg wait_for_completion_timeout: Specify the time that the request should block waiting for the final response """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "GET", _make_path("_async_search", id), params=params, headers=headers ) @query_params( "_source", "_source_excludes", "_source_includes", "allow_no_indices", "allow_partial_search_results", "analyze_wildcard", "analyzer", "batched_reduce_size", "default_operator", "df", "docvalue_fields", "expand_wildcards", "explain", "from_", "ignore_throttled", "ignore_unavailable", "keep_alive", "keep_on_completion", "lenient", "max_concurrent_shard_requests", "preference", "q", "request_cache", "routing", "search_type", "seq_no_primary_term", "size", "sort", "stats", "stored_fields", "suggest_field", "suggest_mode", "suggest_size", "suggest_text", "terminate_after", "timeout", "track_scores", "track_total_hits", "typed_keys", "version", "wait_for_completion_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def submit(self, body=None, index=None, params=None, headers=None): """ Executes a search request asynchronously. ``_ :arg body: The search definition using the Query DSL :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg _source: True or false to return the _source field or not, or a list of fields to return :arg _source_excludes: A list of fields to exclude from the returned _source field :arg _source_includes: A list of fields to extract and return from the _source field :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg allow_partial_search_results: Indicate if an error should be returned if there is a partial search failure or timeout Default: True :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg batched_reduce_size: The number of shard results that should be reduced at once on the coordinating node. This value should be used as the granularity at which progress results will be made available. 
Default: 5 :arg default_operator: The default operator for query string query (AND or OR) Valid choices: AND, OR Default: OR :arg df: The field to use as default where no field prefix is given in the query string :arg docvalue_fields: A comma-separated list of fields to return as the docvalue representation of a field for each hit :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg explain: Specify whether to return detailed information about score computation as part of a hit :arg from_: Starting offset (default: 0) :arg ignore_throttled: Whether specified concrete, expanded or aliased indices should be ignored when throttled :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg keep_alive: Update the time interval in which the results (partial or final) for this search will be available Default: 5d :arg keep_on_completion: Control whether the response should be stored in the cluster if it completed within the provided [wait_for_completion] time (default: false) :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg max_concurrent_shard_requests: The number of concurrent shard requests per node this search executes concurrently. This value should be used to limit the impact of the search on the cluster in order to limit the number of concurrent shard requests Default: 5 :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg q: Query in the Lucene query string syntax :arg request_cache: Specify if request cache should be used for this request or not, defaults to true :arg routing: A comma-separated list of specific routing values :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg seq_no_primary_term: Specify whether to return sequence number and primary term of the last modification of each hit :arg size: Number of hits to return (default: 10) :arg sort: A comma-separated list of <field>:<direction> pairs :arg stats: Specific 'tag' of the request for logging and statistical purposes :arg stored_fields: A comma-separated list of stored fields to return as part of a hit :arg suggest_field: Specify which field to use for suggestions :arg suggest_mode: Specify suggest mode Valid choices: missing, popular, always Default: missing :arg suggest_size: How many suggestions to return in response :arg suggest_text: The source text for which the suggestions should be returned :arg terminate_after: The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early. 
:arg timeout: Explicit operation timeout :arg track_scores: Whether to calculate and return scores even if they are not used for sorting :arg track_total_hits: Indicate if the number of documents that match the query should be tracked :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response :arg version: Specify whether to return document version as part of a hit :arg wait_for_completion_timeout: Specify the time that the request should block waiting for the final response Default: 1s """ if "from_" in params: params["from"] = params.pop("from_") return await self.transport.perform_request( "POST", _make_path(index, "_async_search"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def status(self, id, params=None, headers=None): """ Retrieves the status of a previously submitted async search request given its ID. ``_ :arg id: The async search ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "GET", _make_path("_async_search", "status", id), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/async_search.pyi000066400000000000000000000133621426163262700256230ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class AsyncSearchClient(NamespacedClient): async def delete( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def get( self, *, id: Any, keep_alive: Optional[Any] = ..., typed_keys: Optional[bool] = ..., wait_for_completion_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def submit( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Any] = ..., _source: Optional[Any] = ..., _source_excludes: Optional[Any] = ..., _source_includes: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., allow_partial_search_results: Optional[bool] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[Any] = ..., batched_reduce_size: Optional[Any] = ..., default_operator: Optional[Any] = ..., df: Optional[Any] = ..., docvalue_fields: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., explain: Optional[bool] = ..., from_: Optional[Any] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., keep_alive: Optional[Any] = ..., keep_on_completion: Optional[bool] = ..., lenient: Optional[bool] = ..., max_concurrent_shard_requests: Optional[Any] = ..., preference: Optional[Any] = ..., q: Optional[Any] = ..., request_cache: Optional[bool] = ..., routing: Optional[Any] = ..., search_type: Optional[Any] = ..., seq_no_primary_term: Optional[bool] = ..., size: Optional[Any] = ..., sort: Optional[Any] = ..., stats: Optional[Any] = ..., stored_fields: Optional[Any] = ..., suggest_field: Optional[Any] = ..., suggest_mode: Optional[Any] = ..., suggest_size: Optional[Any] = ..., suggest_text: Optional[Any] = ..., terminate_after: Optional[Any] = ..., timeout: Optional[Any] = ..., track_scores: Optional[bool] = ..., track_total_hits: Optional[bool] = ..., typed_keys: Optional[bool] = ..., version: Optional[bool] = ..., wait_for_completion_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def status( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
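    # --- Editor's illustrative sketch (not part of the generated stub) ---
    # A typical submit/status/get/delete round trip through the async-search
    # namespace declared above. The index name is an assumption, and the "id"
    # key is only present while the search is still running or when it was
    # kept with keep_on_completion=True.
    #
    #   sub = await es.async_search.submit(
    #       index="my-index",
    #       body={"query": {"match_all": {}}},
    #       wait_for_completion_timeout="1s",
    #       keep_on_completion=True,
    #   )
    #   search_id = sub["id"]
    #   status = await es.async_search.status(id=search_id)
    #   result = await es.async_search.get(id=search_id)
    #   await es.async_search.delete(id=search_id)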
elasticsearch-py-7.17.6/elasticsearch/_async/client/autoscaling.py000066400000000000000000000077151426163262700253260ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class AutoscalingClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def delete_autoscaling_policy(self, name, params=None, headers=None): """ Deletes an autoscaling policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ :arg name: the name of the autoscaling policy """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "DELETE", _make_path("_autoscaling", "policy", name), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def get_autoscaling_policy(self, name, params=None, headers=None): """ Retrieves an autoscaling policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ :arg name: the name of the autoscaling policy """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "GET", _make_path("_autoscaling", "policy", name), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_autoscaling_policy(self, name, body, params=None, headers=None): """ Creates a new autoscaling policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ :arg name: the name of the autoscaling policy :arg body: the specification of the autoscaling policy """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_autoscaling", "policy", name), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def get_autoscaling_capacity(self, params=None, headers=None): """ Gets the current autoscaling capacity based on the configured autoscaling policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ """ return await self.transport.perform_request( "GET", "/_autoscaling/capacity", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/autoscaling.pyi000066400000000000000000000075671426163262700255040ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class AutoscalingClient(NamespacedClient): async def delete_autoscaling_policy( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_autoscaling_policy( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_autoscaling_policy( self, *, name: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_autoscaling_capacity( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/cat.py000066400000000000000000001070421426163262700235560ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, _make_path, query_params class CatClient(NamespacedClient): @query_params( "expand_wildcards", "format", "h", "help", "local", "s", "v", response_mimetypes=["text/plain", "application/json"], ) async def aliases(self, name=None, params=None, headers=None): """ Shows information about currently configured aliases to indices including filter and routing info. ``_ :arg name: A comma-separated list of alias names to return :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: all :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "aliases", name), params=params, headers=headers ) @query_params( "bytes", "format", "h", "help", "local", "master_timeout", "s", "v", response_mimetypes=["text/plain", "application/json"], ) async def allocation(self, node_id=None, params=None, headers=None): """ Provides a snapshot of how many shards are allocated to each data node and how much disk space they are using. ``_ :arg node_id: A comma-separated list of node IDs or names to limit the returned information :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "allocation", node_id), params=params, headers=headers, ) @query_params( "format", "h", "help", "s", "v", response_mimetypes=["text/plain", "application/json"], ) async def count(self, index=None, params=None, headers=None): """ Provides quick access to the document count of the entire cluster, or individual indices. ``_ :arg index: A comma-separated list of index names to limit the returned information :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. 
Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "count", index), params=params, headers=headers ) @query_params( "format", "h", "help", "s", "time", "ts", "v", response_mimetypes=["text/plain", "application/json"], ) async def health(self, params=None, headers=None): """ Returns a concise representation of the cluster health. ``_ :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg ts: Set to false to disable timestamping Default: True :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", "/_cat/health", params=params, headers=headers ) @query_params( "help", "s", response_mimetypes=["text/plain"], ) async def help(self, params=None, headers=None): """ Returns help for the Cat APIs. ``_ :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by """ return await self.transport.perform_request( "GET", "/_cat", params=params, headers=headers ) @query_params( "bytes", "expand_wildcards", "format", "h", "health", "help", "include_unloaded_segments", "local", "master_timeout", "pri", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) async def indices(self, index=None, params=None, headers=None): """ Returns information about indices: number of primaries and replicas, document counts, disk size, ... ``_ :arg index: A comma-separated list of index names to limit the returned information :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: all :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg health: A health status ("green", "yellow", or "red") to filter only indices matching the specified health status Valid choices: green, yellow, red :arg help: Return help information :arg include_unloaded_segments: If set to true, segment stats will include stats for segments that are not currently loaded into memory :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg pri: Set to true to return stats only for primary shards :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "indices", index), params=params, headers=headers ) @query_params( "format", "h", "help", "local", "master_timeout", "s", "v", response_mimetypes=["text/plain", "application/json"], ) async def master(self, params=None, headers=None): """ Returns information about the master node. ``_ :arg format: a short version of the Accept header, e.g. 
json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", "/_cat/master", params=params, headers=headers ) @query_params( "bytes", "format", "full_id", "h", "help", "include_unloaded_segments", "local", "master_timeout", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) async def nodes(self, params=None, headers=None): """ Returns basic statistics about performance of cluster nodes. ``_ :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg full_id: Return the full node ID instead of the shortened version (default: false) :arg h: Comma-separated list of column names to display :arg help: Return help information :arg include_unloaded_segments: If set to true, segment stats will include stats for segments that are not currently loaded into memory :arg local: Calculate the selected nodes using the local cluster state rather than the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", "/_cat/nodes", params=params, headers=headers ) @query_params( "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) async def recovery(self, index=None, params=None, headers=None): """ Returns information about index shard recoveries, both on-going and completed. ``_ :arg index: Comma-separated list or wildcard expression of index names to limit the returned information :arg active_only: If `true`, the response only includes ongoing shard recoveries :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg detailed: If `true`, the response includes detailed information about shard recoveries :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "recovery", index), params=params, headers=headers ) @query_params( "bytes", "format", "h", "help", "local", "master_timeout", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) async def shards(self, index=None, params=None, headers=None): """ Provides a detailed view of shard allocation on nodes. ``_ :arg index: A comma-separated list of index names to limit the returned information :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. 
json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "shards", index), params=params, headers=headers ) @query_params( "bytes", "format", "h", "help", "s", "v", response_mimetypes=["text/plain", "application/json"], ) async def segments(self, index=None, params=None, headers=None): """ Provides low-level information about the segments in the shards of an index. ``_ :arg index: A comma-separated list of index names to limit the returned information :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "segments", index), params=params, headers=headers ) @query_params( "format", "h", "help", "local", "master_timeout", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) async def pending_tasks(self, params=None, headers=None): """ Returns a concise representation of the cluster pending tasks. ``_ :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", "/_cat/pending_tasks", params=params, headers=headers ) @query_params( "format", "h", "help", "local", "master_timeout", "s", "size", "v", response_mimetypes=["text/plain", "application/json"], ) async def thread_pool(self, thread_pool_patterns=None, params=None, headers=None): """ Returns cluster-wide thread pool statistics per node. By default the active, queue and rejected statistics are returned for all thread pools. ``_ :arg thread_pool_patterns: A comma-separated list of regular- expressions to filter the thread pools in the output :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg size: The multiplier in which to display values Valid choices: , k, m, g, t, p :arg v: Verbose mode. 
Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "thread_pool", thread_pool_patterns), params=params, headers=headers, ) @query_params( "bytes", "format", "h", "help", "s", "v", response_mimetypes=["text/plain", "application/json"], ) async def fielddata(self, fields=None, params=None, headers=None): """ Shows how much heap memory is currently being used by fielddata on every data node in the cluster. ``_ :arg fields: A comma-separated list of fields for which to return the fielddata size :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "fielddata", fields), params=params, headers=headers, ) @query_params( "format", "h", "help", "include_bootstrap", "local", "master_timeout", "s", "v", response_mimetypes=["text/plain", "application/json"], ) async def plugins(self, params=None, headers=None): """ Returns information about installed plugins across nodes. ``_ :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg include_bootstrap: Include bootstrap plugins in the response :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", "/_cat/plugins", params=params, headers=headers ) @query_params( "format", "h", "help", "local", "master_timeout", "s", "v", response_mimetypes=["text/plain", "application/json"], ) async def nodeattrs(self, params=None, headers=None): """ Returns information about custom node attributes. ``_ :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", "/_cat/nodeattrs", params=params, headers=headers ) @query_params( "format", "h", "help", "local", "master_timeout", "s", "v", response_mimetypes=["text/plain", "application/json"], ) async def repositories(self, params=None, headers=None): """ Returns information about snapshot repositories registered in the cluster. ``_ :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. 
Display column headers """ return await self.transport.perform_request( "GET", "/_cat/repositories", params=params, headers=headers ) @query_params( "format", "h", "help", "ignore_unavailable", "master_timeout", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) async def snapshots(self, repository=None, params=None, headers=None): """ Returns all snapshots in a specific repository. ``_ :arg repository: Name of repository from which to fetch the snapshot information :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg ignore_unavailable: Set to true to ignore unavailable snapshots :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "snapshots", repository), params=params, headers=headers, ) @query_params( "actions", "detailed", "format", "h", "help", "nodes", "parent_task_id", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) async def tasks(self, params=None, headers=None): """ Returns information about the tasks currently executing on one or more nodes in the cluster. ``_ :arg actions: A comma-separated list of actions that should be returned. Leave empty to return all. :arg detailed: Return detailed task information (default: false) :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg nodes: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg parent_task_id: Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all. :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", "/_cat/tasks", params=params, headers=headers ) @query_params( "format", "h", "help", "local", "master_timeout", "s", "v", response_mimetypes=["text/plain", "application/json"], ) async def templates(self, name=None, params=None, headers=None): """ Returns information about existing templates. ``_ :arg name: A pattern that returned template names must match :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. 
Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "templates", name), params=params, headers=headers ) @query_params( "allow_no_match", "bytes", "format", "h", "help", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) async def ml_data_frame_analytics(self, id=None, params=None, headers=None): """ Gets configuration and usage information about data frame analytics jobs. ``_ :arg id: The ID of the data frame analytics to fetch :arg allow_no_match: Whether to ignore if a wildcard expression matches no configs. (This includes `_all` string or when no configs have been specified) :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "ml", "data_frame", "analytics", id), params=params, headers=headers, ) @query_params( "allow_no_datafeeds", "allow_no_match", "format", "h", "help", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) async def ml_datafeeds(self, datafeed_id=None, params=None, headers=None): """ Gets configuration and usage information about datafeeds. ``_ :arg datafeed_id: The ID of the datafeeds stats to fetch :arg allow_no_datafeeds: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "ml", "datafeeds", datafeed_id), params=params, headers=headers, ) @query_params( "allow_no_jobs", "allow_no_match", "bytes", "format", "h", "help", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) async def ml_jobs(self, job_id=None, params=None, headers=None): """ Gets configuration and usage information about anomaly detection jobs. ``_ :arg job_id: The ID of the jobs stats to fetch :arg allow_no_jobs: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. 
json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( "GET", _make_path("_cat", "ml", "anomaly_detectors", job_id), params=params, headers=headers, ) @query_params( "allow_no_match", "bytes", "format", "from_", "h", "help", "s", "size", "time", "v", response_mimetypes=["text/plain", "application/json"], ) async def ml_trained_models(self, model_id=None, params=None, headers=None): """ Gets configuration and usage information about inference trained models. ``_ :arg model_id: The ID of the trained models stats to fetch :arg allow_no_match: Whether to ignore if a wildcard expression matches no trained models. (This includes `_all` string or when no trained models have been specified) Default: True :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg from_: skips a number of trained models :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg size: specifies a max number of trained models to get Default: 100 :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ if "from_" in params: params["from"] = params.pop("from_") return await self.transport.perform_request( "GET", _make_path("_cat", "ml", "trained_models", model_id), params=params, headers=headers, ) @query_params( "allow_no_match", "format", "from_", "h", "help", "s", "size", "time", "v", response_mimetypes=["text/plain", "application/json"], ) async def transforms(self, transform_id=None, params=None, headers=None): """ Gets configuration and usage information about transforms. ``_ :arg transform_id: The id of the transform for which to get stats. '_all' or '*' implies all transforms :arg allow_no_match: Whether to ignore if a wildcard expression matches no transforms. (This includes `_all` string or when no transforms have been specified) :arg format: a short version of the Accept header, e.g. json, yaml :arg from_: skips a number of transform configs, defaults to 0 :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg size: specifies a max number of transforms to get, defaults to 100 :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ if "from_" in params: params["from"] = params.pop("from_") return await self.transport.perform_request( "GET", _make_path("_cat", "transforms", transform_id), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/cat.pyi000066400000000000000000000621251426163262700237310ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class CatClient(NamespacedClient): async def aliases( self, *, name: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def allocation( self, *, node_id: Optional[Any] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def count( self, *, index: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... 
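    # Usage sketch (illustrative comments, not part of the stub API): the cat
    # endpoints above return the plain-text table by default and parsed JSON
    # when format="json" is passed. Assuming an already-configured
    # AsyncElasticsearch instance named `es` (hypothetical), and "my-index"
    # as a placeholder index name:
    #
    #     text_table = await es.cat.aliases(v=True)
    #     as_json = await es.cat.count(index="my-index", format="json")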
async def health( self, *, format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., ts: Optional[bool] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def help( self, *, help: Optional[bool] = ..., s: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> str: ... async def indices( self, *, index: Optional[Any] = ..., bytes: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., health: Optional[Any] = ..., help: Optional[bool] = ..., include_unloaded_segments: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pri: Optional[bool] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def master( self, *, format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... 
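    # Hedged example for the indices stub above (assumes a client `es`; the
    # index pattern is a placeholder). `health="green"` filters to indices
    # matching that health status, per the docstring in cat.py:
    #
    #     green_only = await es.cat.indices(index="logs-*", health="green", format="json")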
async def nodes( self, *, bytes: Optional[Any] = ..., format: Optional[Any] = ..., full_id: Optional[bool] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., include_unloaded_segments: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def recovery( self, *, index: Optional[Any] = ..., active_only: Optional[bool] = ..., bytes: Optional[Any] = ..., detailed: Optional[bool] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def shards( self, *, index: Optional[Any] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def segments( self, *, index: Optional[Any] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... 
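    # Sketch for the shard-level stubs above (nodes/recovery/shards/segments),
    # assuming a client `es`; the column list passed via `h` is one plausible
    # selection, not an exhaustive or authoritative one:
    #
    #     shard_rows = await es.cat.shards(index="logs-*", h="index,shard,prirep,state", v=True)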
async def pending_tasks( self, *, format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def thread_pool( self, *, thread_pool_patterns: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., size: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def fielddata( self, *, fields: Optional[Any] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def plugins( self, *, format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., include_bootstrap: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... 
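    # Sketch for thread_pool/fielddata above (client `es` assumed; the pool
    # pattern and field name are placeholders). `bytes="mb"` is one of the
    # unit choices documented in cat.py:
    #
    #     pools = await es.cat.thread_pool(thread_pool_patterns="search,write", format="json")
    #     fd = await es.cat.fielddata(fields="my_field", bytes="mb")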
async def nodeattrs( self, *, format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def repositories( self, *, format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def snapshots( self, *, repository: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def tasks( self, *, actions: Optional[Any] = ..., detailed: Optional[bool] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., nodes: Optional[Any] = ..., parent_task_id: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... 
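    # Sketch for repositories/snapshots above (client `es` assumed;
    # "my_backup" is a placeholder repository name):
    #
    #     repos = await es.cat.repositories(format="json")
    #     snaps = await es.cat.snapshots(repository="my_backup", ignore_unavailable=True)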
async def templates( self, *, name: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def ml_data_frame_analytics( self, *, id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def ml_datafeeds( self, *, datafeed_id: Optional[Any] = ..., allow_no_datafeeds: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def ml_jobs( self, *, job_id: Optional[Any] = ..., allow_no_jobs: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... 
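    # Sketch for the ML cat stubs above (client `es` assumed). Note that for
    # the ml_trained_models/transforms stubs below, the client accepts the
    # keyword-safe `from_` argument and renames it to the `from` query
    # parameter, as shown in the cat.py implementation:
    #
    #     jobs = await es.cat.ml_jobs(allow_no_match=True, format="json")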
async def ml_trained_models( self, *, model_id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., from_: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., size: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... async def transforms( self, *, transform_id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., format: Optional[Any] = ..., from_: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., size: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/ccr.py000066400000000000000000000271541426163262700235630ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class CcrClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def delete_auto_follow_pattern(self, name, params=None, headers=None): """ Deletes auto-follow patterns. ``_ :arg name: The name of the auto follow pattern. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "DELETE", _make_path("_ccr", "auto_follow", name), params=params, headers=headers, ) @query_params( "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def follow(self, index, body, params=None, headers=None): """ Creates a new follower index configured to follow the referenced leader index. 
``_ :arg index: The name of the follower index :arg body: The name of the leader index and other optional ccr-related parameters :arg wait_for_active_shards: Sets the number of shard copies that must be active before returning. Defaults to 0. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) Default: 0 """ for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path(index, "_ccr", "follow"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def follow_info(self, index, params=None, headers=None): """ Retrieves information about all follower indices, including parameters and status for each follower index. ``_ :arg index: A comma-separated list of index patterns; use `_all` to perform the operation on all indices """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "GET", _make_path(index, "_ccr", "info"), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def follow_stats(self, index, params=None, headers=None): """ Retrieves follower stats. Returns shard-level stats about the following tasks associated with each shard for the specified indices. ``_ :arg index: A comma-separated list of index patterns; use `_all` to perform the operation on all indices """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "GET", _make_path(index, "_ccr", "stats"), params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def forget_follower(self, index, body, params=None, headers=None): """ Removes the follower retention leases from the leader. ``_ :arg index: the name of the leader index for which specified follower retention leases should be removed :arg body: the name and UUID of the follower index, the name of the cluster containing the follower index, and the alias from the perspective of that cluster for the remote cluster containing the leader index """ for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path(index, "_ccr", "forget_follower"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def get_auto_follow_pattern(self, name=None, params=None, headers=None): """ Gets configured auto-follow patterns. Returns the specified auto-follow pattern collection. ``_ :arg name: The name of the auto follow pattern. """ return await self.transport.perform_request( "GET", _make_path("_ccr", "auto_follow", name), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def pause_follow(self, index, params=None, headers=None): """ Pauses a follower index. The follower index will not fetch any additional operations from the leader index. ``_ :arg index: The name of the follower index that should pause following its leader index. 
""" if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_ccr", "pause_follow"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_auto_follow_pattern(self, name, body, params=None, headers=None): """ Creates a new named collection of auto-follow patterns against a specified remote cluster. Newly created indices on the remote cluster matching any of the specified patterns will be automatically configured as follower indices. ``_ :arg name: The name of the auto follow pattern. :arg body: The specification of the auto follow pattern """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_ccr", "auto_follow", name), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def resume_follow(self, index, body=None, params=None, headers=None): """ Resumes a follower index that has been paused ``_ :arg index: The name of the follow index to resume following. :arg body: The name of the leader index and other optional ccr related parameters """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_ccr", "resume_follow"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def stats(self, params=None, headers=None): """ Gets all stats related to cross-cluster replication. ``_ """ return await self.transport.perform_request( "GET", "/_ccr/stats", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def unfollow(self, index, params=None, headers=None): """ Stops the following task associated with a follower index and removes index metadata and settings associated with cross-cluster replication. ``_ :arg index: The name of the follower index that should be turned into a regular index. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_ccr", "unfollow"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def pause_auto_follow_pattern(self, name, params=None, headers=None): """ Pauses an auto-follow pattern ``_ :arg name: The name of the auto follow pattern that should pause discovering new indices to follow. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "POST", _make_path("_ccr", "auto_follow", name, "pause"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def resume_auto_follow_pattern(self, name, params=None, headers=None): """ Resumes an auto-follow pattern that has been paused ``_ :arg name: The name of the auto follow pattern to resume discovering new indices to follow. 
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "POST", _make_path("_ccr", "auto_follow", name, "resume"), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/ccr.pyi000066400000000000000000000246701426163262700237340ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class CcrClient(NamespacedClient): async def delete_auto_follow_pattern( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def follow( self, *, index: Any, body: Mapping[str, Any], wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def follow_info( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def follow_stats( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def forget_follower( self, *, index: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_auto_follow_pattern( self, *, name: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def pause_follow( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_auto_follow_pattern( self, *, name: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
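    # Sketch for the auto-follow stubs above (client `es` assumed; the
    # pattern name and index patterns are placeholders):
    #
    #     await es.ccr.put_auto_follow_pattern(
    #         name="logs-pattern",
    #         body={"remote_cluster": "leader", "leader_index_patterns": ["logs-*"]},
    #     )
    #     patterns = await es.ccr.get_auto_follow_pattern(name="logs-pattern")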
async def resume_follow( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def stats( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def unfollow( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def pause_auto_follow_pattern( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def resume_auto_follow_pattern( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/cluster.py000066400000000000000000000424541426163262700244750ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class ClusterClient(NamespacedClient): @query_params( "expand_wildcards", "level", "local", "master_timeout", "timeout", "wait_for_active_shards", "wait_for_events", "wait_for_no_initializing_shards", "wait_for_no_relocating_shards", "wait_for_nodes", "wait_for_status", response_mimetypes=["application/json"], ) async def health(self, index=None, params=None, headers=None): """ Returns basic information about the health of the cluster. ``_ :arg index: Limit the information returned to a specific index :arg expand_wildcards: Whether to expand wildcard expressions to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: all :arg level: Specify the level of detail for returned information Valid choices: cluster, indices, shards Default: cluster :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Wait until the specified number of shards is active :arg wait_for_events: Wait until all currently queued events with the given priority are processed Valid choices: immediate, urgent, high, normal, low, languid :arg wait_for_no_initializing_shards: Whether to wait until there are no initializing shards in the cluster :arg wait_for_no_relocating_shards: Whether to wait until there are no relocating shards in the cluster :arg wait_for_nodes: Wait until the specified number of nodes is available :arg wait_for_status: Wait until cluster is in a specific state Valid choices: green, yellow, red """ return await self.transport.perform_request( "GET", _make_path("_cluster", "health", index), params=params, headers=headers, ) @query_params( "local", "master_timeout", response_mimetypes=["application/json"], ) async def pending_tasks(self, params=None, headers=None): """ Returns a list of any cluster-level changes (e.g. create index, update mapping, allocate or fail shard) which have not yet been executed. ``_ :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Specify timeout for connection to master """ return await self.transport.perform_request( "GET", "/_cluster/pending_tasks", params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "flat_settings", "ignore_unavailable", "local", "master_timeout", "wait_for_metadata_version", "wait_for_timeout", response_mimetypes=["application/json"], ) async def state(self, metric=None, index=None, params=None, headers=None): """ Returns comprehensive information about the state of the cluster.
``_ :arg metric: Limit the information returned to the specified metrics Valid choices: _all, blocks, metadata, nodes, routing_table, routing_nodes, master_node, version :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expressions to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg flat_settings: Return settings in flat format (default: false) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Specify timeout for connection to master :arg wait_for_metadata_version: Wait for the metadata version to be equal to or greater than the specified metadata version :arg wait_for_timeout: The maximum time to wait for wait_for_metadata_version before timing out """ if index and metric in SKIP_IN_PATH: metric = "_all" return await self.transport.perform_request( "GET", _make_path("_cluster", "state", metric, index), params=params, headers=headers, ) @query_params( "flat_settings", "timeout", response_mimetypes=["application/json"], ) async def stats(self, node_id=None, params=None, headers=None): """ Returns a high-level overview of cluster statistics. ``_ :arg node_id: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg flat_settings: Return settings in flat format (default: false) :arg timeout: Explicit operation timeout """ return await self.transport.perform_request( "GET", "/_cluster/stats" if node_id in SKIP_IN_PATH else _make_path("_cluster", "stats", "nodes", node_id), params=params, headers=headers, ) @query_params( "dry_run", "explain", "master_timeout", "metric", "retry_failed", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def reroute(self, body=None, params=None, headers=None): """ Allows you to manually change the allocation of individual shards in the cluster. ``_ :arg body: The definition of `commands` to perform (`move`, `cancel`, `allocate`) :arg dry_run: Simulate the operation only and return the resulting state :arg explain: Return an explanation of why the commands can or cannot be executed :arg master_timeout: Explicit operation timeout for connection to master node :arg metric: Limit the information returned to the specified metrics. Defaults to all but metadata Valid choices: _all, blocks, metadata, nodes, routing_table, master_node, version :arg retry_failed: Retries allocation of shards that are blocked due to too many subsequent allocation failures :arg timeout: Explicit operation timeout """ return await self.transport.perform_request( "POST", "/_cluster/reroute", params=params, headers=headers, body=body ) @query_params( "flat_settings", "include_defaults", "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def get_settings(self, params=None, headers=None): """ Returns cluster settings.
``_ :arg flat_settings: Return settings in flat format (default: false) :arg include_defaults: Whether to return all default cluster settings. :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ return await self.transport.perform_request( "GET", "/_cluster/settings", params=params, headers=headers ) @query_params( "flat_settings", "master_timeout", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_settings(self, body, params=None, headers=None): """ Updates the cluster settings. ``_ :arg body: The settings to be updated. Can be either `transient` or `persistent` (survives cluster restart). :arg flat_settings: Return settings in flat format (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PUT", "/_cluster/settings", params=params, headers=headers, body=body ) @query_params( response_mimetypes=["application/json"], ) async def remote_info(self, params=None, headers=None): """ Returns information about configured remote clusters. ``_ """ return await self.transport.perform_request( "GET", "/_remote/info", params=params, headers=headers ) @query_params( "include_disk_info", "include_yes_decisions", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def allocation_explain(self, body=None, params=None, headers=None): """ Provides explanations for shard allocations in the cluster. ``_ :arg body: The index, shard, and primary flag to explain. Empty means 'explain a randomly-chosen unassigned shard' :arg include_disk_info: Return information about disk usage and shard sizes (default: false) :arg include_yes_decisions: Return 'YES' decisions in explanation (default: false) """ return await self.transport.perform_request( "POST", "/_cluster/allocation/explain", params=params, headers=headers, body=body, ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def delete_component_template(self, name, params=None, headers=None): """ Deletes a component template ``_ :arg name: The name of the template :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "DELETE", _make_path("_component_template", name), params=params, headers=headers, ) @query_params( "local", "master_timeout", response_mimetypes=["application/json"], ) async def get_component_template(self, name=None, params=None, headers=None): """ Returns one or more component templates ``_ :arg name: The comma-separated names of the component templates :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ return await self.transport.perform_request( "GET", _make_path("_component_template", name), params=params, headers=headers, ) @query_params( "create", "master_timeout", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_component_template(self, name, body, params=None, headers=None): """ Creates or updates a component template ``_ :arg name:
The name of the template :arg body: The template definition :arg create: Whether the component template should only be added if new or can also replace an existing one :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_component_template", name), params=params, headers=headers, body=body, ) @query_params( "local", "master_timeout", response_mimetypes=["application/json"], ) async def exists_component_template(self, name, params=None, headers=None): """ Returns information about whether a particular component template exists ``_ :arg name: The name of the template :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "HEAD", _make_path("_component_template", name), params=params, headers=headers, ) @query_params( "wait_for_removal", response_mimetypes=["application/json"], ) async def delete_voting_config_exclusions(self, params=None, headers=None): """ Clears cluster voting config exclusions. ``_ :arg wait_for_removal: Specifies whether to wait for all excluded nodes to be removed from the cluster before clearing the voting configuration exclusions list. Default: True """ return await self.transport.perform_request( "DELETE", "/_cluster/voting_config_exclusions", params=params, headers=headers, ) @query_params( "node_ids", "node_names", "timeout", response_mimetypes=["application/json"], ) async def post_voting_config_exclusions(self, params=None, headers=None): """ Updates the cluster voting config exclusions by node ids or node names. ``_ :arg node_ids: A comma-separated list of the persistent ids of the nodes to exclude from the voting configuration. If specified, you may not also specify ?node_names. :arg node_names: A comma-separated list of the names of the nodes to exclude from the voting configuration. If specified, you may not also specify ?node_ids. :arg timeout: Explicit operation timeout Default: 30s """ return await self.transport.perform_request( "POST", "/_cluster/voting_config_exclusions", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/cluster.pyi000066400000000000000000000337241426163262700246460ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License.
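# ---------------------------------------------------------------------------
# A minimal sketch of the cluster settings round-trip implemented above
# (`put_settings` / `get_settings`), not library code. The recovery throttle
# value is only an example setting, and the cluster URL is an assumption.
import asyncio

from elasticsearch import AsyncElasticsearch


async def bump_recovery_throttle() -> None:
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        # Persistent settings survive a full cluster restart.
        await es.cluster.put_settings(
            body={"persistent": {"indices.recovery.max_bytes_per_sec": "50mb"}}
        )
        settings = await es.cluster.get_settings()
        print(settings["persistent"])
    finally:
        await es.close()


if __name__ == "__main__":
    asyncio.run(bump_recovery_throttle())
# ---------------------------------------------------------------------------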
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class ClusterClient(NamespacedClient): async def health( self, *, index: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., level: Optional[Any] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., wait_for_events: Optional[Any] = ..., wait_for_no_initializing_shards: Optional[bool] = ..., wait_for_no_relocating_shards: Optional[bool] = ..., wait_for_nodes: Optional[Any] = ..., wait_for_status: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def pending_tasks( self, *, local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def state( self, *, metric: Optional[Any] = ..., index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., flat_settings: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., wait_for_metadata_version: Optional[Any] = ..., wait_for_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def stats( self, *, node_id: Optional[Any] = ..., flat_settings: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
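# ---------------------------------------------------------------------------
# Sketch of a readiness check built on `cluster.health` as typed above:
# block until the cluster reaches at least yellow, with a bounded timeout.
# The URL and timeout values are illustrative assumptions.
import asyncio

from elasticsearch import AsyncElasticsearch


async def wait_for_yellow() -> None:
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        health = await es.cluster.health(wait_for_status="yellow", timeout="30s")
        print(health["status"], health["number_of_nodes"])
    finally:
        await es.close()


if __name__ == "__main__":
    asyncio.run(wait_for_yellow())
# ---------------------------------------------------------------------------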
async def reroute( self, *, body: Optional[Mapping[str, Any]] = ..., dry_run: Optional[bool] = ..., explain: Optional[bool] = ..., master_timeout: Optional[Any] = ..., metric: Optional[Any] = ..., retry_failed: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_settings( self, *, flat_settings: Optional[bool] = ..., include_defaults: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_settings( self, *, body: Mapping[str, Any], flat_settings: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def remote_info( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
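# ---------------------------------------------------------------------------
# Sketch of a dry-run shard move through `cluster.reroute` as typed above.
# The index and node names are placeholders invented for the example;
# `dry_run=True` only simulates the resulting cluster state.
import asyncio

from elasticsearch import AsyncElasticsearch


async def simulate_shard_move() -> None:
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        result = await es.cluster.reroute(
            body={
                "commands": [
                    {
                        "move": {
                            "index": "my-index",
                            "shard": 0,
                            "from_node": "node-1",
                            "to_node": "node-2",
                        }
                    }
                ]
            },
            dry_run=True,
        )
        print(result["acknowledged"])
    finally:
        await es.close()


if __name__ == "__main__":
    asyncio.run(simulate_shard_move())
# ---------------------------------------------------------------------------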
async def allocation_explain( self, *, body: Optional[Mapping[str, Any]] = ..., include_disk_info: Optional[bool] = ..., include_yes_decisions: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_component_template( self, *, name: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_component_template( self, *, name: Optional[Any] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_component_template( self, *, name: Any, body: Mapping[str, Any], create: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def exists_component_template( self, *, name: Any, local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... 
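# ---------------------------------------------------------------------------
# Sketch pairing `exists_component_template` (which returns a bool, per the
# stub above) with `put_component_template`. The template name and settings
# body are invented for illustration.
import asyncio

from elasticsearch import AsyncElasticsearch


async def ensure_component_template() -> None:
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        if not await es.cluster.exists_component_template(name="shard-settings"):
            await es.cluster.put_component_template(
                name="shard-settings",
                body={"template": {"settings": {"number_of_shards": 1}}},
            )
    finally:
        await es.close()


if __name__ == "__main__":
    asyncio.run(ensure_component_template())
# ---------------------------------------------------------------------------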
async def delete_voting_config_exclusions( self, *, wait_for_removal: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def post_voting_config_exclusions( self, *, node_ids: Optional[Any] = ..., node_names: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/dangling_indices.py000066400000000000000000000064101426163262700262650ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
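# ---------------------------------------------------------------------------
# Sketch of the voting-configuration exclusion flow typed above: exclude a
# master-eligible node before decommissioning it, then clear the exclusion
# list. The node name is a placeholder.
import asyncio

from elasticsearch import AsyncElasticsearch


async def exclude_then_clear() -> None:
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        await es.cluster.post_voting_config_exclusions(node_names="node-1")
        # ... decommission the node here ...
        await es.cluster.delete_voting_config_exclusions(wait_for_removal=True)
    finally:
        await es.close()


if __name__ == "__main__":
    asyncio.run(exclude_then_clear())
# ---------------------------------------------------------------------------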
from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class DanglingIndicesClient(NamespacedClient): @query_params( "accept_data_loss", "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def delete_dangling_index(self, index_uuid, params=None, headers=None): """ Deletes the specified dangling index ``_ :arg index_uuid: The UUID of the dangling index :arg accept_data_loss: Must be set to true in order to delete the dangling index :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if index_uuid in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index_uuid'.") return await self.transport.perform_request( "DELETE", _make_path("_dangling", index_uuid), params=params, headers=headers, ) @query_params( "accept_data_loss", "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def import_dangling_index(self, index_uuid, params=None, headers=None): """ Imports the specified dangling index ``_ :arg index_uuid: The UUID of the dangling index :arg accept_data_loss: Must be set to true in order to import the dangling index :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if index_uuid in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index_uuid'.") return await self.transport.perform_request( "POST", _make_path("_dangling", index_uuid), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def list_dangling_indices(self, params=None, headers=None): """ Returns all dangling indices. ``_ """ return await self.transport.perform_request( "GET", "/_dangling", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/dangling_indices.pyi000066400000000000000000000065261426163262700264460ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
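# ---------------------------------------------------------------------------
# Sketch of recovering dangling indices with the client implemented above:
# list them, then import each by UUID. Destructive operations require
# `accept_data_loss=True`; the response-shape handling here is a best-effort
# assumption for illustration.
import asyncio

from elasticsearch import AsyncElasticsearch


async def import_dangling_indices() -> None:
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        listing = await es.dangling_indices.list_dangling_indices()
        for dangling in listing["dangling_indices"]:
            await es.dangling_indices.import_dangling_index(
                index_uuid=dangling["index_uuid"], accept_data_loss=True
            )
    finally:
        await es.close()


if __name__ == "__main__":
    asyncio.run(import_dangling_indices())
# ---------------------------------------------------------------------------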
from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class DanglingIndicesClient(NamespacedClient): async def delete_dangling_index( self, *, index_uuid: Any, accept_data_loss: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def import_dangling_index( self, *, index_uuid: Any, accept_data_loss: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def list_dangling_indices( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/data_frame.py000066400000000000000000000137141426163262700250740ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class Data_FrameClient(NamespacedClient): @query_params() async def delete_data_frame_transform( self, transform_id, params=None, headers=None ): """ ``_ :arg transform_id: The id of the transform to delete """ if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." 
) return await self.transport.perform_request( "DELETE", _make_path("_data_frame", "transforms", transform_id), params=params, headers=headers, ) @query_params("from_", "size") async def get_data_frame_transform( self, transform_id=None, params=None, headers=None ): """ ``_ :arg transform_id: The id or comma delimited list of id expressions of the transforms to get, '_all' or '*' implies get all transforms :arg from_: skips a number of transform configs, defaults to 0 :arg size: specifies a max number of transforms to get, defaults to 100 """ return await self.transport.perform_request( "GET", _make_path("_data_frame", "transforms", transform_id), params=params, headers=headers, ) @query_params() async def get_data_frame_transform_stats( self, transform_id=None, params=None, headers=None ): """ ``_ :arg transform_id: The id of the transform for which to get stats. '_all' or '*' implies all transforms """ return await self.transport.perform_request( "GET", _make_path("_data_frame", "transforms", transform_id, "_stats"), params=params, headers=headers, ) @query_params() async def preview_data_frame_transform(self, body, params=None, headers=None): """ ``_ :arg body: The definition for the data_frame transform to preview """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_data_frame/transforms/_preview", params=params, headers=headers, body=body, ) @query_params() async def put_data_frame_transform( self, transform_id, body, params=None, headers=None ): """ ``_ :arg transform_id: The id of the new transform. :arg body: The data frame transform definition """ for param in (transform_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_data_frame", "transforms", transform_id), params=params, headers=headers, body=body, ) @query_params("timeout") async def start_data_frame_transform(self, transform_id, params=None, headers=None): """ ``_ :arg transform_id: The id of the transform to start :arg timeout: Controls the time to wait for the transform to start """ if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return await self.transport.perform_request( "POST", _make_path("_data_frame", "transforms", transform_id, "_start"), params=params, headers=headers, ) @query_params("timeout", "wait_for_completion") async def stop_data_frame_transform(self, transform_id, params=None, headers=None): """ ``_ :arg transform_id: The id of the transform to stop :arg timeout: Controls the time to wait until the transform has stopped. Default to 30 seconds :arg wait_for_completion: Whether to wait for the transform to fully stop before returning or not. Default to false """ if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return await self.transport.perform_request( "POST", _make_path("_data_frame", "transforms", transform_id, "_stop"), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/data_frame.pyi000066400000000000000000000127161426163262700252460ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class Data_FrameClient(NamespacedClient): async def delete_data_frame_transform( self, transform_id: Any, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_data_frame_transform( self, *, transform_id: Optional[Any] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def get_data_frame_transform_stats( self, transform_id: Optional[Any], *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def preview_data_frame_transform( self, *, body: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def put_data_frame_transform( self, transform_id: Any, *, body: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... 
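# ---------------------------------------------------------------------------
# Sketch of the (deprecated) data frame transform lifecycle typed above;
# recent clusters expose the same flow under the `transform` namespace. The
# transform id, indices, and pivot body are invented for illustration, and
# the `data_frame` attribute is assumed to still be wired up on the client.
import asyncio

from elasticsearch import AsyncElasticsearch


async def run_transform() -> None:
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        await es.data_frame.put_data_frame_transform(
            transform_id="ecommerce-totals",
            body={
                "source": {"index": "kibana_sample_data_ecommerce"},
                "dest": {"index": "ecommerce-totals"},
                "pivot": {
                    "group_by": {"user": {"terms": {"field": "user"}}},
                    "aggregations": {
                        "total": {"sum": {"field": "taxful_total_price"}}
                    },
                },
            },
        )
        await es.data_frame.start_data_frame_transform(
            transform_id="ecommerce-totals"
        )
        await es.data_frame.stop_data_frame_transform(
            transform_id="ecommerce-totals", wait_for_completion=True
        )
    finally:
        await es.close()


if __name__ == "__main__":
    asyncio.run(run_transform())
# ---------------------------------------------------------------------------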
async def start_data_frame_transform( self, transform_id: Any, *, timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... async def stop_data_frame_transform( self, transform_id: Any, *, timeout: Optional[Any] = ..., wait_for_completion: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/deprecation.py000066400000000000000000000024701426163262700253030ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, _make_path, query_params class DeprecationClient(NamespacedClient): @query_params() async def info(self, index=None, params=None, headers=None): """ ``_ :arg index: Index pattern """ return await self.transport.perform_request( "GET", _make_path(index, "_xpack", "migration", "deprecations"), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/deprecation.pyi000066400000000000000000000032041426163262700254500ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
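# ---------------------------------------------------------------------------
# Sketch of the deprecation report call implemented above, which hits the
# legacy `/_xpack/migration/deprecations` endpoint. The index name is a
# placeholder, and "index_settings" is one of the sections the report is
# expected to contain.
import asyncio

from elasticsearch import AsyncElasticsearch


async def print_deprecations() -> None:
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        report = await es.deprecation.info(index="my-index")
        print(report.get("index_settings", {}))
    finally:
        await es.close()


if __name__ == "__main__":
    asyncio.run(print_deprecations())
# ---------------------------------------------------------------------------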
from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class DeprecationClient(NamespacedClient): async def info( self, *, index: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/enrich.py000066400000000000000000000103421426163262700242530ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class EnrichClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def delete_policy(self, name, params=None, headers=None): """ Deletes an existing enrich policy and its enrich index. ``_ :arg name: The name of the enrich policy """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "DELETE", _make_path("_enrich", "policy", name), params=params, headers=headers, ) @query_params( "wait_for_completion", response_mimetypes=["application/json"], ) async def execute_policy(self, name, params=None, headers=None): """ Creates the enrich index for an existing enrich policy. ``_ :arg name: The name of the enrich policy :arg wait_for_completion: Whether the request should block until the execution is complete. Default: True """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "PUT", _make_path("_enrich", "policy", name, "_execute"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def get_policy(self, name=None, params=None, headers=None): """ Gets information about an enrich policy. ``_ :arg name: A comma-separated list of enrich policy names """ return await self.transport.perform_request( "GET", _make_path("_enrich", "policy", name), params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_policy(self, name, body, params=None, headers=None): """ Creates a new enrich policy.
``_ :arg name: The name of the enrich policy :arg body: The enrich policy to register """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_enrich", "policy", name), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def stats(self, params=None, headers=None): """ Gets enrich coordinator statistics and information about enrich policies that are currently executing. ``_ """ return await self.transport.perform_request( "GET", "/_enrich/_stats", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/enrich.pyi000066400000000000000000000111261426163262700244250ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class EnrichClient(NamespacedClient): async def delete_policy( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def execute_policy( self, *, name: Any, wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
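# ---------------------------------------------------------------------------
# Sketch of the enrich policy flow (`put_policy` then `execute_policy`)
# implemented earlier in enrich.py. The policy name, source index, and field
# names are invented for illustration.
import asyncio

from elasticsearch import AsyncElasticsearch


async def build_enrich_index() -> None:
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        await es.enrich.put_policy(
            name="users-policy",
            body={
                "match": {
                    "indices": "users",
                    "match_field": "email",
                    "enrich_fields": ["first_name", "last_name"],
                }
            },
        )
        # Builds the hidden enrich index that backs the policy.
        await es.enrich.execute_policy(name="users-policy")
    finally:
        await es.close()


if __name__ == "__main__":
    asyncio.run(build_enrich_index())
# ---------------------------------------------------------------------------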
async def get_policy( self, *, name: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_policy( self, *, name: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def stats( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/eql.py000066400000000000000000000113101426163262700235600ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class EqlClient(NamespacedClient): @query_params( "keep_alive", "keep_on_completion", "wait_for_completion_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def search(self, index, body, params=None, headers=None): """ Returns results matching a query expressed in Event Query Language (EQL) ``_ :arg index: The name of the index to scope the operation :arg body: Eql request body. Use the `query` to limit the query scope. 
:arg keep_alive: Update the time interval in which the results (partial or final) for this search will be available Default: 5d :arg keep_on_completion: Control whether the response should be stored in the cluster if it completed within the provided [wait_for_completion] time (default: false) :arg wait_for_completion_timeout: Specify the time that the request should block waiting for the final response """ for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path(index, "_eql", "search"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def delete(self, id, params=None, headers=None): """ Deletes an async EQL search by ID. If the search is still running, the search request will be cancelled. Otherwise, the saved search results are deleted. ``_ :arg id: The async search ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "DELETE", _make_path("_eql", "search", id), params=params, headers=headers ) @query_params( "keep_alive", "wait_for_completion_timeout", response_mimetypes=["application/json"], ) async def get(self, id, params=None, headers=None): """ Returns async results from previously executed Event Query Language (EQL) search ``_ :arg id: The async search ID :arg keep_alive: Update the time interval in which the results (partial or final) for this search will be available Default: 5d :arg wait_for_completion_timeout: Specify the time that the request should block waiting for the final response """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "GET", _make_path("_eql", "search", id), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_status(self, id, params=None, headers=None): """ Returns the status of a previously submitted async or stored Event Query Language (EQL) search ``_ :arg id: The async search ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "GET", _make_path("_eql", "search", "status", id), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/eql.pyi000066400000000000000000000100611426163262700237330ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
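# ---------------------------------------------------------------------------
# Sketch of an EQL search against the endpoint implemented above. The index
# name and the EQL query string are illustrative; EQL responses nest matched
# events under `hits.events`.
import asyncio

from elasticsearch import AsyncElasticsearch


async def find_cmd_processes() -> None:
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        response = await es.eql.search(
            index="my-security-events",
            body={"query": 'process where process.name == "cmd.exe"'},
        )
        print(len(response["hits"]["events"]))
    finally:
        await es.close()


if __name__ == "__main__":
    asyncio.run(find_cmd_processes())
# ---------------------------------------------------------------------------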
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class EqlClient(NamespacedClient): async def search( self, *, index: Any, body: Mapping[str, Any], keep_alive: Optional[Any] = ..., keep_on_completion: Optional[bool] = ..., wait_for_completion_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get( self, *, id: Any, keep_alive: Optional[Any] = ..., wait_for_completion_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_status( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/features.py000066400000000000000000000041321426163262700246210ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, query_params class FeaturesClient(NamespacedClient): @query_params( "master_timeout", response_mimetypes=["application/json"], ) async def get_features(self, params=None, headers=None): """ Gets a list of features which can be included in snapshots using the feature_states field when creating a snapshot ``_ :arg master_timeout: Explicit operation timeout for connection to master node """ return await self.transport.perform_request( "GET", "/_features", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def reset_features(self, params=None, headers=None): """ Resets the internal state of features, usually by deleting system indices ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version """ return await self.transport.perform_request( "POST", "/_features/_reset", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/features.pyi000066400000000000000000000045501426163262700247760ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class FeaturesClient(NamespacedClient): async def get_features( self, *, master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
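    # Usage sketch (illustrative, not part of the generated stubs; the client
    # instance `es` is assumed to be an AsyncElasticsearch): list the feature
    # states that can be captured via the `feature_states` field of a snapshot.
    #
    #     feats = await es.features.get_features()
    #     names = [f["name"] for f in feats["features"]]
    #     # reset_features() deletes the backing system indices; it is meant
    #     # for cleanup between tests, not for production clusters.
    #     await es.features.reset_features()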
async def reset_features( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/fleet.py000066400000000000000000000114641426163262700241070ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _bulk_body, _make_path, query_params class FleetClient(NamespacedClient): @query_params( "checkpoints", "timeout", "wait_for_advance", "wait_for_index", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def global_checkpoints(self, index, params=None, headers=None): """ Returns the current global checkpoints for an index. This API is designed for internal use by the fleet server project. ``_ :arg index: The name of the index. :arg checkpoints: Comma separated list of checkpoints :arg timeout: Timeout to wait for global checkpoint to advance Default: 30s :arg wait_for_advance: Whether to wait for the global checkpoint to advance past the specified current checkpoints Default: false :arg wait_for_index: Whether to wait for the target index to exist and all primary shards to be active Default: false """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "GET", _make_path(index, "_fleet", "global_checkpoints"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) async def msearch(self, body, index=None, params=None, headers=None): """ Multi Search API where the search will only be executed after specified checkpoints are available due to a refresh. This API is designed for internal use by the fleet server project. ..
warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg body: The request definitions (metadata-fleet search request definition pairs), separated by newlines :arg index: The index name to use as the default """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return await self.transport.perform_request( "POST", _make_path(index, "_fleet", "_fleet_msearch"), params=params, headers=headers, body=body, ) @query_params( "allow_partial_search_results", "wait_for_checkpoints", "wait_for_checkpoints_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def search(self, index, body=None, params=None, headers=None): """ Search API where the search will only be executed after specified checkpoints are available due to a refresh. This API is designed for internal use by the fleet server project. .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: The index name to search. :arg body: The search definition using the Query DSL :arg allow_partial_search_results: Indicate if an error should be returned if there is a partial search failure or timeout Default: True :arg wait_for_checkpoints: Comma separated list of checkpoints, one per shard :arg wait_for_checkpoints_timeout: Explicit wait_for_checkpoints timeout """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_fleet", "_fleet_search"), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/fleet.pyi000066400000000000000000000071031426163262700242540ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
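# Usage sketch for the fleet APIs implemented in fleet.py above (illustrative
# only; `es`, the index name, and the checkpoint value are hypothetical):
#
#     # Block for up to 30s until every shard's global checkpoint has
#     # advanced past the supplied values.
#     resp = await es.fleet.global_checkpoints(
#         index="my-index",
#         wait_for_advance=True,
#         wait_for_index=True,
#         checkpoints="41",
#         timeout="30s",
#     )
#     print(resp["global_checkpoints"], resp["timed_out"])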
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Sequence, Tuple, Union, ) from .utils import NamespacedClient class FleetClient(NamespacedClient): async def global_checkpoints( self, *, index: Any, checkpoints: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_advance: Optional[bool] = ..., wait_for_index: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def msearch( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], index: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def search( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., allow_partial_search_results: Optional[bool] = ..., wait_for_checkpoints: Optional[Any] = ..., wait_for_checkpoints_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/graph.py000066400000000000000000000041301426163262700241020ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
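# Usage sketch for GraphClient.explore below (illustrative only; `es` is an
# assumed AsyncElasticsearch instance, and the index and field names mirror
# the example in the Elastic graph documentation):
#
#     resp = await es.graph.explore(
#         index="clicklogs",
#         body={
#             "query": {"match": {"query.raw": "midi"}},
#             "vertices": [{"field": "product"}],
#             "connections": {"vertices": [{"field": "query.raw"}]},
#         },
#     )
#     print(resp["vertices"], resp["connections"])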
from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class GraphClient(NamespacedClient): @query_params( "routing", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def explore(self, index, body=None, doc_type=None, params=None, headers=None): """ Explore extracted and summarized information about the documents and terms in an index. ``_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg body: Graph Query DSL :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg routing: Specific routing value :arg timeout: Explicit operation timeout """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, doc_type, "_graph", "explore"), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/graph.pyi000066400000000000000000000035241426163262700242610ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class GraphClient(NamespacedClient): async def explore( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., doc_type: Optional[Any] = ..., routing: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/ilm.py000066400000000000000000000211551426163262700235700ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IlmClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def delete_lifecycle(self, policy, params=None, headers=None): """ Deletes the specified lifecycle policy definition. A currently used policy cannot be deleted. ``_ :arg policy: The name of the index lifecycle policy """ if policy in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'policy'.") return await self.transport.perform_request( "DELETE", _make_path("_ilm", "policy", policy), params=params, headers=headers, ) @query_params( "only_errors", "only_managed", response_mimetypes=["application/json"], ) async def explain_lifecycle(self, index, params=None, headers=None): """ Retrieves information about the index's current lifecycle state, such as the currently executing phase, action, and step. ``_ :arg index: The name of the index to explain :arg only_errors: filters the indices included in the response to ones in an ILM error state, implies only_managed :arg only_managed: filters the indices included in the response to ones managed by ILM """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "GET", _make_path(index, "_ilm", "explain"), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_lifecycle(self, policy=None, params=None, headers=None): """ Returns the specified policy definition. Includes the policy version and last modified date. ``_ :arg policy: The name of the index lifecycle policy """ return await self.transport.perform_request( "GET", _make_path("_ilm", "policy", policy), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_status(self, params=None, headers=None): """ Retrieves the current index lifecycle management (ILM) status. ``_ """ return await self.transport.perform_request( "GET", "/_ilm/status", params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def move_to_step(self, index, body=None, params=None, headers=None): """ Manually moves an index into the specified step and executes that step. 
``_ :arg index: The name of the index whose lifecycle step is to change :arg body: The new lifecycle step to move to """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path("_ilm", "move", index), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_lifecycle(self, policy, body=None, params=None, headers=None): """ Creates a lifecycle policy ``_ :arg policy: The name of the index lifecycle policy :arg body: The lifecycle policy definition to register """ if policy in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'policy'.") return await self.transport.perform_request( "PUT", _make_path("_ilm", "policy", policy), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def remove_policy(self, index, params=None, headers=None): """ Removes the assigned lifecycle policy and stops managing the specified index ``_ :arg index: The name of the index to remove policy on """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_ilm", "remove"), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def retry(self, index, params=None, headers=None): """ Retries executing the policy for an index that is in the ERROR step. ``_ :arg index: The name of the indices (comma-separated) whose failed lifecycle step is to be retried """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_ilm", "retry"), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def start(self, params=None, headers=None): """ Start the index lifecycle management (ILM) plugin. ``_ """ return await self.transport.perform_request( "POST", "/_ilm/start", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def stop(self, params=None, headers=None): """ Halts all lifecycle management operations and stops the index lifecycle management (ILM) plugin ``_ """ return await self.transport.perform_request( "POST", "/_ilm/stop", params=params, headers=headers ) @query_params( "dry_run", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def migrate_to_data_tiers(self, body=None, params=None, headers=None): """ Migrates the indices and ILM policies away from custom node attribute allocation routing to data tiers routing ``_ :arg body: Optionally specify a legacy index template name to delete and optionally specify a node attribute name used for index shard routing (defaults to "data") :arg dry_run: If set to true it will simulate the migration, providing a way to retrieve the ILM policies and indices that need to be migrated. The default is false """ return await self.transport.perform_request( "POST", "/_ilm/migrate_to_data_tiers", params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/ilm.pyi000066400000000000000000000217521426163262700237430ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements.
See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class IlmClient(NamespacedClient): async def delete_lifecycle( self, *, policy: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def explain_lifecycle( self, *, index: Any, only_errors: Optional[bool] = ..., only_managed: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_lifecycle( self, *, policy: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_status( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
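    # Usage sketch (illustrative; `es` and the policy body are hypothetical):
    # register a policy that rolls over hot indices at 25GB and deletes them
    # 30 days after rollover.
    #
    #     await es.ilm.put_lifecycle(
    #         policy="logs-policy",
    #         body={
    #             "policy": {
    #                 "phases": {
    #                     "hot": {"actions": {"rollover": {"max_size": "25gb"}}},
    #                     "delete": {"min_age": "30d", "actions": {"delete": {}}},
    #                 }
    #             }
    #         },
    #     )
    #     print(await es.ilm.get_lifecycle(policy="logs-policy"))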
async def move_to_step( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_lifecycle( self, *, policy: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def remove_policy( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def retry( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def start( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
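    # Usage sketch (illustrative; `es` is an assumed AsyncElasticsearch):
    # ILM is typically stopped before maintenance such as restoring a
    # snapshot, then restarted once the work is done.
    #
    #     import asyncio
    #
    #     await es.ilm.stop()
    #     while (await es.ilm.get_status())["operation_mode"] != "STOPPED":
    #         await asyncio.sleep(1)
    #     # ... perform maintenance ...
    #     await es.ilm.start()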
async def stop( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def migrate_to_data_tiers( self, *, body: Optional[Mapping[str, Any]] = ..., dry_run: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/indices.py000066400000000000000000002366131426163262700244320ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndicesClient(NamespacedClient): @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def analyze(self, body=None, index=None, params=None, headers=None): """ Performs the analysis process on a text and returns the token breakdown of the text. ``_ :arg body: Define analyzer/tokenizer parameters and the text on which the analysis should be performed :arg index: The name of the index to scope the operation """ return await self.transport.perform_request( "POST", _make_path(index, "_analyze"), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", response_mimetypes=["application/json"], ) async def refresh(self, index=None, params=None, headers=None): """ Performs the refresh operation on one or more indices. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both.
Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) """ return await self.transport.perform_request( "POST", _make_path(index, "_refresh"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "force", "ignore_unavailable", "wait_if_ongoing", response_mimetypes=["application/json"], ) async def flush(self, index=None, params=None, headers=None): """ Performs the flush operation on one or more indices. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string for all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg force: Whether a flush should be forced even if it is not necessarily needed, i.e. if no changes will be committed to the index. This is useful if transaction log IDs should be incremented even if no uncommitted changes are present. (This setting can be considered as internal) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg wait_if_ongoing: If set to true the flush operation will block until the flush can be executed if another flush operation is already executing. The default is true. If set to false the flush will be skipped if another flush operation is already running. """ return await self.transport.perform_request( "POST", _make_path(index, "_flush"), params=params, headers=headers ) @query_params( "include_type_name", "master_timeout", "timeout", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], body_params=["aliases", "mappings", "settings"], ) async def create(self, index, body=None, params=None, headers=None): """ Creates an index with optional settings and mappings. ``_ :arg index: The name of the index :arg body: The configuration for the index (`settings` and `mappings`) :arg aliases: :arg include_type_name: Whether a type should be expected in the body of the mappings. :arg mappings: Mapping for fields in the index. If specified, this mapping can include: - Field names - Field data types - Mapping parameters :arg master_timeout: Specify timeout for connection to master :arg settings: :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Set the number of active shards to wait for before the operation returns.
""" if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "PUT", _make_path(index), params=params, headers=headers, body=body ) @query_params( "master_timeout", "timeout", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def clone(self, index, target, body=None, params=None, headers=None): """ Clones an index ``_ :arg index: The name of the source index to clone :arg target: The name of the target index to clone into :arg body: The configuration for the target index (`settings` and `aliases`) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Set the number of active shards to wait for on the cloned index before the operation returns. """ for param in (index, target): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path(index, "_clone", target), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "include_type_name", "local", "master_timeout", response_mimetypes=["application/json"], ) async def get(self, index, params=None, headers=None): """ Returns information about one or more indices. ``_ :arg index: Comma-separated list of data streams, indices, and index aliases used to limit the request. Wildcard expressions (*) are supported. :arg allow_no_indices: Ignore if a wildcard expression resolves to no concrete indices (default: false) Default: True :arg expand_wildcards: Type of index that wildcard expressions can match. If the request can target data streams, this argument determines whether wildcard expressions match hidden data streams. Supports comma-separated values, such as open,hidden. Valid choices: open, closed, hidden, none, all Default: open :arg flat_settings: If true, returns settings in flat format. :arg ignore_unavailable: If false, requests that target a missing index return an error. :arg include_defaults: If true, return all default settings in the response. :arg include_type_name: If true, a mapping type is expected in the body of mappings. :arg local: If true, the request retrieves information from the local node only. Defaults to false, which means information is retrieved from the master node. :arg master_timeout: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. Default: 30s """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "GET", _make_path(index), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "master_timeout", "timeout", "wait_for_active_shards", response_mimetypes=["application/json"], ) async def open(self, index, params=None, headers=None): """ Opens an index. ``_ :arg index: A comma separated list of indices to open :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, hidden, none, all Default: closed :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Sets the number of active shards to wait for before the operation returns. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_open"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "master_timeout", "timeout", "wait_for_active_shards", response_mimetypes=["application/json"], ) async def close(self, index, params=None, headers=None): """ Closes an index. ``_ :arg index: A comma separated list of indices to close :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Sets the number of active shards to wait for before the operation returns. Set to `index-setting` to wait according to the index setting `index.write.wait_for_active_shards`, or `all` to wait for all shards, or an integer. Defaults to `0`. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_close"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def delete(self, index, params=None, headers=None): """ Deletes an index. ``_ :arg index: A comma-separated list of indices to delete; use `_all` or `*` string to delete all indices :arg allow_no_indices: Ignore if a wildcard expression resolves to no concrete indices (default: false) :arg expand_wildcards: Whether wildcard expressions should get expanded to open, closed, or hidden indices Valid choices: open, closed, hidden, none, all Default: open,closed :arg ignore_unavailable: Ignore unavailable indexes (default: false) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "DELETE", _make_path(index), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "local", response_mimetypes=["application/json"], ) async def exists(self, index, params=None, headers=None): """ Returns information about whether a particular index exists. 
``_ :arg index: A comma-separated list of index names :arg allow_no_indices: Ignore if a wildcard expression resolves to no concrete indices (default: false) :arg expand_wildcards: Whether wildcard expressions should get expanded to open or closed indices (default: open) Valid choices: open, closed, hidden, none, all Default: open :arg flat_settings: Return settings in flat format (default: false) :arg ignore_unavailable: Ignore unavailable indexes (default: false) :arg include_defaults: Whether to return all default setting for each of the indices. :arg local: Return local information, do not retrieve the state from master node (default: false) """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "HEAD", _make_path(index), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "local", response_mimetypes=["application/json"], ) async def exists_type(self, index, doc_type, params=None, headers=None): """ Returns information about whether a particular document type exists. (DEPRECATED) ``_ :arg index: A comma-separated list of index names; use `_all` to check the types across all indices :arg doc_type: A comma-separated list of document types to check :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) """ for param in (index, doc_type): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "HEAD", _make_path(index, "_mapping", doc_type), params=params, headers=headers, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "include_type_name", "master_timeout", "timeout", "write_index_only", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_mapping( self, body, index=None, doc_type=None, params=None, headers=None ): """ Updates the index mappings. ``_ :arg body: The mapping definition :arg index: A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices. :arg doc_type: The name of the document type :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg include_type_name: Whether a type should be expected in the body of the mappings. 
:arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg write_index_only: When true, applies mappings only to the write index of an alias or data stream """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") if doc_type not in SKIP_IN_PATH and index in SKIP_IN_PATH: index = "_all" return await self.transport.perform_request( "PUT", _make_path(index, doc_type, "_mapping"), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "include_type_name", "local", "master_timeout", response_mimetypes=["application/json"], ) async def get_mapping(self, index=None, doc_type=None, params=None, headers=None): """ Returns mappings for one or more indices. ``_ :arg index: A comma-separated list of index names :arg doc_type: A comma-separated list of document types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg include_type_name: Whether to add the type name to the response (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Specify timeout for connection to master """ return await self.transport.perform_request( "GET", _make_path(index, "_mapping", doc_type), params=params, headers=headers, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "include_defaults", "include_type_name", "local", response_mimetypes=["application/json"], ) async def get_field_mapping( self, fields, index=None, doc_type=None, params=None, headers=None ): """ Returns mapping for one or more fields. ``_ :arg fields: A comma-separated list of fields :arg index: A comma-separated list of index names :arg doc_type: A comma-separated list of document types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg include_defaults: Whether the default mapping values should be returned as well :arg include_type_name: Whether a type should be returned in the body of the mappings. :arg local: Return local information, do not retrieve the state from master node (default: false) """ if fields in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'fields'.") return await self.transport.perform_request( "GET", _make_path(index, "_mapping", doc_type, "field", fields), params=params, headers=headers, ) @query_params( "master_timeout", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_alias(self, index, name, body=None, params=None, headers=None): """ Creates or updates an alias. 
``_ :arg index: A comma-separated list of index names the alias should point to (supports wildcards); use `_all` to perform the operation on all indices. :arg name: The name of the alias to be created or updated :arg body: The settings for the alias, such as `routing` or `filter` :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ for param in (index, name): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path(index, "_alias", name), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "local", response_mimetypes=["application/json"], ) async def exists_alias(self, name, index=None, params=None, headers=None): """ Returns information about whether a particular alias exists. ``_ :arg name: A comma-separated list of alias names to return :arg index: A comma-separated list of index names to filter aliases :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: all :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "HEAD", _make_path(index, "_alias", name), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "local", response_mimetypes=["application/json"], ) async def get_alias(self, index=None, name=None, params=None, headers=None): """ Returns an alias. ``_ :arg index: A comma-separated list of index names to filter aliases :arg name: A comma-separated list of alias names to return :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: all :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) """ return await self.transport.perform_request( "GET", _make_path(index, "_alias", name), params=params, headers=headers ) @query_params( "master_timeout", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def update_aliases(self, body, params=None, headers=None): """ Updates index aliases.
``_ :arg body: The definition of `actions` to perform :arg master_timeout: Specify timeout for connection to master :arg timeout: Request timeout """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_aliases", params=params, headers=headers, body=body ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def delete_alias(self, index, name, params=None, headers=None): """ Deletes an alias. ``_ :arg index: A comma-separated list of index names (supports wildcards); use `_all` for all indices :arg name: A comma-separated list of aliases to delete (supports wildcards); use `_all` to delete all aliases for the specified indices. :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ for param in (index, name): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "DELETE", _make_path(index, "_aliases", name), params=params, headers=headers, ) @query_params( "create", "include_type_name", "master_timeout", "order", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_template(self, name, body, params=None, headers=None): """ Creates or updates an index template. ``_ :arg name: The name of the template :arg body: The template definition :arg create: Whether the index template should only be added if new or can also replace an existing one :arg include_type_name: Whether a type should be returned in the body of the mappings. :arg master_timeout: Specify timeout for connection to master :arg order: The order for this template when merging multiple matching ones (higher numbers are merged later, overriding the lower numbers) """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_template", name), params=params, headers=headers, body=body, ) @query_params( "flat_settings", "local", "master_timeout", response_mimetypes=["application/json"], ) async def exists_template(self, name, params=None, headers=None): """ Returns information about whether a particular index template exists. ``_ :arg name: The comma separated names of the index templates :arg flat_settings: Return settings in flat format (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "HEAD", _make_path("_template", name), params=params, headers=headers ) @query_params( "flat_settings", "include_type_name", "local", "master_timeout", response_mimetypes=["application/json"], ) async def get_template(self, name=None, params=None, headers=None): """ Returns an index template. ``_ :arg name: The comma separated names of the index templates :arg flat_settings: Return settings in flat format (default: false) :arg include_type_name: Whether a type should be returned in the body of the mappings.
:arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ return await self.transport.perform_request( "GET", _make_path("_template", name), params=params, headers=headers ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def delete_template(self, name, params=None, headers=None): """ Deletes an index template. ``_ :arg name: The name of the template :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "DELETE", _make_path("_template", name), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "local", "master_timeout", response_mimetypes=["application/json"], ) async def get_settings(self, index=None, name=None, params=None, headers=None): """ Returns settings for one or more indices. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg name: The name of the settings that should be included :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: all :arg flat_settings: Return settings in flat format (default: false) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg include_defaults: Whether to return all default settings for each of the indices. :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Specify timeout for connection to master """ return await self.transport.perform_request( "GET", _make_path(index, "_settings", name), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "flat_settings", "ignore_unavailable", "master_timeout", "preserve_existing", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_settings(self, body, index=None, params=None, headers=None): """ Updates the index settings. ``_ :arg body: The index settings to be updated :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg flat_settings: Return settings in flat format (default: false) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg master_timeout: Specify timeout for connection to master :arg preserve_existing: Whether to update existing settings.
If set to `true` existing settings on an index remain unchanged, the default is `false` :arg timeout: Explicit operation timeout """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PUT", _make_path(index, "_settings"), params=params, headers=headers, body=body, ) @query_params( "completion_fields", "expand_wildcards", "fielddata_fields", "fields", "forbid_closed_indices", "groups", "include_segment_file_sizes", "include_unloaded_segments", "level", "types", response_mimetypes=["application/json"], ) async def stats(self, index=None, metric=None, params=None, headers=None): """ Provides statistics on operations happening in an index. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg metric: Limit the information returned to the specific metrics. :arg completion_fields: A comma-separated list of fields for `fielddata` and `suggest` index metric (supports wildcards) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg fielddata_fields: A comma-separated list of fields for `fielddata` index metric (supports wildcards) :arg fields: A comma-separated list of fields for `fielddata` and `completion` index metric (supports wildcards) :arg forbid_closed_indices: If set to false stats will also be collected from closed indices if explicitly specified or if expand_wildcards expands to closed indices Default: True :arg groups: A comma-separated list of search groups for `search` index metric :arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only applies if segment stats are requested) :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into memory :arg level: Return stats aggregated at cluster, index or shard level Valid choices: cluster, indices, shards Default: indices :arg types: A comma-separated list of document types for the `indexing` index metric """ return await self.transport.perform_request( "GET", _make_path(index, "_stats", metric), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "verbose", response_mimetypes=["application/json"], ) async def segments(self, index=None, params=None, headers=None): """ Provides low-level information about segments in a Lucene index. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg verbose: Includes detailed memory usage by Lucene.
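A minimal illustrative call (assuming an ``AsyncElasticsearch`` instance named ``client``; the index name is a placeholder)::

    await client.indices.segments(index="my-index", verbose=True)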
""" return await self.transport.perform_request( "GET", _make_path(index, "_segments"), params=params, headers=headers ) @query_params( "all_shards", "allow_no_indices", "analyze_wildcard", "analyzer", "default_operator", "df", "expand_wildcards", "explain", "ignore_unavailable", "lenient", "q", "rewrite", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def validate_query( self, body=None, index=None, doc_type=None, params=None, headers=None ): """ Allows a user to validate a potentially expensive query without executing it. ``_ :arg body: The query definition specified with the Query DSL :arg index: A comma-separated list of index names to restrict the operation; use `_all` or empty string to perform the operation on all indices :arg doc_type: A comma-separated list of document types to restrict the operation; leave empty to perform the operation on all types :arg all_shards: Execute validation on all shards instead of one random shard per index :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg default_operator: The default operator for query string query (AND or OR) Valid choices: AND, OR Default: OR :arg df: The field to use as default where no field prefix is given in the query string :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg explain: Return detailed information about the error :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg q: Query in the Lucene query string syntax :arg rewrite: Provide a more detailed explanation showing the actual Lucene query that will be executed. """ return await self.transport.perform_request( "POST", _make_path(index, doc_type, "_validate", "query"), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "fielddata", "fields", "ignore_unavailable", "query", "request", response_mimetypes=["application/json"], ) async def clear_cache(self, index=None, params=None, headers=None): """ Clears all or specific caches for one or more indices. ``_ :arg index: A comma-separated list of index name to limit the operation :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, hidden, none, all Default: open :arg fielddata: Clear field data :arg fields: A comma-separated list of fields to clear when using the `fielddata` parameter (default: all) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg query: Clear query caches :arg request: Clear request cache """ return await self.transport.perform_request( "POST", _make_path(index, "_cache", "clear"), params=params, headers=headers ) @query_params( "active_only", "detailed", response_mimetypes=["application/json"], ) async def recovery(self, index=None, params=None, headers=None): """ Returns information about ongoing index shard recoveries. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg active_only: Display only those recoveries that are currently on-going :arg detailed: Whether to display detailed information about shard recovery """ return await self.transport.perform_request( "GET", _make_path(index, "_recovery"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "only_ancient_segments", "wait_for_completion", response_mimetypes=["application/json"], ) async def upgrade(self, index=None, params=None, headers=None): """ DEPRECATED Upgrades to the current version of Lucene. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg only_ancient_segments: If true, only ancient (an older Lucene major release) segments will be upgraded :arg wait_for_completion: Specify whether the request should block until all segments are upgraded (default: false) """ return await self.transport.perform_request( "POST", _make_path(index, "_upgrade"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", response_mimetypes=["application/json"], ) async def get_upgrade(self, index=None, params=None, headers=None): """ DEPRECATED Returns a progress status of current upgrade. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) """ return await self.transport.perform_request( "GET", _make_path(index, "_upgrade"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", response_mimetypes=["application/json"], ) async def flush_synced(self, index=None, params=None, headers=None): """ Performs a synced flush operation on one or more indices.
Synced flush is deprecated and will be removed in 8.0. Use flush instead. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string for all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) """ return await self.transport.perform_request( "POST", _make_path(index, "_flush", "synced"), params=params, headers=headers, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "status", response_mimetypes=["application/json"], ) async def shard_stores(self, index=None, params=None, headers=None): """ Provides store information for shard copies of indices. ``_ :arg index: List of data streams, indices, and aliases used to limit the request. :arg allow_no_indices: If false, the request returns an error if any wildcard expression, index alias, or _all value targets only missing or closed indices. This behavior applies even if the request targets other open indices. :arg expand_wildcards: Type of index that wildcard patterns can match. If the request can target data streams, this argument determines whether wildcard expressions match hidden data streams. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: If true, missing or closed indices are not included in the response. :arg status: List of shard health statuses used to limit the request. Valid choices: green, yellow, red, all """ return await self.transport.perform_request( "GET", _make_path(index, "_shard_stores"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "flush", "ignore_unavailable", "max_num_segments", "only_expunge_deletes", response_mimetypes=["application/json"], ) async def forcemerge(self, index=None, params=None, headers=None): """ Performs the force merge operation on one or more indices. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg flush: Specify whether the index should be flushed after performing the operation (default: true) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg max_num_segments: The number of segments the index should be merged into (default: dynamic) :arg only_expunge_deletes: Specify whether the operation should only expunge deleted documents """ return await self.transport.perform_request( "POST", _make_path(index, "_forcemerge"), params=params, headers=headers ) @query_params( "copy_settings", "master_timeout", "timeout", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def shrink(self, index, target, body=None, params=None, headers=None): """ Allows you to shrink an existing index into a new index with fewer primary shards.
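An illustrative target-index ``body`` (a sketch only; the shard count and alias name are placeholders, not defaults)::

    {
        "settings": {"index.number_of_shards": 1},
        "aliases": {"my-search-alias": {}}
    }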
``_ :arg index: The name of the source index to shrink :arg target: The name of the target index to shrink into :arg body: The configuration for the target index (`settings` and `aliases`) :arg copy_settings: whether or not to copy settings from the source index (defaults to false) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Set the number of active shards to wait for on the shrunken index before the operation returns. """ for param in (index, target): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path(index, "_shrink", target), params=params, headers=headers, body=body, ) @query_params( "copy_settings", "master_timeout", "timeout", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def split(self, index, target, body=None, params=None, headers=None): """ Allows you to split an existing index into a new index with more primary shards. ``_ :arg index: The name of the source index to split :arg target: The name of the target index to split into :arg body: The configuration for the target index (`settings` and `aliases`) :arg copy_settings: whether or not to copy settings from the source index (defaults to false) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Set the number of active shards to wait for on the newly split index before the operation returns. """ for param in (index, target): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path(index, "_split", target), params=params, headers=headers, body=body, ) @query_params( "dry_run", "include_type_name", "master_timeout", "timeout", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def rollover( self, alias, body=None, new_index=None, params=None, headers=None ): """ Updates an alias to point to a new index when the existing index is considered to be too large or too old. ``_ :arg alias: The name of the alias to rollover :arg body: The conditions that need to be met for executing rollover :arg new_index: The name of the rollover index :arg dry_run: If set to true the rollover action will only be validated but not actually performed even if a condition matches. The default is false :arg include_type_name: Whether a type should be included in the body of the mappings. :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Set the number of active shards to wait for on the newly created rollover index before the operation returns. """ if alias in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'alias'.") return await self.transport.perform_request( "POST", _make_path(alias, "_rollover", new_index), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "master_timeout", "timeout", "wait_for_active_shards", response_mimetypes=["application/json"], ) async def freeze(self, index, params=None, headers=None): """ Freezes an index. A frozen index has almost no overhead on the cluster (except for maintaining its metadata in memory) and is read-only.
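Illustrative usage (assuming an ``AsyncElasticsearch`` instance named ``client``; the index name is a placeholder)::

    await client.indices.freeze(index="logs-2020")
    # ... the index can still be searched, but not written to ...
    await client.indices.unfreeze(index="logs-2020")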
``_ :arg index: The name of the index to freeze :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: closed :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Sets the number of active shards to wait for before the operation returns. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_freeze"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "master_timeout", "timeout", "wait_for_active_shards", response_mimetypes=["application/json"], ) async def unfreeze(self, index, params=None, headers=None): """ Unfreezes an index. When a frozen index is unfrozen, the index goes through the normal recovery process and becomes writeable again. ``_ :arg index: The name of the index to unfreeze :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: closed :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Sets the number of active shards to wait for before the operation returns. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_unfreeze"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", response_mimetypes=["application/json"], ) async def reload_search_analyzers(self, index, params=None, headers=None): """ Reloads an index's search analyzers and their resources. ``_ :arg index: A comma-separated list of index names to reload analyzers for :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "GET", _make_path(index, "_reload_search_analyzers"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def create_data_stream(self, name, params=None, headers=None): """ Creates a data stream ``_ :arg name: The name of the data stream """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "PUT", _make_path("_data_stream", name), params=params, headers=headers ) @query_params( "expand_wildcards", response_mimetypes=["application/json"], ) async def delete_data_stream(self, name, params=None, headers=None): """ Deletes a data stream. ``_ :arg name: A comma-separated list of data streams to delete; use `*` to delete all data streams :arg expand_wildcards: Whether wildcard expressions should get expanded to open or closed indices (default: open) Valid choices: open, closed, hidden, none, all Default: open """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "DELETE", _make_path("_data_stream", name), params=params, headers=headers ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def delete_index_template(self, name, params=None, headers=None): """ Deletes an index template. ``_ :arg name: The name of the template :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "DELETE", _make_path("_index_template", name), params=params, headers=headers, ) @query_params( "flat_settings", "local", "master_timeout", response_mimetypes=["application/json"], ) async def exists_index_template(self, name, params=None, headers=None): """ Returns information about whether a particular index template exists. ``_ :arg name: The name of the template :arg flat_settings: Return settings in flat format (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "HEAD", _make_path("_index_template", name), params=params, headers=headers ) @query_params( "flat_settings", "local", "master_timeout", response_mimetypes=["application/json"], ) async def get_index_template(self, name=None, params=None, headers=None): """ Returns an index template. 
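Illustrative usage (a sketch only; the pattern is a placeholder and assumes an ``AsyncElasticsearch`` instance named ``client``)::

    await client.indices.get_index_template(name="logs-*")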
``_ :arg name: A pattern that returned template names must match :arg flat_settings: Return settings in flat format (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ return await self.transport.perform_request( "GET", _make_path("_index_template", name), params=params, headers=headers ) @query_params( "cause", "create", "master_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_index_template(self, name, body, params=None, headers=None): """ Creates or updates an index template. ``_ :arg name: The name of the template :arg body: The template definition :arg cause: User-defined reason for creating/updating the index template :arg create: Whether the index template should only be added if new or can also replace an existing one :arg master_timeout: Specify timeout for connection to master """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_index_template", name), params=params, headers=headers, body=body, ) @query_params( "cause", "create", "master_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def simulate_index_template(self, name, body=None, params=None, headers=None): """ Simulate matching the given index name against the index templates in the system ``_ :arg name: The name of the index (it must be a concrete index name) :arg body: New index template definition, which will be included in the simulation, as if it already exists in the system :arg cause: User-defined reason for dry-run creating the new template for simulation purposes :arg create: Whether the index template optionally defined in the body should only be added (as a dry run) if new, or may also replace an existing one :arg master_timeout: Specify timeout for connection to master """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "POST", _make_path("_index_template", "_simulate_index", name), params=params, headers=headers, body=body, ) @query_params( "expand_wildcards", response_mimetypes=["application/json"], ) async def get_data_stream(self, name=None, params=None, headers=None): """ Returns data streams.
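Illustrative usage (a sketch only; the data stream name is a placeholder and wildcards are accepted)::

    await client.indices.get_data_stream(name="logs-*")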
``_ :arg name: A comma-separated list of data streams to get; use `*` to get all data streams :arg expand_wildcards: Whether wildcard expressions should get expanded to open or closed indices (default: open) Valid choices: open, closed, hidden, none, all Default: open """ return await self.transport.perform_request( "GET", _make_path("_data_stream", name), params=params, headers=headers ) @query_params( "cause", "create", "master_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def simulate_template(self, body=None, name=None, params=None, headers=None): """ Simulate resolving the given template name or body ``_ :arg body: New index template definition to be simulated, if no index template name is specified :arg name: The name of the index template :arg cause: User-defined reason for dry-run creating the new template for simulation purposes :arg create: Whether the index template optionally defined in the body should only be added (as a dry run) if new, or may also replace an existing one :arg master_timeout: Specify timeout for connection to master """ return await self.transport.perform_request( "POST", _make_path("_index_template", "_simulate", name), params=params, headers=headers, body=body, ) @query_params( "expand_wildcards", response_mimetypes=["application/json"], ) async def resolve_index(self, name, params=None, headers=None): """ Returns information about any matching indices, aliases, and data streams ``_ :arg name: A comma-separated list of names or wildcard expressions :arg expand_wildcards: Whether wildcard expressions should get expanded to open or closed indices (default: open) Valid choices: open, closed, hidden, none, all Default: open """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "GET", _make_path("_resolve", "index", name), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def add_block(self, index, block, params=None, headers=None): """ Adds a block to an index. ``_ :arg index: A comma-separated list of indices to add a block to :arg block: The block to add (one of read, write, read_only or metadata) :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ for param in (index, block): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path(index, "_block", block), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def data_streams_stats(self, name=None, params=None, headers=None): """ Provides statistics on operations happening in a data stream.
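Illustrative usage (a sketch only; assumes an ``AsyncElasticsearch`` instance named ``client``)::

    await client.indices.data_streams_stats(name="logs-*")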
``_ :arg name: A comma-separated list of data stream names; use `_all` or empty string to perform the operation on all data streams """ return await self.transport.perform_request( "GET", _make_path("_data_stream", name, "_stats"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def promote_data_stream(self, name, params=None, headers=None): """ Promotes a data stream from a replicated data stream managed by CCR to a regular data stream ``_ :arg name: The name of the data stream """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "POST", _make_path("_data_stream", "_promote", name), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def migrate_to_data_stream(self, name, params=None, headers=None): """ Migrates an alias to a data stream ``_ :arg name: The name of the alias to migrate """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "POST", _make_path("_data_stream", "_migrate", name), params=params, headers=headers, ) @query_params( "allow_no_indices", "expand_wildcards", "flush", "ignore_unavailable", "run_expensive_tasks", response_mimetypes=["application/json"], ) async def disk_usage(self, index, params=None, headers=None): """ Analyzes the disk usage of each field of an index or data stream ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: Comma-separated list of indices or data streams to analyze the disk usage :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg flush: Whether to flush before analyzing the index disk usage. Defaults to true :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg run_expensive_tasks: Must be set to [true] in order for the task to be performed. Defaults to false. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "POST", _make_path(index, "_disk_usage"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "fields", "ignore_unavailable", response_mimetypes=["application/json"], ) async def field_usage_stats(self, index, params=None, headers=None): """ Returns the field usage stats for each field of an index ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both.
Valid choices: open, closed, hidden, none, all Default: open :arg fields: A comma-separated list of fields to include in the stats if only a subset of fields should be returned (supports wildcards) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "GET", _make_path(index, "_field_usage_stats"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def modify_data_stream(self, body, params=None, headers=None): """ Modifies a data stream ``_ :arg body: The data stream modifications """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_data_stream/_modify", params=params, headers=headers, body=body ) elasticsearch-py-7.17.6/elasticsearch/_async/client/indices.pyi000066400000000000000000001645551426163262700246120ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, List, Mapping, MutableMapping, Optional, Tuple, Union, ) from typing_extensions import Literal from .utils import NamespacedClient class IndicesClient(NamespacedClient): async def analyze( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
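# Illustrative usage of the ``analyze`` API typed above (comment only, not part
# of the generated stubs; assumes an ``AsyncElasticsearch`` instance named
# ``client``):
#
#     await client.indices.analyze(
#         body={"analyzer": "standard", "text": "Quick Brown Foxes!"}
#     )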
async def refresh( self, *, index: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def flush( self, *, index: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., force: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., wait_if_ongoing: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def create( self, *, index: str, body: Optional[Mapping[str, Any]] = ..., aliases: Optional[Mapping[str, Mapping[str, Any]]] = ..., include_type_name: Optional[bool] = ..., mappings: Optional[Mapping[str, Any]] = ..., master_timeout: Optional[Union[int, str]] = ..., settings: Optional[Mapping[str, Any]] = ..., timeout: Optional[Union[int, str]] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
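# Illustrative usage of ``create`` (comment only; index name, settings, and
# mappings are placeholders):
#
#     await client.indices.create(
#         index="my-index",
#         body={
#             "settings": {"number_of_shards": 1},
#             "mappings": {"properties": {"title": {"type": "text"}}},
#         },
#     )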
async def clone( self, *, index: Any, target: Any, body: Optional[Mapping[str, Any]] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get( self, *, index: Union[List[str], str], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., flat_settings: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., include_defaults: Optional[bool] = ..., include_type_name: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def open( self, *, index: Union[List[str], str], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., timeout: Optional[Union[int, str]] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def close( self, *, index: Union[List[str], str], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., timeout: Optional[Union[int, str]] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete( self, *, index: Union[List[str], str], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def exists( self, *, index: Union[List[str], str], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., flat_settings: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., include_defaults: Optional[bool] = ..., local: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... 
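# Illustrative usage: ``exists`` maps an HTTP HEAD request onto a ``bool``
# return value, so it composes naturally with conditional index creation
# (comment only; the index name is a placeholder):
#
#     if not await client.indices.exists(index="my-index"):
#         await client.indices.create(index="my-index")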
async def exists_type( self, *, index: Any, doc_type: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., local: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... async def put_mapping( self, *, body: Mapping[str, Any], index: Optional[Any] = ..., doc_type: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., include_type_name: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., write_index_only: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_mapping( self, *, index: Optional[Union[List[str], str]] = ..., doc_type: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., include_type_name: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def get_field_mapping( self, *, fields: Union[List[str], str], index: Optional[Union[List[str], str]] = ..., doc_type: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., include_defaults: Optional[bool] = ..., include_type_name: Optional[bool] = ..., local: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_alias( self, *, index: Any, name: Any, body: Optional[Mapping[str, Any]] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def exists_alias( self, *, name: Union[List[str], str], index: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., local: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... 
async def get_alias( self, *, index: Optional[Union[List[str], str]] = ..., name: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., local: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def update_aliases( self, *, body: Mapping[str, Any], master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_alias( self, *, index: Union[List[str], str], name: Union[List[str], str], master_timeout: Optional[Union[int, str]] = ..., timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_template( self, *, name: Any, body: Mapping[str, Any], create: Optional[bool] = ..., include_type_name: Optional[bool] = ..., master_timeout: Optional[Any] = ..., order: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
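# Illustrative usage of the legacy ``put_template`` API (comment only; the
# template name and index pattern are placeholders):
#
#     await client.indices.put_template(
#         name="logs-template",
#         body={"index_patterns": ["logs-*"], "settings": {"number_of_shards": 1}},
#     )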
async def exists_template( self, *, name: Union[List[str], str], flat_settings: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... async def get_template( self, *, name: Optional[Union[List[str], str]] = ..., flat_settings: Optional[bool] = ..., include_type_name: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_template( self, *, name: str, master_timeout: Optional[Union[int, str]] = ..., timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_settings( self, *, index: Optional[Union[List[str], str]] = ..., name: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., flat_settings: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., include_defaults: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def put_settings( self, *, body: Mapping[str, Any], index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., flat_settings: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Any] = ..., preserve_existing: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def stats( self, *, index: Optional[Union[List[str], str]] = ..., metric: Optional[Union[List[str], str]] = ..., completion_fields: Optional[Union[List[str], str]] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., fielddata_fields: Optional[Union[List[str], str]] = ..., fields: Optional[Union[List[str], str]] = ..., forbid_closed_indices: Optional[bool] = ..., groups: Optional[Union[List[str], str]] = ..., include_segment_file_sizes: Optional[bool] = ..., include_unloaded_segments: Optional[bool] = ..., level: Optional[Union[Literal["cluster", "indices", "shards"], str]] = ..., types: Optional[Union[List[str], str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def segments( self, *, index: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., verbose: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
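    # Usage sketch for put_settings and stats as typed above: change a dynamic
    # setting, then read shard-level statistics. The index name is hypothetical.
    # import asyncio
    # from elasticsearch import AsyncElasticsearch
    #
    # async def tune_and_measure() -> None:
    #     es = AsyncElasticsearch(hosts=["http://localhost:9200"])
    #     try:
    #         # Dynamic settings travel in the request body.
    #         await es.indices.put_settings(
    #             index="logs-000001",
    #             body={"index": {"refresh_interval": "30s"}},
    #         )
    #         # level="shards" matches the Literal["cluster", "indices", "shards"].
    #         stats = await es.indices.stats(index="logs-000001", level="shards")
    #         print(stats["_all"]["total"]["docs"])
    #     finally:
    #         await es.close()
    #
    # asyncio.run(tune_and_measure())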
async def validate_query( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Any] = ..., doc_type: Optional[Any] = ..., all_shards: Optional[bool] = ..., allow_no_indices: Optional[bool] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[Any] = ..., default_operator: Optional[Any] = ..., df: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., explain: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., lenient: Optional[bool] = ..., q: Optional[Any] = ..., rewrite: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def clear_cache( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., fielddata: Optional[bool] = ..., fields: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., query: Optional[bool] = ..., request: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def recovery( self, *, index: Optional[Any] = ..., active_only: Optional[bool] = ..., detailed: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def upgrade( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., only_ancient_segments: Optional[bool] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
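    # Usage sketch for validate_query: check a query without executing it; with
    # explain=True the response explains why an invalid query failed to parse.
    # The index and query body are invented for the example.
    # import asyncio
    # from elasticsearch import AsyncElasticsearch
    #
    # async def check_query() -> None:
    #     es = AsyncElasticsearch(hosts=["http://localhost:9200"])
    #     try:
    #         resp = await es.indices.validate_query(
    #             index="logs-000001",
    #             body={"query": {"match": {"message": "error"}}},
    #             explain=True,
    #         )
    #         print(resp["valid"], resp.get("explanations"))
    #     finally:
    #         await es.close()
    #
    # asyncio.run(check_query())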
async def get_upgrade( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def flush_synced( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def shard_stores( self, *, index: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., status: Optional[ Union[ List[Union[Literal["all", "green", "red", "yellow"], str]], Union[Literal["all", "green", "red", "yellow"], str], ] ] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def forcemerge( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., flush: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., max_num_segments: Optional[Any] = ..., only_expunge_deletes: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
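    # Usage sketch for forcemerge as typed above: compact an index that is no
    # longer written to down to one segment, then confirm with segments(). The
    # index name is hypothetical; merging actively-written indices is unwise.
    # import asyncio
    # from elasticsearch import AsyncElasticsearch
    #
    # async def compact() -> None:
    #     es = AsyncElasticsearch(hosts=["http://localhost:9200"])
    #     try:
    #         await es.indices.forcemerge(index="logs-000001", max_num_segments=1)
    #         print(await es.indices.segments(index="logs-000001"))
    #     finally:
    #         await es.close()
    #
    # asyncio.run(compact())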
async def shrink( self, *, index: Any, target: Any, body: Optional[Mapping[str, Any]] = ..., copy_settings: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def split( self, *, index: Any, target: Any, body: Optional[Mapping[str, Any]] = ..., copy_settings: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def rollover( self, *, alias: Any, body: Optional[Mapping[str, Any]] = ..., new_index: Optional[Any] = ..., dry_run: Optional[bool] = ..., include_type_name: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def freeze( self, *, index: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
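    # Usage sketch for rollover: roll the write index behind the "logs" alias
    # when either condition is met. The alias name and thresholds are
    # assumptions for illustration.
    # import asyncio
    # from elasticsearch import AsyncElasticsearch
    #
    # async def maybe_rollover() -> None:
    #     es = AsyncElasticsearch(hosts=["http://localhost:9200"])
    #     try:
    #         resp = await es.indices.rollover(
    #             alias="logs",
    #             body={"conditions": {"max_age": "7d", "max_docs": 10_000_000}},
    #         )
    #         # rolled_over is False when no condition matched; dry_run previews.
    #         print(resp["rolled_over"], resp["new_index"])
    #     finally:
    #         await es.close()
    #
    # asyncio.run(maybe_rollover())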
async def unfreeze( self, *, index: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def reload_search_analyzers( self, *, index: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def create_data_stream( self, *, name: str, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_data_stream( self, *, name: Union[List[str], str], expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
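    # Usage sketch for the data stream stubs: create, inspect, and delete a
    # stream. A composable index template with "data_stream": {} matching the
    # name must already exist; the stream name itself is invented.
    # import asyncio
    # from elasticsearch import AsyncElasticsearch
    #
    # async def stream_lifecycle() -> None:
    #     es = AsyncElasticsearch(hosts=["http://localhost:9200"])
    #     try:
    #         await es.indices.create_data_stream(name="logs-app-default")
    #         print(await es.indices.get_data_stream(name="logs-app-default"))
    #         await es.indices.delete_data_stream(name="logs-app-default")
    #     finally:
    #         await es.close()
    #
    # asyncio.run(stream_lifecycle())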
async def delete_index_template( self, *, name: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def exists_index_template( self, *, name: Any, flat_settings: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... async def get_index_template( self, *, name: Optional[Any] = ..., flat_settings: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_index_template( self, *, name: Any, body: Mapping[str, Any], cause: Optional[Any] = ..., create: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
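    # Usage sketch for put_index_template/exists_index_template: install a
    # composable template, then verify it exists. The name, pattern, and
    # priority are illustrative values.
    # import asyncio
    # from elasticsearch import AsyncElasticsearch
    #
    # async def install_template() -> None:
    #     es = AsyncElasticsearch(hosts=["http://localhost:9200"])
    #     try:
    #         await es.indices.put_index_template(
    #             name="logs-template",
    #             body={
    #                 "index_patterns": ["logs-*"],
    #                 "priority": 100,
    #                 "template": {"settings": {"number_of_shards": 1}},
    #             },
    #         )
    #         assert await es.indices.exists_index_template(name="logs-template")
    #     finally:
    #         await es.close()
    #
    # asyncio.run(install_template())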
async def simulate_index_template( self, *, name: Any, body: Optional[Mapping[str, Any]] = ..., cause: Optional[Any] = ..., create: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_data_stream( self, *, name: Optional[Union[List[str], str]] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def simulate_template( self, *, body: Optional[Mapping[str, Any]] = ..., name: Optional[Any] = ..., cause: Optional[Any] = ..., create: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def resolve_index( self, *, name: Any, expand_wildcards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
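    # Usage sketch: preview what an index named "logs-000003" would inherit
    # from matching templates, then expand a wildcard with resolve_index. Both
    # names are hypothetical.
    # import asyncio
    # from elasticsearch import AsyncElasticsearch
    #
    # async def preview() -> None:
    #     es = AsyncElasticsearch(hosts=["http://localhost:9200"])
    #     try:
    #         sim = await es.indices.simulate_index_template(name="logs-000003")
    #         print(sim.get("template"))
    #         print(await es.indices.resolve_index(name="logs-*"))
    #     finally:
    #         await es.close()
    #
    # asyncio.run(preview())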
async def add_block( self, *, index: Any, block: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def data_streams_stats( self, *, name: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def promote_data_stream( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def migrate_to_data_stream( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def disk_usage( self, *, index: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., flush: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., run_expensive_tasks: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
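    # Usage sketch for add_block: write-block an index (a prerequisite for
    # shrink/split). The index name is invented; other block values include
    # "read_only", "read", and "metadata".
    # import asyncio
    # from elasticsearch import AsyncElasticsearch
    #
    # async def block_writes() -> None:
    #     es = AsyncElasticsearch(hosts=["http://localhost:9200"])
    #     try:
    #         resp = await es.indices.add_block(index="logs-000001", block="write")
    #         print(resp["acknowledged"])
    #     finally:
    #         await es.close()
    #
    # asyncio.run(block_writes())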
async def field_usage_stats( self, *, index: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., fields: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def modify_data_stream( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/ingest.py000066400000000000000000000125241426163262700243000ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IngestClient(NamespacedClient): @query_params( "master_timeout", "summary", response_mimetypes=["application/json"], ) async def get_pipeline(self, id=None, params=None, headers=None): """ Returns a pipeline. ``_ :arg id: Comma separated list of pipeline ids. Wildcards supported :arg master_timeout: Explicit operation timeout for connection to master node :arg summary: Return pipelines without their definitions (default: false) """ return await self.transport.perform_request( "GET", _make_path("_ingest", "pipeline", id), params=params, headers=headers ) @query_params( "if_version", "master_timeout", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_pipeline(self, id, body, params=None, headers=None): """ Creates or updates a pipeline. 
``_ :arg id: Pipeline ID :arg body: The ingest definition :arg if_version: Required version for optimistic concurrency control for pipeline updates :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ for param in (id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_ingest", "pipeline", id), params=params, headers=headers, body=body, ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def delete_pipeline(self, id, params=None, headers=None): """ Deletes a pipeline. ``_ :arg id: Pipeline ID :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "DELETE", _make_path("_ingest", "pipeline", id), params=params, headers=headers, ) @query_params( "verbose", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def simulate(self, body, id=None, params=None, headers=None): """ Allows to simulate a pipeline with example documents. ``_ :arg body: The simulate definition :arg id: Pipeline ID :arg verbose: Verbose mode. Display data output for each processor in executed pipeline """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", _make_path("_ingest", "pipeline", id, "_simulate"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def processor_grok(self, params=None, headers=None): """ Returns a list of the built-in patterns. ``_ """ return await self.transport.perform_request( "GET", "/_ingest/processor/grok", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def geo_ip_stats(self, params=None, headers=None): """ Returns statistical information about geoip databases ``_ """ return await self.transport.perform_request( "GET", "/_ingest/geoip/stats", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/ingest.pyi000066400000000000000000000131421426163262700244460ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class IngestClient(NamespacedClient): async def get_pipeline( self, *, id: Optional[Any] = ..., master_timeout: Optional[Any] = ..., summary: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_pipeline( self, *, id: Any, body: Mapping[str, Any], if_version: Optional[Any] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_pipeline( self, *, id: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def simulate( self, *, body: Mapping[str, Any], id: Optional[Any] = ..., verbose: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def processor_grok( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
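    # Usage sketch tying these ingest stubs together: register a pipeline and
    # dry-run it with simulate() before indexing real documents. The pipeline
    # ID and processor definition are invented for the example.
    # import asyncio
    # from elasticsearch import AsyncElasticsearch
    #
    # async def try_pipeline() -> None:
    #     es = AsyncElasticsearch(hosts=["http://localhost:9200"])
    #     try:
    #         await es.ingest.put_pipeline(
    #             id="lowercase-user",
    #             body={
    #                 "description": "lowercase the user field",
    #                 "processors": [{"lowercase": {"field": "user"}}],
    #             },
    #         )
    #         resp = await es.ingest.simulate(
    #             id="lowercase-user",
    #             body={"docs": [{"_source": {"user": "ALICE"}}]},
    #             verbose=True,
    #         )
    #         print(resp["docs"][0])
    #     finally:
    #         await es.close()
    #
    # asyncio.run(try_pipeline())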
async def geo_ip_stats( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/license.py000066400000000000000000000114341426163262700244300ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, query_params class LicenseClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def delete(self, params=None, headers=None): """ Deletes licensing information for the cluster ``_ """ return await self.transport.perform_request( "DELETE", "/_license", params=params, headers=headers ) @query_params( "accept_enterprise", "local", response_mimetypes=["application/json"], ) async def get(self, params=None, headers=None): """ Retrieves licensing information for the cluster ``_ :arg accept_enterprise: If the active license is an enterprise license, return type as 'enterprise' (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) """ return await self.transport.perform_request( "GET", "/_license", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_basic_status(self, params=None, headers=None): """ Retrieves information about the status of the basic license. ``_ """ return await self.transport.perform_request( "GET", "/_license/basic_status", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_trial_status(self, params=None, headers=None): """ Retrieves information about the status of the trial license. ``_ """ return await self.transport.perform_request( "GET", "/_license/trial_status", params=params, headers=headers ) @query_params( "acknowledge", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def post(self, body=None, params=None, headers=None): """ Updates the license for the cluster. 
``_ :arg body: licenses to be installed :arg acknowledge: whether the user has acknowledged acknowledge messages (default: false) """ return await self.transport.perform_request( "PUT", "/_license", params=params, headers=headers, body=body ) @query_params( "acknowledge", response_mimetypes=["application/json"], ) async def post_start_basic(self, params=None, headers=None): """ Starts an indefinite basic license. ``_ :arg acknowledge: whether the user has acknowledged acknowledge messages (default: false) """ return await self.transport.perform_request( "POST", "/_license/start_basic", params=params, headers=headers ) @query_params( "acknowledge", "type", response_mimetypes=["application/json"], ) async def post_start_trial(self, params=None, headers=None): """ starts a limited time trial license. ``_ :arg acknowledge: whether the user has acknowledged acknowledge messages (default: false) :arg type: The type of trial license to generate (default: "trial") """ return await self.transport.perform_request( "POST", "/_license/start_trial", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/license.pyi000066400000000000000000000141311426163262700245760ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class LicenseClient(NamespacedClient): async def delete( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get( self, *, accept_enterprise: Optional[bool] = ..., local: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
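    # Usage sketch for the license API: inspect the active license and start a
    # trial only when still on basic. The cluster URL is an assumption, and a
    # trial can only be started once per major version.
    # import asyncio
    # from elasticsearch import AsyncElasticsearch
    #
    # async def ensure_trial() -> None:
    #     es = AsyncElasticsearch(hosts=["http://localhost:9200"])
    #     try:
    #         current = await es.license.get()
    #         if current["license"]["type"] == "basic":
    #             print(await es.license.post_start_trial(acknowledge=True))
    #     finally:
    #         await es.close()
    #
    # asyncio.run(ensure_trial())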
async def get_basic_status( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_trial_status( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def post( self, *, body: Optional[Mapping[str, Any]] = ..., acknowledge: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def post_start_basic( self, *, acknowledge: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def post_start_trial( self, *, acknowledge: Optional[bool] = ..., type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/logstash.py000066400000000000000000000061021426163262700246260ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class LogstashClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def delete_pipeline(self, id, params=None, headers=None): """ Deletes Logstash Pipelines used by Central Management ``_ :arg id: The ID of the Pipeline """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "DELETE", _make_path("_logstash", "pipeline", id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def get_pipeline(self, id, params=None, headers=None): """ Retrieves Logstash Pipelines used by Central Management ``_ :arg id: A comma-separated list of Pipeline IDs """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "GET", _make_path("_logstash", "pipeline", id), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_pipeline(self, id, body, params=None, headers=None): """ Adds and updates Logstash Pipelines used for Central Management ``_ :arg id: The ID of the Pipeline :arg body: The Pipeline to add or update """ for param in (id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_logstash", "pipeline", id), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/logstash.pyi000066400000000000000000000062011426163262700247770ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
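    # Usage sketch for the Logstash central-management methods defined above:
    # store, fetch, and delete a pipeline. The ID and body field values are
    # invented; the body layout follows the Logstash pipeline API schema.
    # import asyncio
    # from elasticsearch import AsyncElasticsearch
    #
    # async def logstash_roundtrip() -> None:
    #     es = AsyncElasticsearch(hosts=["http://localhost:9200"])
    #     try:
    #         await es.logstash.put_pipeline(
    #             id="std-in-out",
    #             body={
    #                 "description": "pass-through pipeline",
    #                 "last_modified": "2022-01-01T00:00:00.000Z",
    #                 "pipeline": "input { stdin {} } output { stdout {} }",
    #                 "pipeline_metadata": {"type": "logstash_pipeline", "version": "1"},
    #                 "pipeline_settings": {},
    #                 "username": "elastic",
    #             },
    #         )
    #         print(await es.logstash.get_pipeline(id="std-in-out"))
    #         await es.logstash.delete_pipeline(id="std-in-out")
    #     finally:
    #         await es.close()
    #
    # asyncio.run(logstash_roundtrip())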
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class LogstashClient(NamespacedClient): async def delete_pipeline( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_pipeline( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_pipeline( self, *, id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/migration.py000066400000000000000000000047741426163262700250100ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, _make_path, query_params class MigrationClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def deprecations(self, index=None, params=None, headers=None): """ Retrieves information about different cluster, node, and index level settings that use deprecated features that will be removed or changed in the next major version. 
``_ :arg index: Index pattern """ return await self.transport.perform_request( "GET", _make_path(index, "_migration", "deprecations"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def get_feature_upgrade_status(self, params=None, headers=None): """ Find out whether system features need to be upgraded or not ``_ """ return await self.transport.perform_request( "GET", "/_migration/system_features", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def post_feature_upgrade(self, params=None, headers=None): """ Begin upgrades for system features ``_ """ return await self.transport.perform_request( "POST", "/_migration/system_features", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/migration.pyi000066400000000000000000000060671426163262700251560ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class MigrationClient(NamespacedClient): async def deprecations( self, *, index: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_feature_upgrade_status( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
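    # Usage sketch for the migration helpers: list deprecation warnings and
    # check system-feature upgrade status before a major-version upgrade. The
    # cluster URL is an assumption.
    # import asyncio
    # from elasticsearch import AsyncElasticsearch
    #
    # async def upgrade_preflight() -> None:
    #     es = AsyncElasticsearch(hosts=["http://localhost:9200"])
    #     try:
    #         deprecations = await es.migration.deprecations()
    #         for issue in deprecations.get("cluster_settings", []):
    #             print(issue["level"], issue["message"])
    #         print(await es.migration.get_feature_upgrade_status())
    #     finally:
    #         await es.close()
    #
    # asyncio.run(upgrade_preflight())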
async def post_feature_upgrade( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/ml.py000066400000000000000000002236331426163262700234240ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _bulk_body, _make_path, query_params class MlClient(NamespacedClient): @query_params( "allow_no_jobs", "allow_no_match", "force", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def close_job(self, job_id, body=None, params=None, headers=None): """ Closes one or more anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle. ``_ :arg job_id: The name of the job to close :arg body: The URL params optionally sent in the body :arg allow_no_jobs: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg force: True if the job should be forcefully closed :arg timeout: Controls the time to wait until a job has closed. Default to 30 minutes """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_close"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def delete_calendar(self, calendar_id, params=None, headers=None): """ Deletes a calendar. ``_ :arg calendar_id: The ID of the calendar to delete """ if calendar_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'calendar_id'." ) return await self.transport.perform_request( "DELETE", _make_path("_ml", "calendars", calendar_id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def delete_calendar_event( self, calendar_id, event_id, params=None, headers=None ): """ Deletes scheduled events from a calendar. 
``_ :arg calendar_id: The ID of the calendar to modify :arg event_id: The ID of the event to remove from the calendar """ for param in (calendar_id, event_id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "DELETE", _make_path("_ml", "calendars", calendar_id, "events", event_id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def delete_calendar_job(self, calendar_id, job_id, params=None, headers=None): """ Deletes anomaly detection jobs from a calendar. ``_ :arg calendar_id: The ID of the calendar to modify :arg job_id: The ID of the job to remove from the calendar """ for param in (calendar_id, job_id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "DELETE", _make_path("_ml", "calendars", calendar_id, "jobs", job_id), params=params, headers=headers, ) @query_params( "force", response_mimetypes=["application/json"], ) async def delete_datafeed(self, datafeed_id, params=None, headers=None): """ Deletes an existing datafeed. ``_ :arg datafeed_id: The ID of the datafeed to delete :arg force: True if the datafeed should be forcefully deleted """ if datafeed_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'datafeed_id'." ) return await self.transport.perform_request( "DELETE", _make_path("_ml", "datafeeds", datafeed_id), params=params, headers=headers, ) @query_params( "requests_per_second", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def delete_expired_data( self, body=None, job_id=None, params=None, headers=None ): """ Deletes expired and unused machine learning data. ``_ :arg body: deleting expired data parameters :arg job_id: The ID of the job(s) to perform expired data hygiene for :arg requests_per_second: The desired requests per second for the deletion processes. :arg timeout: How long can the underlying delete processes run until they are canceled """ return await self.transport.perform_request( "DELETE", _make_path("_ml", "_delete_expired_data", job_id), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def delete_filter(self, filter_id, params=None, headers=None): """ Deletes a filter. ``_ :arg filter_id: The ID of the filter to delete """ if filter_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'filter_id'.") return await self.transport.perform_request( "DELETE", _make_path("_ml", "filters", filter_id), params=params, headers=headers, ) @query_params( "allow_no_forecasts", "timeout", response_mimetypes=["application/json"], ) async def delete_forecast( self, job_id, forecast_id=None, params=None, headers=None ): """ Deletes forecasts from a machine learning job. ``_ :arg job_id: The ID of the job from which to delete forecasts :arg forecast_id: The ID of the forecast to delete, can be comma delimited list. Leaving blank implies `_all` :arg allow_no_forecasts: Whether to ignore if `_all` matches no forecasts :arg timeout: Controls the time to wait until the forecast(s) are deleted. 
Default to 30 seconds """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "DELETE", _make_path("_ml", "anomaly_detectors", job_id, "_forecast", forecast_id), params=params, headers=headers, ) @query_params( "force", "wait_for_completion", response_mimetypes=["application/json"], ) async def delete_job(self, job_id, params=None, headers=None): """ Deletes an existing anomaly detection job. ``_ :arg job_id: The ID of the job to delete :arg force: True if the job should be forcefully deleted :arg wait_for_completion: Should this request wait until the operation has completed before returning Default: True """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "DELETE", _make_path("_ml", "anomaly_detectors", job_id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def delete_model_snapshot( self, job_id, snapshot_id, params=None, headers=None ): """ Deletes an existing model snapshot. ``_ :arg job_id: The ID of the job to fetch :arg snapshot_id: The ID of the snapshot to delete """ for param in (job_id, snapshot_id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "DELETE", _make_path( "_ml", "anomaly_detectors", job_id, "model_snapshots", snapshot_id ), params=params, headers=headers, ) @query_params( "advance_time", "calc_interim", "end", "skip_time", "start", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def flush_job(self, job_id, body=None, params=None, headers=None): """ Forces any buffered data to be processed by the job. ``_ :arg job_id: The name of the job to flush :arg body: Flush parameters :arg advance_time: Advances time to the given value generating results and updating the model for the advanced interval :arg calc_interim: Calculates interim results for the most recent bucket or all buckets within the latency period :arg end: When used in conjunction with calc_interim, specifies the range of buckets on which to calculate interim results :arg skip_time: Skips time to the given value without generating results or updating the model for the skipped interval :arg start: When used in conjunction with calc_interim, specifies the range of buckets on which to calculate interim results """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_flush"), params=params, headers=headers, body=body, ) @query_params( "duration", "expires_in", "max_model_memory", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def forecast(self, job_id, body=None, params=None, headers=None): """ Predicts the future behavior of a time series by using its historical behavior. ``_ :arg job_id: The ID of the job to forecast for :arg body: Query parameters can be specified in the body :arg duration: The duration of the forecast :arg expires_in: The time interval after which the forecast expires. Expired forecasts will be deleted at the first opportunity. :arg max_model_memory: The max memory able to be used by the forecast. Default is 20mb. 
""" if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_forecast"), params=params, headers=headers, body=body, ) @query_params( "anomaly_score", "desc", "end", "exclude_interim", "expand", "from_", "size", "sort", "start", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def get_buckets( self, job_id, body=None, timestamp=None, params=None, headers=None ): """ Retrieves anomaly detection job results for one or more buckets. ``_ :arg job_id: ID of the job to get bucket results from :arg body: Bucket selection details if not provided in URI :arg timestamp: The timestamp of the desired single bucket result :arg anomaly_score: Filter for the most anomalous buckets :arg desc: Set the sort direction :arg end: End time filter for buckets :arg exclude_interim: Exclude interim results :arg expand: Include anomaly records :arg from_: skips a number of buckets :arg size: specifies a max number of buckets to get :arg sort: Sort buckets by a particular field :arg start: Start time filter for buckets """ if "from_" in params: params["from"] = params.pop("from_") if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "POST", _make_path( "_ml", "anomaly_detectors", job_id, "results", "buckets", timestamp ), params=params, headers=headers, body=body, ) @query_params( "end", "from_", "job_id", "size", "start", response_mimetypes=["application/json"], ) async def get_calendar_events(self, calendar_id, params=None, headers=None): """ Retrieves information about the scheduled events in calendars. ``_ :arg calendar_id: The ID of the calendar containing the events :arg end: Get events before this time :arg from_: Skips a number of events :arg job_id: Get events for the job. When this option is used calendar_id must be '_all' :arg size: Specifies a max number of events to get :arg start: Get events after this time """ if "from_" in params: params["from"] = params.pop("from_") if calendar_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'calendar_id'." ) return await self.transport.perform_request( "GET", _make_path("_ml", "calendars", calendar_id, "events"), params=params, headers=headers, ) @query_params( "from_", "size", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def get_calendars( self, body=None, calendar_id=None, params=None, headers=None ): """ Retrieves configuration information for calendars. ``_ :arg body: The from and size parameters optionally sent in the body :arg calendar_id: The ID of the calendar to fetch :arg from_: skips a number of calendars :arg size: specifies a max number of calendars to get """ if "from_" in params: params["from"] = params.pop("from_") return await self.transport.perform_request( "POST", _make_path("_ml", "calendars", calendar_id), params=params, headers=headers, body=body, ) @query_params( "from_", "partition_field_value", "size", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def get_categories( self, job_id, body=None, category_id=None, params=None, headers=None ): """ Retrieves anomaly detection job results for one or more categories. 
``_ :arg job_id: The name of the job :arg body: Category selection details if not provided in URI :arg category_id: The identifier of the category definition of interest :arg from_: skips a number of categories :arg partition_field_value: Specifies the partition to retrieve categories for. This is optional, and should never be used for jobs where per-partition categorization is disabled. :arg size: specifies a max number of categories to get """ if "from_" in params: params["from"] = params.pop("from_") if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "POST", _make_path( "_ml", "anomaly_detectors", job_id, "results", "categories", category_id ), params=params, headers=headers, body=body, ) @query_params( "allow_no_datafeeds", "allow_no_match", response_mimetypes=["application/json"], ) async def get_datafeed_stats(self, datafeed_id=None, params=None, headers=None): """ Retrieves usage information for datafeeds. ``_ :arg datafeed_id: The ID of the datafeeds stats to fetch :arg allow_no_datafeeds: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) """ return await self.transport.perform_request( "GET", _make_path("_ml", "datafeeds", datafeed_id, "_stats"), params=params, headers=headers, ) @query_params( "allow_no_datafeeds", "allow_no_match", "exclude_generated", response_mimetypes=["application/json"], ) async def get_datafeeds(self, datafeed_id=None, params=None, headers=None): """ Retrieves configuration information for datafeeds. ``_ :arg datafeed_id: The ID of the datafeeds to fetch :arg allow_no_datafeeds: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) :arg exclude_generated: Omits fields that are illegal to set on datafeed PUT """ return await self.transport.perform_request( "GET", _make_path("_ml", "datafeeds", datafeed_id), params=params, headers=headers, ) @query_params( "from_", "size", response_mimetypes=["application/json"], ) async def get_filters(self, filter_id=None, params=None, headers=None): """ Retrieves filters. ``_ :arg filter_id: The ID of the filter to fetch :arg from_: skips a number of filters :arg size: specifies a max number of filters to get """ if "from_" in params: params["from"] = params.pop("from_") return await self.transport.perform_request( "GET", _make_path("_ml", "filters", filter_id), params=params, headers=headers, ) @query_params( "desc", "end", "exclude_interim", "from_", "influencer_score", "size", "sort", "start", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def get_influencers(self, job_id, body=None, params=None, headers=None): """ Retrieves anomaly detection job results for one or more influencers. 
``_ :arg job_id: Identifier for the anomaly detection job :arg body: Influencer selection criteria :arg desc: whether the results should be sorted in descending order :arg end: end timestamp for the requested influencers :arg exclude_interim: Exclude interim results :arg from_: skips a number of influencers :arg influencer_score: influencer score threshold for the requested influencers :arg size: specifies a max number of influencers to get :arg sort: sort field for the requested influencers :arg start: start timestamp for the requested influencers """ if "from_" in params: params["from"] = params.pop("from_") if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "results", "influencers"), params=params, headers=headers, body=body, ) @query_params( "allow_no_jobs", "allow_no_match", response_mimetypes=["application/json"], ) async def get_job_stats(self, job_id=None, params=None, headers=None): """ Retrieves usage information for anomaly detection jobs. ``_ :arg job_id: The ID of the jobs stats to fetch :arg allow_no_jobs: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) """ return await self.transport.perform_request( "GET", _make_path("_ml", "anomaly_detectors", job_id, "_stats"), params=params, headers=headers, ) @query_params( "allow_no_jobs", "allow_no_match", "exclude_generated", response_mimetypes=["application/json"], ) async def get_jobs(self, job_id=None, params=None, headers=None): """ Retrieves configuration information for anomaly detection jobs. ``_ :arg job_id: The ID of the jobs to fetch :arg allow_no_jobs: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg exclude_generated: Omits fields that are illegal to set on job PUT """ return await self.transport.perform_request( "GET", _make_path("_ml", "anomaly_detectors", job_id), params=params, headers=headers, ) @query_params( "desc", "end", "from_", "size", "sort", "start", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def get_model_snapshots( self, job_id, body=None, snapshot_id=None, params=None, headers=None ): """ Retrieves information about model snapshots. ``_ :arg job_id: The ID of the job to fetch :arg body: Model snapshot selection criteria :arg snapshot_id: The ID of the snapshot to fetch :arg desc: True if the results should be sorted in descending order :arg end: The filter 'end' query parameter :arg from_: Skips a number of documents :arg size: The default number of documents returned in queries as a string.
:arg sort: Name of the field to sort on :arg start: The filter 'start' query parameter """ if "from_" in params: params["from"] = params.pop("from_") if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "POST", _make_path( "_ml", "anomaly_detectors", job_id, "model_snapshots", snapshot_id ), params=params, headers=headers, body=body, ) @query_params( "allow_no_jobs", "allow_no_match", "bucket_span", "end", "exclude_interim", "overall_score", "start", "top_n", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def get_overall_buckets(self, job_id, body=None, params=None, headers=None): """ Retrieves overall bucket results that summarize the bucket results of multiple anomaly detection jobs. ``_ :arg job_id: The job IDs for which to calculate overall bucket results :arg body: Overall bucket selection details if not provided in URI :arg allow_no_jobs: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg bucket_span: The span of the overall buckets. Defaults to the longest job bucket_span :arg end: Returns overall buckets with timestamps earlier than this time :arg exclude_interim: If true, overall buckets that include interim buckets will be excluded :arg overall_score: Returns overall buckets with overall scores higher than this value :arg start: Returns overall buckets with timestamps after this time :arg top_n: The number of top job bucket scores to be used in the overall_score calculation """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "POST", _make_path( "_ml", "anomaly_detectors", job_id, "results", "overall_buckets" ), params=params, headers=headers, body=body, ) @query_params( "desc", "end", "exclude_interim", "from_", "record_score", "size", "sort", "start", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def get_records(self, job_id, body=None, params=None, headers=None): """ Retrieves anomaly records for an anomaly detection job. ``_ :arg job_id: The ID of the job :arg body: Record selection criteria :arg desc: Set the sort direction :arg end: End time filter for records :arg exclude_interim: Exclude interim results :arg from_: skips a number of records :arg record_score: Returns records with anomaly scores greater than or equal to this value :arg size: specifies a max number of records to get :arg sort: Sort records by a particular field :arg start: Start time filter for records """ if "from_" in params: params["from"] = params.pop("from_") if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "results", "records"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def info(self, params=None, headers=None): """ Returns defaults and limits used by machine learning.
``_ """ return await self.transport.perform_request( "GET", "/_ml/info", params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def open_job(self, job_id, body=None, params=None, headers=None): """ Opens one or more anomaly detection jobs. ``_ :arg job_id: The ID of the job to open :arg body: Query parameters can be specified in the body """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_open"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def post_calendar_events(self, calendar_id, body, params=None, headers=None): """ Posts scheduled events in a calendar. ``_ :arg calendar_id: The ID of the calendar to modify :arg body: A list of events """ for param in (calendar_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path("_ml", "calendars", calendar_id, "events"), params=params, headers=headers, body=body, ) @query_params( "reset_end", "reset_start", request_mimetypes=["application/x-ndjson", "application/json"], response_mimetypes=["application/json"], ) async def post_data(self, job_id, body, params=None, headers=None): """ Sends data to an anomaly detection job for analysis. ``_ :arg job_id: The name of the job receiving the data :arg body: The data to process :arg reset_end: Optional parameter to specify the end of the bucket resetting range :arg reset_start: Optional parameter to specify the start of the bucket resetting range """ for param in (job_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") body = _bulk_body(self.transport.serializer, body) return await self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_data"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def preview_datafeed( self, body=None, datafeed_id=None, params=None, headers=None ): """ Previews a datafeed. ``_ :arg body: The datafeed config and job config with which to execute the preview :arg datafeed_id: The ID of the datafeed to preview """ return await self.transport.perform_request( "POST", _make_path("_ml", "datafeeds", datafeed_id, "_preview"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_calendar(self, calendar_id, body=None, params=None, headers=None): """ Instantiates a calendar. ``_ :arg calendar_id: The ID of the calendar to create :arg body: The calendar details """ if calendar_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'calendar_id'." ) return await self.transport.perform_request( "PUT", _make_path("_ml", "calendars", calendar_id), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def put_calendar_job(self, calendar_id, job_id, params=None, headers=None): """ Adds an anomaly detection job to a calendar. 
``_ :arg calendar_id: The ID of the calendar to modify :arg job_id: The ID of the job to add to the calendar """ for param in (calendar_id, job_id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_ml", "calendars", calendar_id, "jobs", job_id), params=params, headers=headers, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_throttled", "ignore_unavailable", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_datafeed(self, datafeed_id, body, params=None, headers=None): """ Instantiates a datafeed. ``_ :arg datafeed_id: The ID of the datafeed to create :arg body: The datafeed config :arg allow_no_indices: Ignore if the source indices expressions resolves to no concrete indices (default: true) :arg expand_wildcards: Whether source index expressions should get expanded to open or closed indices (default: open) Valid choices: open, closed, hidden, none, all :arg ignore_throttled: Ignore indices that are marked as throttled (default: true) :arg ignore_unavailable: Ignore unavailable indexes (default: false) """ for param in (datafeed_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_ml", "datafeeds", datafeed_id), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_filter(self, filter_id, body, params=None, headers=None): """ Instantiates a filter. ``_ :arg filter_id: The ID of the filter to create :arg body: The filter details """ for param in (filter_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_ml", "filters", filter_id), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_throttled", "ignore_unavailable", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_job(self, job_id, body, params=None, headers=None): """ Instantiates an anomaly detection job. ``_ :arg job_id: The ID of the job to create :arg body: The job :arg allow_no_indices: Ignore if the source indices expressions resolves to no concrete indices (default: true). Only set if datafeed_config is provided. :arg expand_wildcards: Whether source index expressions should get expanded to open or closed indices (default: open). Only set if datafeed_config is provided. Valid choices: open, closed, hidden, none, all :arg ignore_throttled: Ignore indices that are marked as throttled (default: true). Only set if datafeed_config is provided. :arg ignore_unavailable: Ignore unavailable indexes (default: false). Only set if datafeed_config is provided. """ for param in (job_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_ml", "anomaly_detectors", job_id), params=params, headers=headers, body=body, ) @query_params( "delete_intervening_results", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def revert_model_snapshot( self, job_id, snapshot_id, body=None, params=None, headers=None ): """ Reverts to a specific snapshot. 
``_ :arg job_id: The ID of the job to fetch :arg snapshot_id: The ID of the snapshot to revert to :arg body: Reversion options :arg delete_intervening_results: Should we reset the results back to the time of the snapshot? """ for param in (job_id, snapshot_id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path( "_ml", "anomaly_detectors", job_id, "model_snapshots", snapshot_id, "_revert", ), params=params, headers=headers, body=body, ) @query_params( "enabled", "timeout", response_mimetypes=["application/json"], ) async def set_upgrade_mode(self, params=None, headers=None): """ Sets a cluster wide upgrade_mode setting that prepares machine learning indices for an upgrade. ``_ :arg enabled: Whether to enable upgrade_mode ML setting or not. Defaults to false. :arg timeout: Controls the time to wait before action times out. Defaults to 30 seconds """ return await self.transport.perform_request( "POST", "/_ml/set_upgrade_mode", params=params, headers=headers ) @query_params( "end", "start", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def start_datafeed(self, datafeed_id, body=None, params=None, headers=None): """ Starts one or more datafeeds. ``_ :arg datafeed_id: The ID of the datafeed to start :arg body: The start datafeed parameters :arg end: The end time when the datafeed should stop. When not set, the datafeed continues in real time :arg start: The start time from where the datafeed should begin :arg timeout: Controls the time to wait until a datafeed has started. Default to 20 seconds """ if datafeed_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'datafeed_id'." ) return await self.transport.perform_request( "POST", _make_path("_ml", "datafeeds", datafeed_id, "_start"), params=params, headers=headers, body=body, ) @query_params( "allow_no_datafeeds", "allow_no_match", "force", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def stop_datafeed(self, datafeed_id, body=None, params=None, headers=None): """ Stops one or more datafeeds. ``_ :arg datafeed_id: The ID of the datafeed to stop :arg body: The URL params optionally sent in the body :arg allow_no_datafeeds: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) :arg force: True if the datafeed should be forcefully stopped. :arg timeout: Controls the time to wait until a datafeed has stopped. Default to 20 seconds """ if datafeed_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'datafeed_id'." ) return await self.transport.perform_request( "POST", _make_path("_ml", "datafeeds", datafeed_id, "_stop"), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_throttled", "ignore_unavailable", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def update_datafeed(self, datafeed_id, body, params=None, headers=None): """ Updates certain properties of a datafeed. 
``_ :arg datafeed_id: The ID of the datafeed to update :arg body: The datafeed update settings :arg allow_no_indices: Ignore if the source indices expressions resolves to no concrete indices (default: true) :arg expand_wildcards: Whether source index expressions should get expanded to open or closed indices (default: open) Valid choices: open, closed, hidden, none, all :arg ignore_throttled: Ignore indices that are marked as throttled (default: true) :arg ignore_unavailable: Ignore unavailable indexes (default: false) """ for param in (datafeed_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path("_ml", "datafeeds", datafeed_id, "_update"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def update_filter(self, filter_id, body, params=None, headers=None): """ Updates the description of a filter, adds items, or removes items. ``_ :arg filter_id: The ID of the filter to update :arg body: The filter update """ for param in (filter_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path("_ml", "filters", filter_id, "_update"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def update_job(self, job_id, body, params=None, headers=None): """ Updates certain properties of an anomaly detection job. ``_ :arg job_id: The ID of the job to create :arg body: The job update settings """ for param in (job_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_update"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def update_model_snapshot( self, job_id, snapshot_id, body, params=None, headers=None ): """ Updates certain properties of a snapshot. ``_ :arg job_id: The ID of the job to fetch :arg snapshot_id: The ID of the snapshot to update :arg body: The model snapshot properties to update """ for param in (job_id, snapshot_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path( "_ml", "anomaly_detectors", job_id, "model_snapshots", snapshot_id, "_update", ), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def validate(self, body, params=None, headers=None): """ Validates an anomaly detection job. ``_ :arg body: The job config """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_ml/anomaly_detectors/_validate", params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def validate_detector(self, body, params=None, headers=None): """ Validates an anomaly detection detector. 
``_ :arg body: The detector """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_ml/anomaly_detectors/_validate/detector", params=params, headers=headers, body=body, ) @query_params( "force", "timeout", response_mimetypes=["application/json"], ) async def delete_data_frame_analytics(self, id, params=None, headers=None): """ Deletes an existing data frame analytics job. ``_ :arg id: The ID of the data frame analytics to delete :arg force: True if the job should be forcefully deleted :arg timeout: Controls the time to wait until a job is deleted. Defaults to 1 minute """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "DELETE", _make_path("_ml", "data_frame", "analytics", id), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def evaluate_data_frame(self, body, params=None, headers=None): """ Evaluates the data frame analytics for an annotated index. ``_ :arg body: The evaluation definition """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_ml/data_frame/_evaluate", params=params, headers=headers, body=body, ) @query_params( "allow_no_match", "exclude_generated", "from_", "size", response_mimetypes=["application/json"], ) async def get_data_frame_analytics(self, id=None, params=None, headers=None): """ Retrieves configuration information for data frame analytics jobs. ``_ :arg id: The ID of the data frame analytics to fetch :arg allow_no_match: Whether to ignore if a wildcard expression matches no data frame analytics. (This includes `_all` string or when no data frame analytics have been specified) Default: True :arg exclude_generated: Omits fields that are illegal to set on data frame analytics PUT :arg from_: skips a number of analytics :arg size: specifies a max number of analytics to get Default: 100 """ if "from_" in params: params["from"] = params.pop("from_") return await self.transport.perform_request( "GET", _make_path("_ml", "data_frame", "analytics", id), params=params, headers=headers, ) @query_params( "allow_no_match", "from_", "size", "verbose", response_mimetypes=["application/json"], ) async def get_data_frame_analytics_stats(self, id=None, params=None, headers=None): """ Retrieves usage information for data frame analytics jobs. ``_ :arg id: The ID of the data frame analytics stats to fetch :arg allow_no_match: Whether to ignore if a wildcard expression matches no data frame analytics. (This includes `_all` string or when no data frame analytics have been specified) Default: True :arg from_: skips a number of analytics :arg size: specifies a max number of analytics to get Default: 100 :arg verbose: whether the stats response should be verbose """ if "from_" in params: params["from"] = params.pop("from_") return await self.transport.perform_request( "GET", _make_path("_ml", "data_frame", "analytics", id, "_stats"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_data_frame_analytics(self, id, body, params=None, headers=None): """ Instantiates a data frame analytics job. 
``_ :arg id: The ID of the data frame analytics to create :arg body: The data frame analytics configuration """ for param in (id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_ml", "data_frame", "analytics", id), params=params, headers=headers, body=body, ) @query_params( "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def start_data_frame_analytics( self, id, body=None, params=None, headers=None ): """ Starts a data frame analytics job. ``_ :arg id: The ID of the data frame analytics to start :arg body: The start data frame analytics parameters :arg timeout: Controls the time to wait until the task has started. Defaults to 20 seconds """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "POST", _make_path("_ml", "data_frame", "analytics", id, "_start"), params=params, headers=headers, body=body, ) @query_params( "allow_no_match", "force", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def stop_data_frame_analytics(self, id, body=None, params=None, headers=None): """ Stops one or more data frame analytics jobs. ``_ :arg id: The ID of the data frame analytics to stop :arg body: The stop data frame analytics parameters :arg allow_no_match: Whether to ignore if a wildcard expression matches no data frame analytics. (This includes `_all` string or when no data frame analytics have been specified) :arg force: True if the data frame analytics should be forcefully stopped :arg timeout: Controls the time to wait until the task has stopped. Defaults to 20 seconds """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "POST", _make_path("_ml", "data_frame", "analytics", id, "_stop"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def delete_trained_model(self, model_id, params=None, headers=None): """ Deletes an existing trained inference model that is currently not referenced by an ingest pipeline. ``_ :arg model_id: The ID of the trained model to delete """ if model_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'model_id'.") return await self.transport.perform_request( "DELETE", _make_path("_ml", "trained_models", model_id), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def explain_data_frame_analytics( self, body=None, id=None, params=None, headers=None ): """ Explains a data frame analytics config. ``_ :arg body: The data frame analytics config to explain :arg id: The ID of the data frame analytics to explain """ return await self.transport.perform_request( "POST", _make_path("_ml", "data_frame", "analytics", id, "_explain"), params=params, headers=headers, body=body, ) @query_params( "allow_no_match", "decompress_definition", "exclude_generated", "from_", "include", "include_model_definition", "size", "tags", response_mimetypes=["application/json"], ) async def get_trained_models(self, model_id=None, params=None, headers=None): """ Retrieves configuration information for a trained inference model. 
``_ :arg model_id: The ID of the trained models to fetch :arg allow_no_match: Whether to ignore if a wildcard expression matches no trained models. (This includes `_all` string or when no trained models have been specified) Default: True :arg decompress_definition: Should the model definition be decompressed into valid JSON or returned in a custom compressed format. Defaults to true. Default: True :arg exclude_generated: Omits fields that are illegal to set on model PUT :arg from_: skips a number of trained models :arg include: A comma-separated list of fields to optionally include. Valid options are 'definition' and 'total_feature_importance'. Default is none. :arg include_model_definition: Should the full model definition be included in the results. These definitions can be large, so be cautious when including them. Defaults to false. :arg size: specifies a max number of trained models to get Default: 100 :arg tags: A comma-separated list of tags that the model must have. """ if "from_" in params: params["from"] = params.pop("from_") return await self.transport.perform_request( "GET", _make_path("_ml", "trained_models", model_id), params=params, headers=headers, ) @query_params( "allow_no_match", "from_", "size", response_mimetypes=["application/json"], ) async def get_trained_models_stats(self, model_id=None, params=None, headers=None): """ Retrieves usage information for trained inference models. ``_ :arg model_id: The ID of the trained models stats to fetch :arg allow_no_match: Whether to ignore if a wildcard expression matches no trained models. (This includes `_all` string or when no trained models have been specified) Default: True :arg from_: skips a number of trained models :arg size: specifies a max number of trained models to get Default: 100 """ if "from_" in params: params["from"] = params.pop("from_") return await self.transport.perform_request( "GET", _make_path("_ml", "trained_models", model_id, "_stats"), params=params, headers=headers, ) @query_params( "defer_definition_decompression", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_trained_model(self, model_id, body, params=None, headers=None): """ Creates an inference trained model. ``_ :arg model_id: The ID of the trained models to store :arg body: The trained model configuration :arg defer_definition_decompression: If set to `true` and a `compressed_definition` is provided, the request defers definition decompression and skips relevant validations.
""" for param in (model_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_ml", "trained_models", model_id), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def estimate_model_memory(self, body, params=None, headers=None): """ Estimates the model memory ``_ :arg body: The analysis config, plus cardinality estimates for fields it references """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_ml/anomaly_detectors/_estimate_model_memory", params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def update_data_frame_analytics(self, id, body, params=None, headers=None): """ Updates certain properties of a data frame analytics job. ``_ :arg id: The ID of the data frame analytics to update :arg body: The data frame analytics settings to update """ for param in (id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path("_ml", "data_frame", "analytics", id, "_update"), params=params, headers=headers, body=body, ) @query_params( "timeout", "wait_for_completion", response_mimetypes=["application/json"], ) async def upgrade_job_snapshot( self, job_id, snapshot_id, params=None, headers=None ): """ Upgrades a given job snapshot to the current major version. ``_ :arg job_id: The ID of the job :arg snapshot_id: The ID of the snapshot :arg timeout: How long should the API wait for the job to be opened and the old snapshot to be loaded. :arg wait_for_completion: Should the request wait until the task is complete before responding to the caller. Default is false. """ for param in (job_id, snapshot_id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path( "_ml", "anomaly_detectors", job_id, "model_snapshots", snapshot_id, "_upgrade", ), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def delete_trained_model_alias( self, model_id, model_alias, params=None, headers=None ): """ Deletes a model alias that refers to the trained model ``_ :arg model_id: The trained model where the model alias is assigned :arg model_alias: The trained model alias to delete """ for param in (model_id, model_alias): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "DELETE", _make_path("_ml", "trained_models", model_id, "model_aliases", model_alias), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def preview_data_frame_analytics( self, body=None, id=None, params=None, headers=None ): """ Previews that will be analyzed given a data frame analytics config. 
``_ :arg body: The data frame analytics config to preview :arg id: The ID of the data frame analytics to preview """ return await self.transport.perform_request( "POST", _make_path("_ml", "data_frame", "analytics", id, "_preview"), params=params, headers=headers, body=body, ) @query_params( "reassign", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_trained_model_alias( self, model_id, model_alias, params=None, headers=None ): """ Creates a new model alias (or reassigns an existing one) to refer to the trained model ``_ :arg model_id: The trained model where the model alias should be assigned :arg model_alias: The trained model alias to update :arg reassign: If the model_alias already exists and points to a separate model_id, this parameter must be true. Defaults to false. """ for param in (model_id, model_alias): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_ml", "trained_models", model_id, "model_aliases", model_alias), params=params, headers=headers, ) @query_params( "charset", "column_names", "delimiter", "explain", "format", "grok_pattern", "has_header_row", "line_merge_size_limit", "lines_to_sample", "quote", "should_trim_fields", "timeout", "timestamp_field", "timestamp_format", request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) async def find_file_structure(self, body, params=None, headers=None): """ Finds the structure of a text file. The text file must contain data that is suitable to be ingested into Elasticsearch. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg body: The contents of the file to be analyzed :arg charset: Optional parameter to specify the character set of the file :arg column_names: Optional parameter containing a comma separated list of the column names for a delimited file :arg delimiter: Optional parameter to specify the delimiter character for a delimited file - must be a single character :arg explain: Whether to include a commentary on how the structure was derived :arg format: Optional parameter to specify the high level file format Valid choices: ndjson, xml, delimited, semi_structured_text :arg grok_pattern: Optional parameter to specify the Grok pattern that should be used to extract fields from messages in a semi- structured text file :arg has_header_row: Optional parameter to specify whether a delimited file includes the column names in its first row :arg line_merge_size_limit: Maximum number of characters permitted in a single message when lines are merged to create messages. 
Default: 10000 :arg lines_to_sample: How many lines of the file should be included in the analysis Default: 1000 :arg quote: Optional parameter to specify the quote character for a delimited file - must be a single character :arg should_trim_fields: Optional parameter to specify whether the values between delimiters in a delimited file should have whitespace trimmed from them :arg timeout: Timeout after which the analysis will be aborted Default: 25s :arg timestamp_field: Optional parameter to specify the timestamp field in the file :arg timestamp_format: Optional parameter to specify the timestamp format in the file - may be either a Joda or Java time format """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return await self.transport.perform_request( "POST", "/_ml/find_file_structure", params=params, headers=headers, body=body, ) @query_params( "wait_for_completion", response_mimetypes=["application/json"], ) async def reset_job(self, job_id, params=None, headers=None): """ Resets an existing anomaly detection job. ``_ :arg job_id: The ID of the job to reset :arg wait_for_completion: Should this request wait until the operation has completed before returning Default: True """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return await self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_reset"), params=params, headers=headers, ) @query_params( "allow_no_match", response_mimetypes=["application/json"], ) async def get_model_snapshot_upgrade_stats( self, job_id, snapshot_id, params=None, headers=None ): """ Gets stats for anomaly detection job model snapshot upgrades that are in progress. ``_ :arg job_id: The ID of the job. May be a wildcard, comma separated list or `_all`. :arg snapshot_id: The ID of the snapshot. May be a wildcard, comma separated list or `_all`. :arg allow_no_match: Whether to ignore if a wildcard expression matches no jobs or no snapshots. (This includes the `_all` string.) """ for param in (job_id, snapshot_id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "GET", _make_path( "_ml", "anomaly_detectors", job_id, "model_snapshots", snapshot_id, "_upgrade", "_stats", ), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/ml.pyi000066400000000000000000001606061426163262700235750ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
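# --- Usage sketch (editor's addition): anomaly detection job lifecycle ------
# Ties together put_job / open_job / post_data / flush_job / close_job from
# the async ML client above. The job configuration, field names, and sample
# document are illustrative assumptions, not fixtures from this repository.
import asyncio

from elasticsearch import AsyncElasticsearch

async def run_demo_job():
    es = AsyncElasticsearch("https://localhost:9200")
    try:
        await es.ml.put_job(
            job_id="demo-job",
            body={
                "analysis_config": {
                    "bucket_span": "15m",
                    "detectors": [{"function": "mean", "field_name": "responsetime"}],
                },
                "data_description": {"time_field": "timestamp"},
            },
        )
        await es.ml.open_job(job_id="demo-job")
        # post_data accepts a list of documents; _bulk_body serializes it to
        # newline-delimited JSON, matching the declared request_mimetypes.
        await es.ml.post_data(
            job_id="demo-job",
            body=[{"timestamp": 1654041600000, "responsetime": 120.5}],
        )
        # Force interim results, then close (waits up to the given timeout).
        await es.ml.flush_job(job_id="demo-job", body={"calc_interim": True})
        await es.ml.close_job(job_id="demo-job", timeout="5m")
    finally:
        await es.close()

asyncio.run(run_demo_job())
# ----------------------------------------------------------------------------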
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Sequence, Tuple, Union, ) from .utils import NamespacedClient class MlClient(NamespacedClient): async def close_job( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., allow_no_jobs: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., force: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_calendar( self, *, calendar_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_calendar_event( self, *, calendar_id: Any, event_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_calendar_job( self, *, calendar_id: Any, job_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_datafeed( self, *, datafeed_id: Any, force: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
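# --- Usage sketch (editor's addition): calendars and scheduled events -------
# Exercises put_calendar, put_calendar_job, post_calendar_events, and
# get_calendar_events from the ML client above. The epoch-millisecond event
# times follow the ML calendar API as I understand it; the calendar and job
# IDs are made up for illustration.
import asyncio

from elasticsearch import AsyncElasticsearch

async def schedule_maintenance():
    es = AsyncElasticsearch("https://localhost:9200")
    try:
        await es.ml.put_calendar(calendar_id="maintenance")
        # Attach an existing anomaly detection job to the calendar.
        await es.ml.put_calendar_job(calendar_id="maintenance", job_id="demo-job")
        await es.ml.post_calendar_events(
            calendar_id="maintenance",
            body={
                "events": [
                    {
                        "description": "monthly patch window",
                        "start_time": 1656633600000,
                        "end_time": 1656640800000,
                    }
                ]
            },
        )
        print(await es.ml.get_calendar_events(calendar_id="maintenance"))
    finally:
        await es.close()

asyncio.run(schedule_maintenance())
# ----------------------------------------------------------------------------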
async def delete_expired_data( self, *, body: Optional[Mapping[str, Any]] = ..., job_id: Optional[Any] = ..., requests_per_second: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_filter( self, *, filter_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_forecast( self, *, job_id: Any, forecast_id: Optional[Any] = ..., allow_no_forecasts: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_job( self, *, job_id: Any, force: Optional[bool] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_model_snapshot( self, *, job_id: Any, snapshot_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
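# --- Usage sketch (editor's addition): datafeed lifecycle --------------------
# Shows put_datafeed / start_datafeed / get_datafeed_stats / stop_datafeed /
# delete_datafeed from the ML client above. The datafeed body shape (job_id,
# indices, query) follows the ML datafeed API; the index name is an assumption.
import asyncio

from elasticsearch import AsyncElasticsearch

async def run_datafeed():
    es = AsyncElasticsearch("https://localhost:9200")
    try:
        await es.ml.put_datafeed(
            datafeed_id="demo-feed",
            body={
                "job_id": "demo-job",
                "indices": ["app-metrics-*"],
                "query": {"match_all": {}},
            },
        )
        # Without an explicit end time the datafeed runs in real time.
        await es.ml.start_datafeed(datafeed_id="demo-feed", start="now-7d")
        print(await es.ml.get_datafeed_stats(datafeed_id="demo-feed"))
        await es.ml.stop_datafeed(datafeed_id="demo-feed", timeout="1m")
        await es.ml.delete_datafeed(datafeed_id="demo-feed")
    finally:
        await es.close()

asyncio.run(run_datafeed())
# ----------------------------------------------------------------------------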
async def flush_job( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., advance_time: Optional[Any] = ..., calc_interim: Optional[bool] = ..., end: Optional[Any] = ..., skip_time: Optional[Any] = ..., start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def forecast( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., duration: Optional[Any] = ..., expires_in: Optional[Any] = ..., max_model_memory: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_buckets( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., timestamp: Optional[Any] = ..., anomaly_score: Optional[Any] = ..., desc: Optional[bool] = ..., end: Optional[Any] = ..., exclude_interim: Optional[bool] = ..., expand: Optional[bool] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., sort: Optional[Any] = ..., start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_calendar_events( self, *, calendar_id: Any, end: Optional[Any] = ..., from_: Optional[Any] = ..., job_id: Optional[Any] = ..., size: Optional[Any] = ..., start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
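# Example: reading anomaly results with ``get_buckets`` (declared below in
# this block). A minimal sketch, assuming an ``AsyncElasticsearch`` instance
# named ``es``; the job id and score threshold are illustrative.
#
#   resp = await es.ml.get_buckets(
#       job_id="my-job",
#       body={"anomaly_score": 75.0, "desc": True, "exclude_interim": True},
#   )
#   for bucket in resp["buckets"]:
#       print(bucket["timestamp"], bucket["anomaly_score"])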
async def get_calendars( self, *, body: Optional[Mapping[str, Any]] = ..., calendar_id: Optional[Any] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_categories( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., category_id: Optional[Any] = ..., from_: Optional[Any] = ..., partition_field_value: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_datafeed_stats( self, *, datafeed_id: Optional[Any] = ..., allow_no_datafeeds: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_datafeeds( self, *, datafeed_id: Optional[Any] = ..., allow_no_datafeeds: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., exclude_generated: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def get_filters( self, *, filter_id: Optional[Any] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_influencers( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., desc: Optional[bool] = ..., end: Optional[Any] = ..., exclude_interim: Optional[bool] = ..., from_: Optional[Any] = ..., influencer_score: Optional[Any] = ..., size: Optional[Any] = ..., sort: Optional[Any] = ..., start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_job_stats( self, *, job_id: Optional[Any] = ..., allow_no_jobs: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_jobs( self, *, job_id: Optional[Any] = ..., allow_no_jobs: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., exclude_generated: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
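# Example: enumerating jobs and their runtime stats with ``get_jobs`` and
# ``get_job_stats`` above, without failing on an empty wildcard. A minimal
# sketch, assuming an ``AsyncElasticsearch`` instance named ``es``; the job
# id pattern is illustrative.
#
#   jobs = await es.ml.get_jobs(job_id="my-*", allow_no_match=True)
#   stats = await es.ml.get_job_stats(job_id="my-*", allow_no_match=True)
#   print(jobs["count"], [job["job_id"] for job in jobs["jobs"]])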
async def get_model_snapshots( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., snapshot_id: Optional[Any] = ..., desc: Optional[bool] = ..., end: Optional[Any] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., sort: Optional[Any] = ..., start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_overall_buckets( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., allow_no_jobs: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., bucket_span: Optional[Any] = ..., end: Optional[Any] = ..., exclude_interim: Optional[bool] = ..., overall_score: Optional[Any] = ..., start: Optional[Any] = ..., top_n: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_records( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., desc: Optional[bool] = ..., end: Optional[Any] = ..., exclude_interim: Optional[bool] = ..., from_: Optional[Any] = ..., record_score: Optional[Any] = ..., size: Optional[Any] = ..., sort: Optional[Any] = ..., start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def info( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def open_job( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def post_calendar_events( self, *, calendar_id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def post_data( self, *, job_id: Any, body: Union[Mapping[str, Any], Sequence[Mapping[str, Any]], bytes, str], reset_end: Optional[Any] = ..., reset_start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def preview_datafeed( self, *, body: Optional[Mapping[str, Any]] = ..., datafeed_id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_calendar( self, *, calendar_id: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
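# Example: the usual create/open/feed/flush/close cycle for an anomaly
# detection job, combining ``open_job`` and ``post_data`` above with
# ``flush_job`` and ``close_job`` declared earlier and ``put_job`` declared
# below. A minimal sketch, assuming an ``AsyncElasticsearch`` instance named
# ``es``; the job configuration and documents are illustrative.
#
#   await es.ml.put_job(
#       job_id="my-job",
#       body={
#           "analysis_config": {
#               "bucket_span": "15m",
#               "detectors": [{"function": "mean", "field_name": "responsetime"}],
#           },
#           "data_description": {"time_field": "timestamp"},
#       },
#   )
#   await es.ml.open_job(job_id="my-job")
#   await es.ml.post_data(
#       job_id="my-job",
#       body=[{"timestamp": 1654041600000, "responsetime": 123.4}],
#   )
#   await es.ml.flush_job(job_id="my-job", calc_interim=True)
#   await es.ml.close_job(job_id="my-job")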
async def put_calendar_job( self, *, calendar_id: Any, job_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_datafeed( self, *, datafeed_id: Any, body: Mapping[str, Any], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_filter( self, *, filter_id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_job( self, *, job_id: Any, body: Mapping[str, Any], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def revert_model_snapshot( self, *, job_id: Any, snapshot_id: Any, body: Optional[Mapping[str, Any]] = ..., delete_intervening_results: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def set_upgrade_mode( self, *, enabled: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def start_datafeed( self, *, datafeed_id: Any, body: Optional[Mapping[str, Any]] = ..., end: Optional[Any] = ..., start: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def stop_datafeed( self, *, datafeed_id: Any, body: Optional[Mapping[str, Any]] = ..., allow_no_datafeeds: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., force: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def update_datafeed( self, *, datafeed_id: Any, body: Mapping[str, Any], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
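# Example: managing a datafeed for a running job with ``start_datafeed`` and
# ``stop_datafeed`` above (``put_datafeed`` is declared earlier). A minimal
# sketch, assuming an ``AsyncElasticsearch`` instance named ``es``; ids and
# the index pattern are illustrative.
#
#   await es.ml.put_datafeed(
#       datafeed_id="datafeed-my-job",
#       body={"job_id": "my-job", "indices": ["metrics-*"]},
#   )
#   await es.ml.start_datafeed(datafeed_id="datafeed-my-job", start="now-7d")
#   await es.ml.stop_datafeed(datafeed_id="datafeed-my-job", timeout="30s")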
async def update_filter( self, *, filter_id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def update_job( self, *, job_id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def update_model_snapshot( self, *, job_id: Any, snapshot_id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def validate( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def validate_detector( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def delete_data_frame_analytics( self, *, id: Any, force: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def evaluate_data_frame( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_data_frame_analytics( self, *, id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., exclude_generated: Optional[bool] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_data_frame_analytics_stats( self, *, id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., verbose: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_data_frame_analytics( self, *, id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def start_data_frame_analytics( self, *, id: Any, body: Optional[Mapping[str, Any]] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def stop_data_frame_analytics( self, *, id: Any, body: Optional[Mapping[str, Any]] = ..., allow_no_match: Optional[bool] = ..., force: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_trained_model( self, *, model_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def explain_data_frame_analytics( self, *, body: Optional[Mapping[str, Any]] = ..., id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
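# Example: running an outlier detection job with the data frame analytics
# signatures above (``put_data_frame_analytics`` and the stats call are
# declared earlier). A minimal sketch, assuming an ``AsyncElasticsearch``
# instance named ``es``; ids and index names are illustrative.
#
#   await es.ml.put_data_frame_analytics(
#       id="my-outliers",
#       body={
#           "source": {"index": "my-metrics"},
#           "dest": {"index": "my-metrics-outliers"},
#           "analysis": {"outlier_detection": {}},
#       },
#   )
#   await es.ml.start_data_frame_analytics(id="my-outliers")
#   stats = await es.ml.get_data_frame_analytics_stats(id="my-outliers")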
async def get_trained_models( self, *, model_id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., decompress_definition: Optional[bool] = ..., exclude_generated: Optional[bool] = ..., from_: Optional[Any] = ..., include: Optional[Any] = ..., include_model_definition: Optional[bool] = ..., size: Optional[Any] = ..., tags: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_trained_models_stats( self, *, model_id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_trained_model( self, *, model_id: Any, body: Mapping[str, Any], defer_definition_decompression: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def estimate_model_memory( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def update_data_frame_analytics( self, *, id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def upgrade_job_snapshot( self, *, job_id: Any, snapshot_id: Any, timeout: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_trained_model_alias( self, *, model_id: Any, model_alias: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def preview_data_frame_analytics( self, *, body: Optional[Mapping[str, Any]] = ..., id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_trained_model_alias( self, *, model_id: Any, model_alias: Any, reassign: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
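# Example: promoting a new model version by repointing its alias with
# ``put_trained_model_alias`` above. A minimal sketch, assuming an
# ``AsyncElasticsearch`` instance named ``es``; the model ids are
# illustrative.
#
#   await es.ml.put_trained_model_alias(
#       model_id="my-model-v2", model_alias="my-model", reassign=True
#   )
#   models = await es.ml.get_trained_models(model_id="my-model", allow_no_match=False)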
async def find_file_structure( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], charset: Optional[Any] = ..., column_names: Optional[Any] = ..., delimiter: Optional[Any] = ..., explain: Optional[bool] = ..., format: Optional[Any] = ..., grok_pattern: Optional[Any] = ..., has_header_row: Optional[bool] = ..., line_merge_size_limit: Optional[Any] = ..., lines_to_sample: Optional[Any] = ..., quote: Optional[Any] = ..., should_trim_fields: Optional[bool] = ..., timeout: Optional[Any] = ..., timestamp_field: Optional[Any] = ..., timestamp_format: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def reset_job( self, *, job_id: Any, wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_model_snapshot_upgrade_stats( self, *, job_id: Any, snapshot_id: Any, allow_no_match: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/monitoring.py000066400000000000000000000042421426163262700251720ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
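# Example: shipping a batch of monitoring documents with the ``bulk`` helper
# defined below. A minimal sketch, assuming an ``AsyncElasticsearch``
# instance named ``es``; the document type, payload fields, and system
# metadata values are illustrative.
#
#   await es.monitoring.bulk(
#       doc_type="logstash_stats",
#       body=[
#           {"index": {}},
#           {"timestamp": "2022-06-01T00:00:00Z", "events": {"in": 10, "out": 10}},
#       ],
#       system_id="logstash",
#       system_api_version="7",
#       interval="10s",
#   )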
from .utils import SKIP_IN_PATH, NamespacedClient, _bulk_body, _make_path, query_params class MonitoringClient(NamespacedClient): @query_params( "interval", "system_api_version", "system_id", request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) async def bulk(self, body, doc_type=None, params=None, headers=None): """ Used by the monitoring features to send monitoring data. ``_ :arg body: The operation definition and data (action-data pairs), separated by newlines :arg doc_type: Default document type for items which don't provide one :arg interval: Collection interval (e.g., '10s' or '10000ms') of the payload :arg system_api_version: API Version of the monitored system :arg system_id: Identifier of the monitored system """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return await self.transport.perform_request( "POST", _make_path("_monitoring", doc_type, "bulk"), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/monitoring.pyi000066400000000000000000000036211426163262700253430ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Sequence, Tuple, Union, ) from .utils import NamespacedClient class MonitoringClient(NamespacedClient): async def bulk( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], doc_type: Optional[Any] = ..., interval: Optional[Any] = ..., system_api_version: Optional[Any] = ..., system_id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/nodes.py000066400000000000000000000251421426163262700241170ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class NodesClient(NamespacedClient): @query_params( "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def reload_secure_settings( self, body=None, node_id=None, params=None, headers=None ): """ Reloads secure settings. ``_ :arg body: An object containing the password for the elasticsearch keystore :arg node_id: A comma-separated list of node IDs to span the reload/reinit call. Should stay empty because reloading usually involves all cluster nodes. :arg timeout: Explicit operation timeout """ return await self.transport.perform_request( "POST", _make_path("_nodes", node_id, "reload_secure_settings"), params=params, headers=headers, body=body, ) @query_params( "flat_settings", "timeout", response_mimetypes=["application/json"], ) async def info(self, node_id=None, metric=None, params=None, headers=None): """ Returns information about nodes in the cluster. ``_ :arg node_id: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg metric: A comma-separated list of metrics you wish returned. Use `_all` to retrieve all metrics and `_none` to retrieve the node identity without any additional metrics. Valid choices: settings, os, process, jvm, thread_pool, transport, http, plugins, ingest, indices, aggregations, _all, _none :arg flat_settings: Return settings in flat format (default: false) :arg timeout: Explicit operation timeout """ return await self.transport.perform_request( "GET", _make_path("_nodes", node_id, metric), params=params, headers=headers ) @query_params( "completion_fields", "fielddata_fields", "fields", "groups", "include_segment_file_sizes", "include_unloaded_segments", "level", "timeout", "types", response_mimetypes=["application/json"], ) async def stats( self, node_id=None, metric=None, index_metric=None, params=None, headers=None ): """ Returns statistical information about nodes in the cluster. ``_ :arg node_id: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg metric: Limit the information returned to the specified metrics Valid choices: _all, breaker, fs, http, indices, jvm, os, process, thread_pool, transport, discovery, indexing_pressure :arg index_metric: Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) metric isn't specified. 
Valid choices: _all, completion, docs, fielddata, query_cache, flush, get, indexing, merge, request_cache, refresh, search, segments, store, warmer, suggest, shard_stats :arg completion_fields: A comma-separated list of fields for `fielddata` and `suggest` index metric (supports wildcards) :arg fielddata_fields: A comma-separated list of fields for `fielddata` index metric (supports wildcards) :arg fields: A comma-separated list of fields for `fielddata` and `completion` index metric (supports wildcards) :arg groups: A comma-separated list of search groups for `search` index metric :arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only applies if segment stats are requested) :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into memory :arg level: Return indices stats aggregated at index, node or shard level Valid choices: indices, node, shards Default: node :arg timeout: Explicit operation timeout :arg types: A comma-separated list of document types for the `indexing` index metric """ return await self.transport.perform_request( "GET", _make_path("_nodes", node_id, "stats", metric, index_metric), params=params, headers=headers, ) @query_params( "ignore_idle_threads", "interval", "snapshots", "sort", "threads", "timeout", "type", response_mimetypes=["text/plain"], ) async def hot_threads(self, node_id=None, params=None, headers=None): """ Returns information about hot threads on each node in the cluster. ``_ :arg node_id: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg ignore_idle_threads: Don't show threads that are in known- idle places, such as waiting on a socket select or pulling from an empty task queue (default: true) :arg interval: The interval for the second sampling of threads :arg snapshots: Number of samples of thread stacktrace (default: 10) :arg sort: The sort order for 'cpu' type (default: total) Valid choices: cpu, total :arg threads: Specify the number of threads to provide information for (default: 3) :arg timeout: Explicit operation timeout :arg type: The type to sample (default: cpu) Valid choices: cpu, wait, block, mem """ return await self.transport.perform_request( "GET", _make_path("_nodes", node_id, "hot_threads"), params=params, headers=headers, ) @query_params( "timeout", response_mimetypes=["application/json"], ) async def usage(self, node_id=None, metric=None, params=None, headers=None): """ Returns low-level information about REST actions usage on nodes. ``_ :arg node_id: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg metric: Limit the information returned to the specified metrics Valid choices: _all, rest_actions :arg timeout: Explicit operation timeout """ return await self.transport.perform_request( "GET", _make_path("_nodes", node_id, "usage", metric), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def clear_repositories_metering_archive( self, node_id, max_archive_version, params=None, headers=None ): """ Removes the archived repositories metering information present in the cluster. ``_ .. 
warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg node_id: Comma-separated list of node IDs or names used to limit returned information. :arg max_archive_version: Specifies the maximum archive_version to be cleared from the archive. """ for param in (node_id, max_archive_version): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "DELETE", _make_path( "_nodes", node_id, "_repositories_metering", max_archive_version ), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def get_repositories_metering_info(self, node_id, params=None, headers=None): """ Returns cluster repositories metering information. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg node_id: A comma-separated list of node IDs or names to limit the returned information. """ if node_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'node_id'.") return await self.transport.perform_request( "GET", _make_path("_nodes", node_id, "_repositories_metering"), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/nodes.pyi000066400000000000000000000161311426163262700242660ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class NodesClient(NamespacedClient): async def reload_secure_settings( self, *, body: Optional[Mapping[str, Any]] = ..., node_id: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
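# Example: reloading reloadable secure settings after changing the keystore,
# via ``reload_secure_settings`` above. A minimal sketch, assuming an
# ``AsyncElasticsearch`` instance named ``es``; the password is illustrative.
#
#   await es.nodes.reload_secure_settings(
#       body={"secure_settings_password": "keystore-password"}
#   )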
async def info( self, *, node_id: Optional[Any] = ..., metric: Optional[Any] = ..., flat_settings: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def stats( self, *, node_id: Optional[Any] = ..., metric: Optional[Any] = ..., index_metric: Optional[Any] = ..., completion_fields: Optional[Any] = ..., fielddata_fields: Optional[Any] = ..., fields: Optional[Any] = ..., groups: Optional[Any] = ..., include_segment_file_sizes: Optional[bool] = ..., include_unloaded_segments: Optional[bool] = ..., level: Optional[Any] = ..., timeout: Optional[Any] = ..., types: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def hot_threads( self, *, node_id: Optional[Any] = ..., ignore_idle_threads: Optional[bool] = ..., interval: Optional[Any] = ..., snapshots: Optional[Any] = ..., sort: Optional[Any] = ..., threads: Optional[Any] = ..., timeout: Optional[Any] = ..., type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> str: ... async def usage( self, *, node_id: Optional[Any] = ..., metric: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ...
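# Example: pulling per-shard document stats and a plain-text hot-threads
# dump with ``stats`` and ``hot_threads`` above. A minimal sketch, assuming
# an ``AsyncElasticsearch`` instance named ``es``.
#
#   stats = await es.nodes.stats(
#       metric="indices", index_metric="docs,store", level="shards"
#   )
#   dump = await es.nodes.hot_threads(threads=5, type="cpu")  # plain text, not JSON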
async def clear_repositories_metering_archive( self, *, node_id: Any, max_archive_version: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_repositories_metering_info( self, *, node_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/remote.py000066400000000000000000000022621426163262700243000ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, query_params class RemoteClient(NamespacedClient): @query_params() async def info(self, params=None, headers=None): """ ``_ """ return await self.transport.perform_request( "GET", "/_remote/info", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/remote.pyi000066400000000000000000000027501426163262700244530ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
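# Quick sketch for RemoteClient.info above: it simply GETs /_remote/info and
# returns the configured remote clusters. The cluster URL is an assumption;
# the response keys depend on which remotes (if any) are configured.
import asyncio

from elasticsearch import AsyncElasticsearch


async def remote_example():
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        remotes = await es.remote.info()
        print(sorted(remotes))  # one key per configured remote cluster
    finally:
        await es.close()


asyncio.run(remote_example())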
from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class RemoteClient(NamespacedClient): async def info( self, *, timeout: Optional[Any] = None, pretty: Optional[bool] = None, human: Optional[bool] = None, error_trace: Optional[bool] = None, format: Optional[str] = None, filter_path: Optional[Union[str, Collection[str]]] = None, http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = None, headers: Optional[MutableMapping[str, str]] = None, ) -> Any: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/rollup.py000066400000000000000000000224741426163262700243310ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class RollupClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def delete_job(self, id, params=None, headers=None): """ Deletes an existing rollup job. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg id: The ID of the job to delete """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "DELETE", _make_path("_rollup", "job", id), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_jobs(self, id=None, params=None, headers=None): """ Retrieves the configuration, stats, and status of rollup jobs. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg id: The ID of the job(s) to fetch. Accepts glob patterns, or left blank for all jobs """ return await self.transport.perform_request( "GET", _make_path("_rollup", "job", id), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_rollup_caps(self, id=None, params=None, headers=None): """ Returns the capabilities of any rollup jobs that have been configured for a specific index or index pattern. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg id: The ID of the index to check rollup capabilities on, or left blank for all jobs """ return await self.transport.perform_request( "GET", _make_path("_rollup", "data", id), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_rollup_index_caps(self, index, params=None, headers=None): """ Returns the rollup capabilities of all jobs inside of a rollup index (e.g. the index where rollup data is stored). ``_ .. 
warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: The rollup index or index pattern to obtain rollup capabilities from. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return await self.transport.perform_request( "GET", _make_path(index, "_rollup", "data"), params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_job(self, id, body, params=None, headers=None): """ Creates a rollup job. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg id: The ID of the job to create :arg body: The job configuration """ for param in (id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_rollup", "job", id), params=params, headers=headers, body=body, ) @query_params( "rest_total_hits_as_int", "typed_keys", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def rollup_search( self, index, body, doc_type=None, params=None, headers=None ): """ Enables searching rolled-up data using the standard query DSL. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: The indices or index-pattern(s) (containing rollup or regular data) that should be searched :arg body: The search request body :arg doc_type: The doc type inside the index :arg rest_total_hits_as_int: Indicates whether hits.total should be rendered as an integer or an object in the rest search response :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response """ for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path(index, doc_type, "_rollup_search"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def start_job(self, id, params=None, headers=None): """ Starts an existing, stopped rollup job. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg id: The ID of the job to start """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "POST", _make_path("_rollup", "job", id, "_start"), params=params, headers=headers, ) @query_params( "timeout", "wait_for_completion", response_mimetypes=["application/json"], ) async def stop_job(self, id, params=None, headers=None): """ Stops an existing, started rollup job. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg id: The ID of the job to stop :arg timeout: Block for (at maximum) the specified duration while waiting for the job to stop. Defaults to 30s. :arg wait_for_completion: True if the API should block until the job has fully stopped, false if should be executed async. Defaults to false. 
""" if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "POST", _make_path("_rollup", "job", id, "_stop"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def rollup(self, index, rollup_index, body, params=None, headers=None): """ Rollup an index ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: The index to roll up :arg rollup_index: The name of the rollup index to create :arg body: The rollup configuration """ for param in (index, rollup_index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path(index, "_rollup", rollup_index), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/rollup.pyi000066400000000000000000000173171426163262700245020ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class RollupClient(NamespacedClient): async def delete_job( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_jobs( self, *, id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def get_rollup_caps( self, *, id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_rollup_index_caps( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_job( self, *, id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def rollup_search( self, *, index: Any, body: Mapping[str, Any], doc_type: Optional[Any] = ..., rest_total_hits_as_int: Optional[bool] = ..., typed_keys: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def start_job( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
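# Companion sketch for rollup_search: querying rolled-up data with the
# standard aggregation DSL. Index and field names follow the hypothetical
# "sensor" job from the previous sketch.
import asyncio

from elasticsearch import AsyncElasticsearch


async def rollup_search_example():
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        resp = await es.rollup.rollup_search(
            index="sensor_rollup",
            body={
                "size": 0,  # rollup searches must not request raw hits
                "aggregations": {
                    "max_temp": {"max": {"field": "temperature"}}
                },
            },
        )
        print(resp["aggregations"]["max_temp"]["value"])
    finally:
        await es.close()


asyncio.run(rollup_search_example())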
async def stop_job( self, *, id: Any, timeout: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def rollup( self, *, index: Any, rollup_index: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/searchable_snapshots.py000066400000000000000000000144271426163262700272060ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SearchableSnapshotsClient(NamespacedClient): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", response_mimetypes=["application/json"], ) async def clear_cache(self, index=None, params=None, headers=None): """ Clear the cache of searchable snapshots. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: A comma-separated list of index names :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) """ return await self.transport.perform_request( "POST", _make_path(index, "_searchable_snapshots", "cache", "clear"), params=params, headers=headers, ) @query_params( "master_timeout", "storage", "wait_for_completion", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def mount(self, repository, snapshot, body, params=None, headers=None): """ Mount a snapshot as a searchable index. ``_ :arg repository: The name of the repository containing the snapshot of the index to mount :arg snapshot: The name of the snapshot of the index to mount :arg body: The restore configuration for mounting the snapshot as searchable :arg master_timeout: Explicit operation timeout for connection to master node :arg storage: Selects the kind of local storage used to accelerate searches. Experimental, and defaults to `full_copy` :arg wait_for_completion: Should this request wait until the operation has completed before returning """ for param in (repository, snapshot, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path("_snapshot", repository, snapshot, "_mount"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def repository_stats(self, repository, params=None, headers=None): """ DEPRECATED: This API is replaced by the Repositories Metering API. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg repository: The repository for which to get the stats for """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") return await self.transport.perform_request( "GET", _make_path("_snapshot", repository, "_stats"), params=params, headers=headers, ) @query_params( "level", response_mimetypes=["application/json"], ) async def stats(self, index=None, params=None, headers=None): """ Retrieve shard-level statistics about searchable snapshots. ``_ :arg index: A comma-separated list of index names :arg level: Return stats aggregated at cluster, index or shard level Valid choices: cluster, indices, shards Default: indices """ return await self.transport.perform_request( "GET", _make_path(index, "_searchable_snapshots", "stats"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def cache_stats(self, node_id=None, params=None, headers=None): """ Retrieve node-level cache statistics about searchable snapshots. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg node_id: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes """ return await self.transport.perform_request( "GET", _make_path("_searchable_snapshots", node_id, "cache", "stats"), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/searchable_snapshots.pyi000066400000000000000000000117021426163262700273500ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. 
See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class SearchableSnapshotsClient(NamespacedClient): async def clear_cache( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def mount( self, *, repository: Any, snapshot: Any, body: Mapping[str, Any], master_timeout: Optional[Any] = ..., storage: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def repository_stats( self, *, repository: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
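# Sketch for SearchableSnapshotsClient.mount and .stats above. Repository,
# snapshot and index names are placeholders; mounting requires an existing
# snapshot repository and an appropriate license level.
import asyncio

from elasticsearch import AsyncElasticsearch


async def mount_example():
    es = AsyncElasticsearch("http://localhost:9200")
    try:
        await es.searchable_snapshots.mount(
            repository="my_repo",
            snapshot="my_snapshot",
            body={"index": "my-index"},  # index inside the snapshot to mount
            wait_for_completion=True,
        )
        stats = await es.searchable_snapshots.stats(level="indices")
        print(stats)
    finally:
        await es.close()


asyncio.run(mount_example())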
async def stats( self, *, index: Optional[Any] = ..., level: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def cache_stats( self, *, node_id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/security.py000066400000000000000000001100711426163262700246520ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SecurityClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def authenticate(self, params=None, headers=None): """ Enables authentication as a user and retrieve information about the authenticated user. ``_ """ return await self.transport.perform_request( "GET", "/_security/_authenticate", params=params, headers=headers ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def change_password(self, body, username=None, params=None, headers=None): """ Changes the passwords of users in the native realm and built-in users. ``_ :arg body: the new password for the user :arg username: The username of the user to change the password for :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. 
Valid choices: true, false, wait_for """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PUT", _make_path("_security", "user", username, "_password"), params=params, headers=headers, body=body, ) @query_params( "usernames", response_mimetypes=["application/json"], ) async def clear_cached_realms(self, realms, params=None, headers=None): """ Evicts users from the user cache. Can completely clear the cache or evict specific users. ``_ :arg realms: Comma-separated list of realms to clear :arg usernames: Comma-separated list of usernames to clear from the cache """ if realms in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'realms'.") return await self.transport.perform_request( "POST", _make_path("_security", "realm", realms, "_clear_cache"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def clear_cached_roles(self, name, params=None, headers=None): """ Evicts roles from the native role cache. ``_ :arg name: Role name """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "POST", _make_path("_security", "role", name, "_clear_cache"), params=params, headers=headers, ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def create_api_key(self, body, params=None, headers=None): """ Creates an API key for access without requiring basic authentication. ``_ :arg body: The api key request to create an API key :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PUT", "/_security/api_key", params=params, headers=headers, body=body ) @query_params( "refresh", response_mimetypes=["application/json"], ) async def delete_privileges(self, application, name, params=None, headers=None): """ Removes application privileges. ``_ :arg application: Application name :arg name: Privilege name :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ for param in (application, name): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "DELETE", _make_path("_security", "privilege", application, name), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) async def delete_role(self, name, params=None, headers=None): """ Removes roles in the native realm. ``_ :arg name: Role name :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. 
Valid choices: true, false, wait_for """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "DELETE", _make_path("_security", "role", name), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) async def delete_role_mapping(self, name, params=None, headers=None): """ Removes role mappings. ``_ :arg name: Role-mapping name :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( "DELETE", _make_path("_security", "role_mapping", name), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) async def delete_user(self, username, params=None, headers=None): """ Deletes users from the native realm. ``_ :arg username: username :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") return await self.transport.perform_request( "DELETE", _make_path("_security", "user", username), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) async def disable_user(self, username, params=None, headers=None): """ Disables users in the native realm. ``_ :arg username: The username of the user to disable :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") return await self.transport.perform_request( "PUT", _make_path("_security", "user", username, "_disable"), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) async def enable_user(self, username, params=None, headers=None): """ Enables users in the native realm. ``_ :arg username: The username of the user to enable :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") return await self.transport.perform_request( "PUT", _make_path("_security", "user", username, "_enable"), params=params, headers=headers, ) @query_params( "id", "name", "owner", "realm_name", "username", response_mimetypes=["application/json"], ) async def get_api_key(self, params=None, headers=None): """ Retrieves information for one or more API keys. 
``_ :arg id: API key id of the API key to be retrieved :arg name: API key name of the API key to be retrieved :arg owner: flag to query API keys owned by the currently authenticated user :arg realm_name: realm name of the user who created this API key to be retrieved :arg username: user name of the user who created this API key to be retrieved """ return await self.transport.perform_request( "GET", "/_security/api_key", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_privileges( self, application=None, name=None, params=None, headers=None ): """ Retrieves application privileges. ``_ :arg application: Application name :arg name: Privilege name """ return await self.transport.perform_request( "GET", _make_path("_security", "privilege", application, name), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def get_role(self, name=None, params=None, headers=None): """ Retrieves roles in the native realm. ``_ :arg name: A comma-separated list of role names """ return await self.transport.perform_request( "GET", _make_path("_security", "role", name), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_role_mapping(self, name=None, params=None, headers=None): """ Retrieves role mappings. ``_ :arg name: A comma-separated list of role-mapping names """ return await self.transport.perform_request( "GET", _make_path("_security", "role_mapping", name), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def get_token(self, body, params=None, headers=None): """ Creates a bearer token for access without requiring basic authentication. ``_ :arg body: The token request to get """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_security/oauth2/token", params=params, headers=headers, body=body ) @query_params( response_mimetypes=["application/json"], ) async def get_user(self, username=None, params=None, headers=None): """ Retrieves information about users in the native realm and built-in users. ``_ :arg username: A comma-separated list of usernames """ return await self.transport.perform_request( "GET", _make_path("_security", "user", username), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def get_user_privileges(self, params=None, headers=None): """ Retrieves security privileges for the logged in user. ``_ """ return await self.transport.perform_request( "GET", "/_security/user/_privileges", params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def has_privileges(self, body, user=None, params=None, headers=None): """ Determines whether the specified user has a specified list of privileges. ``_ :arg body: The privileges to test :arg user: Username """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", _make_path("_security", "user", user, "_has_privileges"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def invalidate_api_key(self, body, params=None, headers=None): """ Invalidates one or more API keys. 
``_ :arg body: The api key request to invalidate API key(s) """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "DELETE", "/_security/api_key", params=params, headers=headers, body=body ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def invalidate_token(self, body, params=None, headers=None): """ Invalidates one or more access tokens or refresh tokens. ``_ :arg body: The token to invalidate """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "DELETE", "/_security/oauth2/token", params=params, headers=headers, body=body, ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_privileges(self, body, params=None, headers=None): """ Adds or updates application privileges. ``_ :arg body: The privilege(s) to add :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PUT", "/_security/privilege/", params=params, headers=headers, body=body ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_role(self, name, body, params=None, headers=None): """ Adds and updates roles in the native realm. ``_ :arg name: Role name :arg body: The role to add :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_security", "role", name), params=params, headers=headers, body=body, ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_role_mapping(self, name, body, params=None, headers=None): """ Creates and updates role mappings. ``_ :arg name: Role-mapping name :arg body: The role mapping to add :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_security", "role_mapping", name), params=params, headers=headers, body=body, ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_user(self, username, body, params=None, headers=None): """ Adds and updates users in the native realm. These users are commonly referred to as native users. 
``_ :arg username: The username of the User :arg body: The user to add :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ for param in (username, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_security", "user", username), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def get_builtin_privileges(self, params=None, headers=None): """ Retrieves the list of cluster privileges and index privileges that are available in this version of Elasticsearch. ``_ """ return await self.transport.perform_request( "GET", "/_security/privilege/_builtin", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def clear_cached_privileges(self, application, params=None, headers=None): """ Evicts application privileges from the native application privileges cache. ``_ :arg application: A comma-separated list of application names """ if application in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'application'." ) return await self.transport.perform_request( "POST", _make_path("_security", "privilege", application, "_clear_cache"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def clear_api_key_cache(self, ids, params=None, headers=None): """ Clear a subset or all entries from the API key cache. ``_ :arg ids: A comma-separated list of IDs of API keys to clear from the cache """ if ids in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'ids'.") return await self.transport.perform_request( "POST", _make_path("_security", "api_key", ids, "_clear_cache"), params=params, headers=headers, ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def grant_api_key(self, body, params=None, headers=None): """ Creates an API key on behalf of another user. ``_ :arg body: The api key request to create an API key :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_security/api_key/grant", params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def clear_cached_service_tokens( self, namespace, service, name, params=None, headers=None ): """ Evicts tokens from the service account token caches. 
``_ :arg namespace: An identifier for the namespace :arg service: An identifier for the service name :arg name: A comma-separated list of service token names """ for param in (namespace, service, name): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path( "_security", "service", namespace, service, "credential", "token", name, "_clear_cache", ), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) async def create_service_token( self, namespace, service, name=None, params=None, headers=None ): """ Creates a service account token for access without requiring basic authentication. ``_ :arg namespace: An identifier for the namespace :arg service: An identifier for the service name :arg name: An identifier for the token name :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` (the default) then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ for param in (namespace, service): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path( "_security", "service", namespace, service, "credential", "token", name ), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) async def delete_service_token( self, namespace, service, name, params=None, headers=None ): """ Deletes a service account token. ``_ :arg namespace: An identifier for the namespace :arg service: An identifier for the service name :arg name: An identifier for the token name :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` (the default) then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ for param in (namespace, service, name): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "DELETE", _make_path( "_security", "service", namespace, service, "credential", "token", name ), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def get_service_accounts( self, namespace=None, service=None, params=None, headers=None ): """ Retrieves information about service accounts. ``_ :arg namespace: An identifier for the namespace :arg service: An identifier for the service name """ return await self.transport.perform_request( "GET", _make_path("_security", "service", namespace, service), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def get_service_credentials( self, namespace, service, params=None, headers=None ): """ Retrieves information of all service credentials for a service account. 
``_ :arg namespace: An identifier for the namespace :arg service: An identifier for the service name """ for param in (namespace, service): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "GET", _make_path("_security", "service", namespace, service, "credential"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def saml_complete_logout(self, body, params=None, headers=None): """ Verifies the logout response sent from the SAML IdP ``_ :arg body: The logout response to verify """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_security/saml/complete_logout", params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def saml_authenticate(self, body, params=None, headers=None): """ Exchanges a SAML Response message for an Elasticsearch access token and refresh token pair ``_ :arg body: The SAML response to authenticate """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_security/saml/authenticate", params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def saml_invalidate(self, body, params=None, headers=None): """ Consumes a SAML LogoutRequest ``_ :arg body: The LogoutRequest message """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_security/saml/invalidate", params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def saml_logout(self, body, params=None, headers=None): """ Invalidates an access token and a refresh token that were generated via the SAML Authenticate API ``_ :arg body: The tokens to invalidate """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_security/saml/logout", params=params, headers=headers, body=body ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def saml_prepare_authentication(self, body, params=None, headers=None): """ Creates a SAML authentication request ``_ :arg body: The realm for which to create the authentication request, identified by either its name or the ACS URL """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_security/saml/prepare", params=params, headers=headers, body=body ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def saml_service_provider_metadata( self, realm_name, params=None, headers=None ): """ Generates SAML metadata for the Elastic stack SAML 2.0 Service Provider ``_ :arg realm_name: The name of the SAML realm to get the metadata for """ if realm_name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'realm_name'.") return await self.transport.perform_request( "GET", _make_path("_security", "saml", "metadata", realm_name), params=params, 
headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def query_api_keys(self, body=None, params=None, headers=None): """ Retrieves information for API keys using a subset of query DSL ``_ :arg body: From, size, query, sort and search_after """ return await self.transport.perform_request( "POST", "/_security/_query/api_key", params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/security.pyi000066400000000000000000000772641426163262700250430ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class SecurityClient(NamespacedClient): async def authenticate( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def change_password( self, *, body: Mapping[str, Any], username: Optional[Any] = ..., refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def clear_cached_realms( self, *, realms: Any, usernames: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
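# Hedged sketch for SecurityClient.authenticate / create_api_key /
# invalidate_api_key from security.py above. Requires security to be
# enabled; the superuser credentials, key name and expiry are placeholders.
import asyncio

from elasticsearch import AsyncElasticsearch


async def api_key_example():
    es = AsyncElasticsearch(
        "http://localhost:9200", http_auth=("elastic", "changeme")
    )
    try:
        me = await es.security.authenticate()
        created = await es.security.create_api_key(
            body={"name": "ci-key", "expiration": "1d"}
        )
        print(me["username"], created["id"], created["api_key"])
        # Invalidate the key by id when done with it.
        await es.security.invalidate_api_key(body={"ids": [created["id"]]})
    finally:
        await es.close()


asyncio.run(api_key_example())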
async def clear_cached_roles( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def create_api_key( self, *, body: Mapping[str, Any], refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_privileges( self, *, application: Any, name: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_role( self, *, name: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_role_mapping( self, *, name: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
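# Sketch for the role/user management calls (put_role, put_user) implemented
# in security.py above. Role, index and user names are placeholders.
import asyncio

from elasticsearch import AsyncElasticsearch


async def user_role_example():
    es = AsyncElasticsearch(
        "http://localhost:9200", http_auth=("elastic", "changeme")
    )
    try:
        await es.security.put_role(
            name="logs_reader",
            body={
                "cluster": ["monitor"],
                "indices": [{"names": ["logs-*"], "privileges": ["read"]}],
            },
        )
        await es.security.put_user(
            username="jdoe",
            body={"password": "long-random-password", "roles": ["logs_reader"]},
            refresh="wait_for",  # make the new user visible before returning
        )
    finally:
        await es.close()


asyncio.run(user_role_example())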
async def delete_user( self, *, username: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def disable_user( self, *, username: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def enable_user( self, *, username: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_api_key( self, *, id: Optional[Any] = ..., name: Optional[Any] = ..., owner: Optional[bool] = ..., realm_name: Optional[Any] = ..., username: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_privileges( self, *, application: Optional[Any] = ..., name: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def get_role( self, *, name: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_role_mapping( self, *, name: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_token( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_user( self, *, username: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_user_privileges( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
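# Usage sketch (illustrative comment, not part of the generated client):
# exchanging a username/password for a bearer token with ``get_token``. The
# ``grant_type``/``username``/``password`` body fields follow the
# Elasticsearch token API; the credentials below are placeholders.
#
#     resp = await es.security.get_token(
#         body={"grant_type": "password", "username": "elastic", "password": "changeme"}
#     )
#     access_token = resp["access_token"]  # send as "Authorization: Bearer <token>"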
async def has_privileges( self, *, body: Mapping[str, Any], user: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def invalidate_api_key( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def invalidate_token( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_privileges( self, *, body: Mapping[str, Any], refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_role( self, *, name: Any, body: Mapping[str, Any], refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
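# Usage sketch (illustrative comment, not part of the generated client):
# checking the calling user's privileges with ``has_privileges``. The
# ``cluster`` and ``index`` body sections follow the has-privileges API;
# the privilege names and index pattern are examples only.
#
#     resp = await es.security.has_privileges(
#         body={
#             "cluster": ["monitor"],
#             "index": [{"names": ["logs-*"], "privileges": ["read"]}],
#         }
#     )
#     if not resp["has_all_requested"]:
#         raise PermissionError("current user lacks a requested privilege")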
async def put_role_mapping( self, *, name: Any, body: Mapping[str, Any], refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_user( self, *, username: Any, body: Mapping[str, Any], refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_builtin_privileges( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def clear_cached_privileges( self, *, application: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def clear_api_key_cache( self, *, ids: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
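# Usage sketch (illustrative comment, not part of the generated client):
# creating or updating a native-realm user with ``put_user``. The body
# fields (``password``, ``roles``, ``full_name``) follow the security user
# API; the username, password, and role are placeholders.
#
#     await es.security.put_user(
#         username="jane",
#         body={
#             "password": "s3cretpass",
#             "roles": ["kibana_admin"],
#             "full_name": "Jane Doe",
#         },
#         refresh="wait_for",
#     )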
async def grant_api_key( self, *, body: Mapping[str, Any], refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def clear_cached_service_tokens( self, *, namespace: Any, service: Any, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def create_service_token( self, *, namespace: Any, service: Any, name: Optional[Any] = ..., refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_service_token( self, *, namespace: Any, service: Any, name: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_service_accounts( self, *, namespace: Optional[Any] = ..., service: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
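# Usage sketch (illustrative comment, not part of the generated client): the
# service-token lifecycle around ``create_service_token`` and
# ``delete_service_token``. The ``elastic``/``fleet-server`` namespace and
# service pair is the usual built-in example; the token name is a
# placeholder.
#
#     created = await es.security.create_service_token(
#         namespace="elastic", service="fleet-server", name="token-1"
#     )
#     value = created["token"]["value"]  # bearer credential for the service
#     await es.security.delete_service_token(
#         namespace="elastic", service="fleet-server", name="token-1"
#     )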
async def get_service_credentials( self, *, namespace: Any, service: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def saml_complete_logout( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def saml_authenticate( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def saml_invalidate( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def saml_logout( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def saml_prepare_authentication( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def saml_service_provider_metadata( self, *, realm_name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def query_api_keys( self, *, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/shutdown.py000066400000000000000000000065051426163262700246640ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class ShutdownClient(NamespacedClient): @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def delete_node(self, node_id, params=None, headers=None): """ Removes a node from the shutdown list. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. 
``_ :arg node_id: The node id of the node to be removed from the shutdown state """ if node_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'node_id'.") return await self.transport.perform_request( "DELETE", _make_path("_nodes", node_id, "shutdown"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def get_node(self, node_id=None, params=None, headers=None): """ Retrieves the status of a node or nodes that are currently marked as shutting down. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ :arg node_id: The node for which to retrieve the shutdown status """ return await self.transport.perform_request( "GET", _make_path("_nodes", node_id, "shutdown"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_node(self, node_id, body, params=None, headers=None): """ Adds a node to be shut down. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ :arg node_id: The node id of the node to be shut down :arg body: The shutdown type definition to register """ for param in (node_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_nodes", node_id, "shutdown"), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/shutdown.pyi000066400000000000000000000062241426163262700250330ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class ShutdownClient(NamespacedClient): async def delete_node( self, *, node_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ...
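# Usage sketch (illustrative comment, not part of the generated client): a
# node-shutdown round trip with the ShutdownClient methods implemented
# above. The ``type``/``reason`` body fields follow the node shutdown API
# ("restart" and "remove" are among the supported types); note the
# docstrings' caveat that the API is designed for ECE/ESS/ECK rather than
# direct use. The node id is a placeholder.
#
#     node = "node-1"
#     await es.shutdown.put_node(
#         node_id=node, body={"type": "restart", "reason": "rolling maintenance"}
#     )
#     status = await es.shutdown.get_node(node_id=node)  # poll shutdown progress
#     await es.shutdown.delete_node(node_id=node)        # cancel the shutdown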
async def get_node( self, *, node_id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_node( self, *, node_id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/slm.py000066400000000000000000000143121426163262700235770ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SlmClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def delete_lifecycle(self, policy_id, params=None, headers=None): """ Deletes an existing snapshot lifecycle policy. ``_ :arg policy_id: The id of the snapshot lifecycle policy to remove """ if policy_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'policy_id'.") return await self.transport.perform_request( "DELETE", _make_path("_slm", "policy", policy_id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def execute_lifecycle(self, policy_id, params=None, headers=None): """ Immediately creates a snapshot according to the lifecycle policy, without waiting for the scheduled time. ``_ :arg policy_id: The id of the snapshot lifecycle policy to be executed """ if policy_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'policy_id'.") return await self.transport.perform_request( "PUT", _make_path("_slm", "policy", policy_id, "_execute"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def execute_retention(self, params=None, headers=None): """ Deletes any snapshots that are expired according to the policy's retention rules. 
``_ """ return await self.transport.perform_request( "POST", "/_slm/_execute_retention", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_lifecycle(self, policy_id=None, params=None, headers=None): """ Retrieves one or more snapshot lifecycle policy definitions and information about the latest snapshot attempts. ``_ :arg policy_id: Comma-separated list of snapshot lifecycle policies to retrieve """ return await self.transport.perform_request( "GET", _make_path("_slm", "policy", policy_id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def get_stats(self, params=None, headers=None): """ Returns global and policy-level statistics about actions taken by snapshot lifecycle management. ``_ """ return await self.transport.perform_request( "GET", "/_slm/stats", params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_lifecycle(self, policy_id, body=None, params=None, headers=None): """ Creates or updates a snapshot lifecycle policy. ``_ :arg policy_id: The id of the snapshot lifecycle policy :arg body: The snapshot lifecycle policy definition to register """ if policy_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'policy_id'.") return await self.transport.perform_request( "PUT", _make_path("_slm", "policy", policy_id), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def get_status(self, params=None, headers=None): """ Retrieves the status of snapshot lifecycle management (SLM). ``_ """ return await self.transport.perform_request( "GET", "/_slm/status", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def start(self, params=None, headers=None): """ Turns on snapshot lifecycle management (SLM). ``_ """ return await self.transport.perform_request( "POST", "/_slm/start", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def stop(self, params=None, headers=None): """ Turns off snapshot lifecycle management (SLM). ``_ """ return await self.transport.perform_request( "POST", "/_slm/stop", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/slm.pyi000066400000000000000000000165241426163262700237570ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
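# Usage sketch (illustrative comment, not part of the generated client):
# registering and running a snapshot lifecycle policy with the SlmClient
# implemented above. The body fields (``schedule``, ``name``, ``repository``,
# ``config``, ``retention``) follow the SLM put-policy API; the repository
# name is assumed to be registered already, and the policy id is a
# placeholder.
#
#     await es.slm.put_lifecycle(
#         policy_id="nightly-snapshots",
#         body={
#             "schedule": "0 30 1 * * ?",        # cron: 01:30 every day
#             "name": "<nightly-snap-{now/d}>",  # date-math snapshot name
#             "repository": "my_repository",
#             "config": {"indices": ["*"]},
#             "retention": {"expire_after": "30d"},
#         },
#     )
#     # Take a snapshot immediately instead of waiting for the schedule:
#     await es.slm.execute_lifecycle(policy_id="nightly-snapshots")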
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class SlmClient(NamespacedClient): async def delete_lifecycle( self, *, policy_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def execute_lifecycle( self, *, policy_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def execute_retention( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_lifecycle( self, *, policy_id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_stats( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def put_lifecycle( self, *, policy_id: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_status( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def start( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def stop( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/snapshot.py000066400000000000000000000352011426163262700246430ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SnapshotClient(NamespacedClient): @query_params( "master_timeout", "wait_for_completion", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def create(self, repository, snapshot, body=None, params=None, headers=None): """ Creates a snapshot in a repository. ``_ :arg repository: A repository name :arg snapshot: A snapshot name :arg body: The snapshot definition :arg master_timeout: Explicit operation timeout for connection to master node :arg wait_for_completion: Should this request wait until the operation has completed before returning """ for param in (repository, snapshot): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_snapshot", repository, snapshot), params=params, headers=headers, body=body, ) @query_params( "master_timeout", response_mimetypes=["application/json"], ) async def delete(self, repository, snapshot, params=None, headers=None): """ Deletes a snapshot. ``_ :arg repository: A repository name :arg snapshot: A snapshot name :arg master_timeout: Explicit operation timeout for connection to master node """ for param in (repository, snapshot): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "DELETE", _make_path("_snapshot", repository, snapshot), params=params, headers=headers, ) @query_params( "ignore_unavailable", "include_repository", "index_details", "master_timeout", "verbose", response_mimetypes=["application/json"], ) async def get(self, repository, snapshot, params=None, headers=None): """ Returns information about a snapshot. ``_ :arg repository: A repository name :arg snapshot: A comma-separated list of snapshot names :arg ignore_unavailable: Whether to ignore unavailable snapshots, defaults to false which means a SnapshotMissingException is thrown :arg include_repository: Whether to include the repository name in the snapshot info. Defaults to true. :arg index_details: Whether to include details of each index in the snapshot, if those details are available. Defaults to false. :arg master_timeout: Explicit operation timeout for connection to master node :arg verbose: Whether to show verbose snapshot info or only show the basic info found in the repository index blob """ for param in (repository, snapshot): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "GET", _make_path("_snapshot", repository, snapshot), params=params, headers=headers, ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def delete_repository(self, repository, params=None, headers=None): """ Deletes a repository. ``_ :arg repository: Name of the snapshot repository to unregister. Wildcard (`*`) patterns are supported. 
:arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") return await self.transport.perform_request( "DELETE", _make_path("_snapshot", repository), params=params, headers=headers, ) @query_params( "local", "master_timeout", response_mimetypes=["application/json"], ) async def get_repository(self, repository=None, params=None, headers=None): """ Returns information about a repository. ``_ :arg repository: A comma-separated list of repository names :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ return await self.transport.perform_request( "GET", _make_path("_snapshot", repository), params=params, headers=headers ) @query_params( "master_timeout", "timeout", "verify", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def create_repository(self, repository, body, params=None, headers=None): """ Creates a repository. ``_ :arg repository: A repository name :arg body: The repository definition :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout :arg verify: Whether to verify the repository after creation """ for param in (repository, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_snapshot", repository), params=params, headers=headers, body=body, ) @query_params( "master_timeout", "wait_for_completion", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def restore(self, repository, snapshot, body=None, params=None, headers=None): """ Restores a snapshot. ``_ :arg repository: A repository name :arg snapshot: A snapshot name :arg body: Details of what to restore :arg master_timeout: Explicit operation timeout for connection to master node :arg wait_for_completion: Should this request wait until the operation has completed before returning """ for param in (repository, snapshot): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path("_snapshot", repository, snapshot, "_restore"), params=params, headers=headers, body=body, ) @query_params( "ignore_unavailable", "master_timeout", response_mimetypes=["application/json"], ) async def status(self, repository=None, snapshot=None, params=None, headers=None): """ Returns information about the status of a snapshot. ``_ :arg repository: A repository name :arg snapshot: A comma-separated list of snapshot names :arg ignore_unavailable: Whether to ignore unavailable snapshots, defaults to false which means a SnapshotMissingException is thrown :arg master_timeout: Explicit operation timeout for connection to master node """ return await self.transport.perform_request( "GET", _make_path("_snapshot", repository, snapshot, "_status"), params=params, headers=headers, ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def verify_repository(self, repository, params=None, headers=None): """ Verifies a repository. 
``_ :arg repository: A repository name :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") return await self.transport.perform_request( "POST", _make_path("_snapshot", repository, "_verify"), params=params, headers=headers, ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) async def cleanup_repository(self, repository, params=None, headers=None): """ Removes stale data from repository. ``_ :arg repository: A repository name :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") return await self.transport.perform_request( "POST", _make_path("_snapshot", repository, "_cleanup"), params=params, headers=headers, ) @query_params( "master_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def clone( self, repository, snapshot, target_snapshot, body, params=None, headers=None ): """ Clones indices from one snapshot into another snapshot in the same repository. ``_ :arg repository: A repository name :arg snapshot: The name of the snapshot to clone from :arg target_snapshot: The name of the cloned snapshot to create :arg body: The snapshot clone definition :arg master_timeout: Explicit operation timeout for connection to master node """ for param in (repository, snapshot, target_snapshot, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_snapshot", repository, snapshot, "_clone", target_snapshot), params=params, headers=headers, body=body, ) @query_params( "blob_count", "concurrency", "detailed", "early_read_node_count", "max_blob_size", "max_total_data_size", "rare_action_probability", "rarely_abort_writes", "read_node_count", "seed", "timeout", response_mimetypes=["application/json"], ) async def repository_analyze(self, repository, params=None, headers=None): """ Analyzes a repository for correctness and performance ``_ :arg repository: A repository name :arg blob_count: Number of blobs to create during the test. Defaults to 100. :arg concurrency: Number of operations to run concurrently during the test. Defaults to 10. :arg detailed: Whether to return detailed results or a summary. Defaults to 'false' so that only the summary is returned. :arg early_read_node_count: Number of nodes on which to perform an early read on a blob, i.e. before writing has completed. Early reads are rare actions so the 'rare_action_probability' parameter is also relevant. Defaults to 2. :arg max_blob_size: Maximum size of a blob to create during the test, e.g '1gb' or '100mb'. Defaults to '10mb'. :arg max_total_data_size: Maximum total size of all blobs to create during the test, e.g '1tb' or '100gb'. Defaults to '1gb'. :arg rare_action_probability: Probability of taking a rare action such as an early read or an overwrite. Defaults to 0.02. :arg rarely_abort_writes: Whether to rarely abort writes before they complete. Defaults to 'true'. :arg read_node_count: Number of nodes on which to read a blob after writing. Defaults to 10. :arg seed: Seed for the random number generator used to create the test workload. Defaults to a random value. 
:arg timeout: Explicit operation timeout. Defaults to '30s'. """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") return await self.transport.perform_request( "POST", _make_path("_snapshot", repository, "_analyze"), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/snapshot.pyi000066400000000000000000000266621426163262700250270ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class SnapshotClient(NamespacedClient): async def create( self, *, repository: Any, snapshot: Any, body: Optional[Mapping[str, Any]] = ..., master_timeout: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete( self, *, repository: Any, snapshot: Any, master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
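# Usage sketch (illustrative comment, not part of the generated client):
# registering a shared-filesystem repository and taking a snapshot with the
# SnapshotClient implemented above. The ``type``/``settings`` body follows
# the put-repository API; the filesystem path and names are placeholders
# (an "fs" repository also requires the path to be allowed via
# ``path.repo`` on the nodes).
#
#     await es.snapshot.create_repository(
#         repository="my_repository",
#         body={"type": "fs", "settings": {"location": "/mnt/backups"}},
#     )
#     await es.snapshot.create(
#         repository="my_repository",
#         snapshot="snap-1",
#         wait_for_completion=True,  # block until the snapshot finishes
#     )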
async def get( self, *, repository: Any, snapshot: Any, ignore_unavailable: Optional[bool] = ..., include_repository: Optional[bool] = ..., index_details: Optional[bool] = ..., master_timeout: Optional[Any] = ..., verbose: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_repository( self, *, repository: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_repository( self, *, repository: Optional[Any] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def create_repository( self, *, repository: Any, body: Mapping[str, Any], master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., verify: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def restore( self, *, repository: Any, snapshot: Any, body: Optional[Mapping[str, Any]] = ..., master_timeout: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def status( self, *, repository: Optional[Any] = ..., snapshot: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def verify_repository( self, *, repository: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def cleanup_repository( self, *, repository: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def clone( self, *, repository: Any, snapshot: Any, target_snapshot: Any, body: Mapping[str, Any], master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
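# Usage sketch (illustrative comment, not part of the generated client):
# restoring indices from a snapshot with ``restore``. The ``indices``,
# ``rename_pattern``, and ``rename_replacement`` body fields follow the
# restore API; the regex rename below keeps restored copies from colliding
# with live indices, and all names are placeholders.
#
#     await es.snapshot.restore(
#         repository="my_repository",
#         snapshot="snap-1",
#         body={
#             "indices": "logs-*",
#             "rename_pattern": "(.+)",
#             "rename_replacement": "restored-$1",
#         },
#         wait_for_completion=True,
#     )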
async def repository_analyze( self, *, repository: Any, blob_count: Optional[Any] = ..., concurrency: Optional[Any] = ..., detailed: Optional[bool] = ..., early_read_node_count: Optional[Any] = ..., max_blob_size: Optional[Any] = ..., max_total_data_size: Optional[Any] = ..., rare_action_probability: Optional[Any] = ..., rarely_abort_writes: Optional[bool] = ..., read_node_count: Optional[Any] = ..., seed: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/sql.py000066400000000000000000000130601426163262700236020ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SqlClient(NamespacedClient): @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def clear_cursor(self, body, params=None, headers=None): """ Clears the SQL cursor ``_ :arg body: Specify the cursor value in the `cursor` element to clean the cursor. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_sql/close", params=params, headers=headers, body=body ) @query_params( "format", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def query(self, body, params=None, headers=None): """ Executes a SQL request ``_ :arg body: Use the `query` element to start a query. Use the `cursor` element to continue a query. :arg format: a short version of the Accept header, e.g. json, yaml """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_sql", params=params, headers=headers, body=body ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def translate(self, body, params=None, headers=None): """ Translates SQL into Elasticsearch queries ``_ :arg body: Specify the query in the `query` element. 
""" if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "POST", "/_sql/translate", params=params, headers=headers, body=body ) @query_params( response_mimetypes=["application/json"], ) async def delete_async(self, id, params=None, headers=None): """ Deletes an async SQL search or a stored synchronous SQL search. If the search is still running, the API cancels it. ``_ :arg id: The async search ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "DELETE", _make_path("_sql", "async", "delete", id), params=params, headers=headers, ) @query_params( "delimiter", "format", "keep_alive", "wait_for_completion_timeout", response_mimetypes=["application/json"], ) async def get_async(self, id, params=None, headers=None): """ Returns the current status and available results for an async SQL search or stored synchronous SQL search ``_ :arg id: The async search ID :arg delimiter: Separator for CSV results Default: , :arg format: Short version of the Accept header, e.g. json, yaml :arg keep_alive: Retention period for the search and its results Default: 5d :arg wait_for_completion_timeout: Duration to wait for complete results """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "GET", _make_path("_sql", "async", id), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) async def get_async_status(self, id, params=None, headers=None): """ Returns the current status of an async SQL search or a stored synchronous SQL search ``_ :arg id: The async search ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "GET", _make_path("_sql", "async", "status", id), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/sql.pyi000066400000000000000000000126061426163262700237600ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class SqlClient(NamespacedClient): async def clear_cursor( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def query( self, *, body: Mapping[str, Any], format: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def translate( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_async( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_async( self, *, id: Any, delimiter: Optional[Any] = ..., format: Optional[Any] = ..., keep_alive: Optional[Any] = ..., wait_for_completion_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def get_async_status( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/ssl.py000066400000000000000000000025441426163262700236110ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, query_params class SslClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def certificates(self, params=None, headers=None): """ Retrieves information about the X.509 certificates used to encrypt communications in the cluster. ``_ """ return await self.transport.perform_request( "GET", "/_ssl/certificates", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/ssl.pyi000066400000000000000000000031611426163262700237560ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
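# ---------------------------------------------------------------------------
# Editor's note, not part of the library source: `ssl.certificates()` defined
# above takes no required arguments and returns a list describing the X.509
# certificates the cluster uses. The HTTPS endpoint, credentials, and CA path
# are hypothetical assumptions.
# ---------------------------------------------------------------------------
import asyncio

from elasticsearch import AsyncElasticsearch


async def show_certificates() -> None:
    client = AsyncElasticsearch(
        "https://localhost:9200",
        http_auth=("elastic", "changeme"),
        ca_certs="/path/to/ca.pem",
    )
    try:
        for cert in await client.ssl.certificates():
            print(cert["path"], cert["expiry"])
    finally:
        await client.close()


asyncio.run(show_certificates())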
from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class SslClient(NamespacedClient): async def certificates( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/tasks.py000066400000000000000000000120461426163262700241330ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import warnings from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class TasksClient(NamespacedClient): @query_params( "actions", "detailed", "group_by", "nodes", "parent_task_id", "timeout", "wait_for_completion", response_mimetypes=["application/json"], ) async def list(self, params=None, headers=None): """ Returns a list of tasks. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg actions: A comma-separated list of actions that should be returned. Leave empty to return all. :arg detailed: Return detailed task information (default: false) :arg group_by: Group tasks by nodes or parent/child relationships Valid choices: nodes, parents, none Default: nodes :arg nodes: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg parent_task_id: Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all. :arg timeout: Explicit operation timeout :arg wait_for_completion: Wait for the matching tasks to complete (default: false) """ return await self.transport.perform_request( "GET", "/_tasks", params=params, headers=headers ) @query_params( "actions", "nodes", "parent_task_id", "wait_for_completion", response_mimetypes=["application/json"], ) async def cancel(self, task_id=None, params=None, headers=None): """ Cancels a task, if it can be cancelled through an API. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg task_id: Cancel the task with specified task id (node_id:task_number) :arg actions: A comma-separated list of actions that should be cancelled. Leave empty to cancel all. 
:arg nodes: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg parent_task_id: Cancel tasks with specified parent task id (node_id:task_number). Set to -1 to cancel all. :arg wait_for_completion: Should the request block until the cancellation of the task and its descendant tasks is completed. Defaults to false """ return await self.transport.perform_request( "POST", _make_path("_tasks", task_id, "_cancel"), params=params, headers=headers, ) @query_params( "timeout", "wait_for_completion", response_mimetypes=["application/json"], ) async def get(self, task_id=None, params=None, headers=None): """ Returns information about a task. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg task_id: Return the task with specified id (node_id:task_number) :arg timeout: Explicit operation timeout :arg wait_for_completion: Wait for the matching tasks to complete (default: false) """ if task_id in SKIP_IN_PATH: warnings.warn( "Calling client.tasks.get() without a task_id is deprecated " "and will be removed in v8.0. Use client.tasks.list() instead.", category=DeprecationWarning, stacklevel=3, ) return await self.transport.perform_request( "GET", _make_path("_tasks", task_id), params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/tasks.pyi000066400000000000000000000071201426163262700243010ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class TasksClient(NamespacedClient): async def list( self, *, actions: Optional[Any] = ..., detailed: Optional[bool] = ..., group_by: Optional[Any] = ..., nodes: Optional[Any] = ..., parent_task_id: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
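# ---------------------------------------------------------------------------
# Editor's note, not part of the library source: a minimal sketch of the
# async tasks client defined above. It assumes a node at localhost:9200; the
# task id shown is a hypothetical example of the `node_id:task_number` form.
# ---------------------------------------------------------------------------
import asyncio

from elasticsearch import AsyncElasticsearch


async def tasks_demo() -> None:
    client = AsyncElasticsearch("http://localhost:9200")
    try:
        # List detailed task information, grouped by parent/child relationship.
        print(await client.tasks.list(detailed=True, group_by="parents"))
        # Cancel every cancellable reindex task on the cluster.
        await client.tasks.cancel(actions="*reindex")
        # Fetch a single task and block until it completes.
        print(
            await client.tasks.get(
                task_id="oTUltX4IQMOUUVeiohTt8A:12345",
                wait_for_completion=True,
                timeout="30s",
            )
        )
    finally:
        await client.close()


asyncio.run(tasks_demo())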
async def cancel( self, *, task_id: Optional[Any] = ..., actions: Optional[Any] = ..., nodes: Optional[Any] = ..., parent_task_id: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get( self, *, task_id: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/text_structure.py000066400000000000000000000077061426163262700261210ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _bulk_body, query_params class TextStructureClient(NamespacedClient): @query_params( "charset", "column_names", "delimiter", "explain", "format", "grok_pattern", "has_header_row", "line_merge_size_limit", "lines_to_sample", "quote", "should_trim_fields", "timeout", "timestamp_field", "timestamp_format", request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) async def find_structure(self, body, params=None, headers=None): """ Finds the structure of a text file. The text file must contain data that is suitable to be ingested into Elasticsearch. 
``_ :arg body: The contents of the file to be analyzed :arg charset: Optional parameter to specify the character set of the file :arg column_names: Optional parameter containing a comma separated list of the column names for a delimited file :arg delimiter: Optional parameter to specify the delimiter character for a delimited file - must be a single character :arg explain: Whether to include a commentary on how the structure was derived :arg format: Optional parameter to specify the high level file format Valid choices: ndjson, xml, delimited, semi_structured_text :arg grok_pattern: Optional parameter to specify the Grok pattern that should be used to extract fields from messages in a semi- structured text file :arg has_header_row: Optional parameter to specify whether a delimited file includes the column names in its first row :arg line_merge_size_limit: Maximum number of characters permitted in a single message when lines are merged to create messages. Default: 10000 :arg lines_to_sample: How many lines of the file should be included in the analysis Default: 1000 :arg quote: Optional parameter to specify the quote character for a delimited file - must be a single character :arg should_trim_fields: Optional parameter to specify whether the values between delimiters in a delimited file should have whitespace trimmed from them :arg timeout: Timeout after which the analysis will be aborted Default: 25s :arg timestamp_field: Optional parameter to specify the timestamp field in the file :arg timestamp_format: Optional parameter to specify the timestamp format in the file - may be either a Joda or Java time format """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return await self.transport.perform_request( "POST", "/_text_structure/find_structure", params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/text_structure.pyi000066400000000000000000000044531426163262700262660ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
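# ---------------------------------------------------------------------------
# Editor's note, not part of the library source: a minimal sketch of
# `text_structure.find_structure()` defined above. The body is NDJSON-like --
# a sequence of documents (or raw `str`/`bytes` lines); the sample documents
# below are hypothetical.
# ---------------------------------------------------------------------------
import asyncio

from elasticsearch import AsyncElasticsearch


async def analyze_sample() -> None:
    client = AsyncElasticsearch("http://localhost:9200")
    try:
        sample = [
            {"ts": "2021-01-01T00:00:00Z", "msg": "service started"},
            {"ts": "2021-01-01T00:00:05Z", "msg": "service stopped"},
        ]
        structure = await client.text_structure.find_structure(
            body=sample, lines_to_sample=1000, explain=True
        )
        # The response proposes a file format and index mappings, among
        # other details, for ingesting data shaped like the sample.
        print(structure["format"], structure["mappings"])
    finally:
        await client.close()


asyncio.run(analyze_sample())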
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Sequence, Tuple, Union, ) from .utils import NamespacedClient class TextStructureClient(NamespacedClient): async def find_structure( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], charset: Optional[Any] = ..., column_names: Optional[Any] = ..., delimiter: Optional[Any] = ..., explain: Optional[bool] = ..., format: Optional[Any] = ..., grok_pattern: Optional[Any] = ..., has_header_row: Optional[bool] = ..., line_merge_size_limit: Optional[Any] = ..., lines_to_sample: Optional[Any] = ..., quote: Optional[Any] = ..., should_trim_fields: Optional[bool] = ..., timeout: Optional[Any] = ..., timestamp_field: Optional[Any] = ..., timestamp_format: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/transform.py000066400000000000000000000246471426163262700250330ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class TransformClient(NamespacedClient): @query_params( "force", "timeout", response_mimetypes=["application/json"], ) async def delete_transform(self, transform_id, params=None, headers=None): """ Deletes an existing transform. ``_ :arg transform_id: The id of the transform to delete :arg force: When `true`, the transform is deleted regardless of its current state. The default value is `false`, meaning that the transform must be `stopped` before it can be deleted. :arg timeout: Controls the time to wait for the transform deletion """ if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return await self.transport.perform_request( "DELETE", _make_path("_transform", transform_id), params=params, headers=headers, ) @query_params( "allow_no_match", "exclude_generated", "from_", "size", response_mimetypes=["application/json"], ) async def get_transform(self, transform_id=None, params=None, headers=None): """ Retrieves configuration information for transforms. ``_ :arg transform_id: The id or comma delimited list of id expressions of the transforms to get, '_all' or '*' implies get all transforms :arg allow_no_match: Whether to ignore if a wildcard expression matches no transforms. 
(This includes `_all` string or when no transforms have been specified) :arg exclude_generated: Omits fields that are illegal to set on transform PUT :arg from_: skips a number of transform configs, defaults to 0 :arg size: specifies a max number of transforms to get, defaults to 100 """ if "from_" in params: params["from"] = params.pop("from_") return await self.transport.perform_request( "GET", _make_path("_transform", transform_id), params=params, headers=headers, ) @query_params( "allow_no_match", "from_", "size", response_mimetypes=["application/json"], ) async def get_transform_stats(self, transform_id, params=None, headers=None): """ Retrieves usage information for transforms. ``_ :arg transform_id: The id of the transform for which to get stats. '_all' or '*' implies all transforms :arg allow_no_match: Whether to ignore if a wildcard expression matches no transforms. (This includes `_all` string or when no transforms have been specified) :arg from_: skips a number of transform stats, defaults to 0 :arg size: specifies a max number of transform stats to get, defaults to 100 """ if "from_" in params: params["from"] = params.pop("from_") if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return await self.transport.perform_request( "GET", _make_path("_transform", transform_id, "_stats"), params=params, headers=headers, ) @query_params( "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def preview_transform( self, body=None, transform_id=None, params=None, headers=None ): """ Previews a transform. ``_ :arg body: The definition for the transform to preview :arg transform_id: The id of the transform to preview. :arg timeout: Controls the time to wait for the preview """ return await self.transport.perform_request( "POST", _make_path("_transform", transform_id, "_preview"), params=params, headers=headers, body=body, ) @query_params( "defer_validation", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_transform(self, transform_id, body, params=None, headers=None): """ Instantiates a transform. ``_ :arg transform_id: The id of the new transform. :arg body: The transform definition :arg defer_validation: If validations should be deferred until transform starts, defaults to false. :arg timeout: Controls the time to wait for the transform to start """ for param in (transform_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "PUT", _make_path("_transform", transform_id), params=params, headers=headers, body=body, ) @query_params( "timeout", response_mimetypes=["application/json"], ) async def start_transform(self, transform_id, params=None, headers=None): """ Starts one or more transforms. ``_ :arg transform_id: The id of the transform to start :arg timeout: Controls the time to wait for the transform to start """ if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return await self.transport.perform_request( "POST", _make_path("_transform", transform_id, "_start"), params=params, headers=headers, ) @query_params( "allow_no_match", "force", "timeout", "wait_for_checkpoint", "wait_for_completion", response_mimetypes=["application/json"], ) async def stop_transform(self, transform_id, params=None, headers=None): """ Stops one or more transforms. 
``_ :arg transform_id: The id of the transform to stop :arg allow_no_match: Whether to ignore if a wildcard expression matches no transforms. (This includes `_all` string or when no transforms have been specified) :arg force: Whether to force stop a failed transform or not. Default to false :arg timeout: Controls the time to wait until the transform has stopped. Default to 30 seconds :arg wait_for_checkpoint: Whether to wait for the transform to reach a checkpoint before stopping. Default to false :arg wait_for_completion: Whether to wait for the transform to fully stop before returning or not. Default to false """ if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return await self.transport.perform_request( "POST", _make_path("_transform", transform_id, "_stop"), params=params, headers=headers, ) @query_params( "defer_validation", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def update_transform(self, transform_id, body, params=None, headers=None): """ Updates certain properties of a transform. ``_ :arg transform_id: The id of the transform. :arg body: The update transform definition :arg defer_validation: If validations should be deferred until transform starts, defaults to false. :arg timeout: Controls the time to wait for the update """ for param in (transform_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return await self.transport.perform_request( "POST", _make_path("_transform", transform_id, "_update"), params=params, headers=headers, body=body, ) @query_params( "dry_run", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def upgrade_transforms(self, params=None, headers=None): """ Upgrades all transforms. ``_ :arg dry_run: Whether to only check for updates but don't execute :arg timeout: Controls the time to wait for the upgrade """ return await self.transport.perform_request( "POST", "/_transform/_upgrade", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/transform.pyi000066400000000000000000000207231426163262700251730ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
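# ---------------------------------------------------------------------------
# Editor's note, not part of the library source: a minimal sketch of the
# async transform client defined above. The transform id, index names, and
# field names are hypothetical.
# ---------------------------------------------------------------------------
import asyncio

from elasticsearch import AsyncElasticsearch


async def transform_demo() -> None:
    client = AsyncElasticsearch("http://localhost:9200")
    try:
        await client.transform.put_transform(
            transform_id="orders-by-customer",
            body={
                "source": {"index": "orders"},
                "dest": {"index": "orders_by_customer"},
                "pivot": {
                    "group_by": {"customer": {"terms": {"field": "customer_id"}}},
                    "aggregations": {"total": {"sum": {"field": "amount"}}},
                },
            },
        )
        await client.transform.start_transform(transform_id="orders-by-customer")
        # Note the trailing underscore: the client renames `from_` to the
        # reserved `from` query parameter (see get_transform_stats above).
        print(
            await client.transform.get_transform_stats(
                transform_id="orders-by-customer", from_=0, size=10
            )
        )
    finally:
        await client.close()


asyncio.run(transform_demo())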
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class TransformClient(NamespacedClient): async def delete_transform( self, *, transform_id: Any, force: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_transform( self, *, transform_id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., exclude_generated: Optional[bool] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def get_transform_stats( self, *, transform_id: Any, allow_no_match: Optional[bool] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def preview_transform( self, *, body: Optional[Mapping[str, Any]] = ..., transform_id: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def put_transform( self, *, transform_id: Any, body: Mapping[str, Any], defer_validation: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def start_transform( self, *, transform_id: Any, timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def stop_transform( self, *, transform_id: Any, allow_no_match: Optional[bool] = ..., force: Optional[bool] = ..., timeout: Optional[Any] = ..., wait_for_checkpoint: Optional[bool] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def update_transform( self, *, transform_id: Any, body: Mapping[str, Any], defer_validation: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def upgrade_transforms( self, *, dry_run: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
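# ---------------------------------------------------------------------------
# Editor's note, not part of the library source: two further sketches for the
# stubs above -- previewing a transform without creating it, and dry-running
# the cluster-wide transform upgrade. The pivot definition is hypothetical.
# ---------------------------------------------------------------------------
import asyncio

from elasticsearch import AsyncElasticsearch


async def preview_and_upgrade() -> None:
    client = AsyncElasticsearch("http://localhost:9200")
    try:
        preview = await client.transform.preview_transform(
            body={
                "source": {"index": "orders"},
                "pivot": {
                    "group_by": {"customer": {"terms": {"field": "customer_id"}}},
                    "aggregations": {"total": {"sum": {"field": "amount"}}},
                },
            }
        )
        print(preview["generated_dest_index"])
        # `dry_run=True` only reports which transforms would be upgraded.
        print(await client.transform.upgrade_transforms(dry_run=True))
    finally:
        await client.close()


asyncio.run(preview_and_upgrade())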
elasticsearch-py-7.17.6/elasticsearch/_async/client/utils.py000066400000000000000000000016711426163262700241500ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from ...client.utils import ( # noqa SKIP_IN_PATH, NamespacedClient, _bulk_body, _escape, _make_path, _normalize_hosts, query_params, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/utils.pyi000066400000000000000000000025541426163262700243220ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from ...client.utils import SKIP_IN_PATH as SKIP_IN_PATH from ...client.utils import _bulk_body as _bulk_body from ...client.utils import _escape as _escape from ...client.utils import _make_path as _make_path # noqa from ...client.utils import _normalize_hosts as _normalize_hosts from ...client.utils import query_params as query_params from ..client import AsyncElasticsearch from ..transport import AsyncTransport class NamespacedClient: client: AsyncElasticsearch def __init__(self, client: AsyncElasticsearch) -> None: ... @property def transport(self) -> AsyncTransport: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/watcher.py000066400000000000000000000207541426163262700244500ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class WatcherClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) async def ack_watch(self, watch_id, action_id=None, params=None, headers=None): """ Acknowledges a watch, manually throttling the execution of the watch's actions. ``_ :arg watch_id: Watch ID :arg action_id: A comma-separated list of the action ids to be acked """ if watch_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'watch_id'.") return await self.transport.perform_request( "PUT", _make_path("_watcher", "watch", watch_id, "_ack", action_id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def activate_watch(self, watch_id, params=None, headers=None): """ Activates a currently inactive watch. ``_ :arg watch_id: Watch ID """ if watch_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'watch_id'.") return await self.transport.perform_request( "PUT", _make_path("_watcher", "watch", watch_id, "_activate"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def deactivate_watch(self, watch_id, params=None, headers=None): """ Deactivates a currently active watch. ``_ :arg watch_id: Watch ID """ if watch_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'watch_id'.") return await self.transport.perform_request( "PUT", _make_path("_watcher", "watch", watch_id, "_deactivate"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def delete_watch(self, id, params=None, headers=None): """ Removes a watch from Watcher. ``_ :arg id: Watch ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "DELETE", _make_path("_watcher", "watch", id), params=params, headers=headers, ) @query_params( "debug", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def execute_watch(self, body=None, id=None, params=None, headers=None): """ Forces the execution of a stored watch. ``_ :arg body: Execution control :arg id: Watch ID :arg debug: indicates whether the watch should execute in debug mode """ return await self.transport.perform_request( "PUT", _make_path("_watcher", "watch", id, "_execute"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def get_watch(self, id, params=None, headers=None): """ Retrieves a watch by its ID. ``_ :arg id: Watch ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "GET", _make_path("_watcher", "watch", id), params=params, headers=headers ) @query_params( "active", "if_primary_term", "if_seq_no", "version", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def put_watch(self, id, body=None, params=None, headers=None): """ Creates a new watch, or updates an existing one. 
``_ :arg id: Watch ID :arg body: The watch :arg active: Specify whether the watch is in/active by default :arg if_primary_term: only update the watch if the last operation that has changed the watch has the specified primary term :arg if_seq_no: only update the watch if the last operation that has changed the watch has the specified sequence number :arg version: Explicit version number for concurrency control """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return await self.transport.perform_request( "PUT", _make_path("_watcher", "watch", id), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) async def start(self, params=None, headers=None): """ Starts Watcher if it is not already running. ``_ """ return await self.transport.perform_request( "POST", "/_watcher/_start", params=params, headers=headers ) @query_params( "emit_stacktraces", response_mimetypes=["application/json"], ) async def stats(self, metric=None, params=None, headers=None): """ Retrieves the current Watcher metrics. ``_ :arg metric: Controls what additional stat metrics should be include in the response Valid choices: _all, queued_watches, current_watches, pending_watches :arg emit_stacktraces: Emits stack traces of currently running watches """ return await self.transport.perform_request( "GET", _make_path("_watcher", "stats", metric), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) async def stop(self, params=None, headers=None): """ Stops Watcher if it is running. ``_ """ return await self.transport.perform_request( "POST", "/_watcher/_stop", params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) async def query_watches(self, body=None, params=None, headers=None): """ Retrieves stored watches. ``_ :arg body: From, size, query, sort and search_after """ return await self.transport.perform_request( "POST", "/_watcher/_query/watches", params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/_async/client/watcher.pyi000066400000000000000000000222321426163262700246120ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
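# ---------------------------------------------------------------------------
# Editor's note, not part of the library source: a minimal sketch of the
# async watcher client defined above. The watch id, indices, threshold, and
# action are hypothetical.
# ---------------------------------------------------------------------------
import asyncio

from elasticsearch import AsyncElasticsearch


async def watcher_demo() -> None:
    client = AsyncElasticsearch("http://localhost:9200")
    try:
        # Register the watch inactive so it does not fire on its own yet.
        await client.watcher.put_watch(
            id="error-spike",
            active=False,
            body={
                "trigger": {"schedule": {"interval": "10m"}},
                "input": {
                    "search": {
                        "request": {
                            "indices": ["logs-*"],
                            "body": {"query": {"match": {"level": "error"}}},
                        }
                    }
                },
                "condition": {"compare": {"ctx.payload.hits.total": {"gt": 100}}},
                "actions": {"log_it": {"logging": {"text": "error spike"}}},
            },
        )
        # Force one execution in debug mode, then acknowledge its actions.
        await client.watcher.execute_watch(id="error-spike", debug=True)
        await client.watcher.ack_watch(watch_id="error-spike")
        print(await client.watcher.stats(metric="_all"))
    finally:
        await client.close()


asyncio.run(watcher_demo())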
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class WatcherClient(NamespacedClient): async def ack_watch( self, *, watch_id: Any, action_id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def activate_watch( self, *, watch_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def deactivate_watch( self, *, watch_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def delete_watch( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def execute_watch( self, *, body: Optional[Mapping[str, Any]] = ..., id: Optional[Any] = ..., debug: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def get_watch( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def put_watch( self, *, id: Any, body: Optional[Mapping[str, Any]] = ..., active: Optional[bool] = ..., if_primary_term: Optional[Any] = ..., if_seq_no: Optional[Any] = ..., version: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def start( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def stats( self, *, metric: Optional[Any] = ..., emit_stacktraces: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def stop( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
async def query_watches( self, *, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/client/xpack.py000066400000000000000000000043601426163262700241140ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, query_params class XPackClient(NamespacedClient): def __getattr__(self, attr_name): return getattr(self.client, attr_name) # AUTO-GENERATED-API-DEFINITIONS # @query_params( "accept_enterprise", "categories", response_mimetypes=["application/json"], ) async def info(self, params=None, headers=None): """ Retrieves information about the installed X-Pack features. ``_ :arg accept_enterprise: If an enterprise license is installed, return the type and mode as 'enterprise' (default: false) :arg categories: Comma-separated list of info categories. Can be any of: build, license, features """ return await self.transport.perform_request( "GET", "/_xpack", params=params, headers=headers ) @query_params( "master_timeout", response_mimetypes=["application/json"], ) async def usage(self, params=None, headers=None): """ Retrieves usage information about the installed X-Pack features. ``_ :arg master_timeout: Specify timeout for watch write operation """ return await self.transport.perform_request( "GET", "/_xpack/usage", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/_async/client/xpack.pyi000066400000000000000000000050661426163262700242710ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
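
# Illustrative sketch, not part of the generated stubs: the runtime
# ``XPackClient`` in ``xpack.py`` proxies unknown attributes to the wrapped
# client, so with a hypothetical ``AsyncElasticsearch`` client ``es`` the two
# endpoints below are called as:
#
#     info = await es.xpack.info(categories="build,license")
#     usage = await es.xpack.usage()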
from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class XPackClient(NamespacedClient): def __getattr__(self, attr_name: str) -> Any: return getattr(self.client, attr_name) # AUTO-GENERATED-API-DEFINITIONS # async def info( self, *, accept_enterprise: Optional[bool] = ..., categories: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... async def usage( self, *, master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/_async/compat.py000066400000000000000000000026311426163262700230120ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import asyncio from ..compat import * # noqa # Hack supporting Python 3.6 asyncio which didn't have 'get_running_loop()'. # Essentially we want to get away from having users pass in a loop to us. # Instead we should call 'get_running_loop()' whenever we need # the currently running loop. # See: https://aiopg.readthedocs.io/en/stable/run_loop.html#implementation try: from asyncio import get_running_loop except ImportError: def get_running_loop(): loop = asyncio.get_event_loop() if not loop.is_running(): raise RuntimeError("no running event loop") return loop __all__ = ["get_running_loop"] elasticsearch-py-7.17.6/elasticsearch/_async/compat.pyi000066400000000000000000000015351426163262700231650ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import asyncio def get_running_loop() -> asyncio.AbstractEventLoop: ... elasticsearch-py-7.17.6/elasticsearch/_async/helpers.py000066400000000000000000000452211426163262700231730ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # Licensed to Elasticsearch B.V under one or more agreements. # Elasticsearch B.V licenses this file to you under the Apache 2.0 License. # See the LICENSE file in the project root for more information import asyncio import logging from ..compat import map from ..exceptions import NotFoundError, TransportError from ..helpers.actions import ( _ActionChunker, _add_helper_meta_to_kwargs, _process_bulk_chunk_error, _process_bulk_chunk_success, expand_action, ) from ..helpers.errors import ScanError from .client import AsyncElasticsearch # noqa logger = logging.getLogger("elasticsearch.helpers") async def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): """ Split actions into chunks by number or size, serialize them into strings in the process. """ chunker = _ActionChunker( chunk_size=chunk_size, max_chunk_bytes=max_chunk_bytes, serializer=serializer ) async for action, data in actions: ret = chunker.feed(action, data) if ret: yield ret ret = chunker.flush() if ret: yield ret async def _process_bulk_chunk( client, bulk_actions, bulk_data, raise_on_exception=True, raise_on_error=True, ignore_status=(), *args, **kwargs ): """ Send a bulk request to elasticsearch and process the output. 
""" if not isinstance(ignore_status, (list, tuple)): ignore_status = (ignore_status,) try: # send the actual request resp = await client.bulk(*args, body="\n".join(bulk_actions) + "\n", **kwargs) except TransportError as e: gen = _process_bulk_chunk_error( error=e, bulk_data=bulk_data, ignore_status=ignore_status, raise_on_exception=raise_on_exception, raise_on_error=raise_on_error, ) else: gen = _process_bulk_chunk_success( resp=resp, bulk_data=bulk_data, ignore_status=ignore_status, raise_on_error=raise_on_error, ) for item in gen: yield item def aiter(x): """Turns an async iterable or iterable into an async iterator""" if hasattr(x, "__anext__"): return x elif hasattr(x, "__aiter__"): return x.__aiter__() async def f(): for item in x: yield item return f().__aiter__() async def azip(*iterables): """Zips async iterables and iterables into an async iterator with the same behavior as zip() """ aiters = [aiter(x) for x in iterables] try: while True: yield tuple([await x.__anext__() for x in aiters]) except StopAsyncIteration: pass async def async_streaming_bulk( client, actions, chunk_size=500, max_chunk_bytes=100 * 1024 * 1024, raise_on_error=True, expand_action_callback=expand_action, raise_on_exception=True, max_retries=0, initial_backoff=2, max_backoff=600, yield_ok=True, ignore_status=(), *args, **kwargs ): """ Streaming bulk consumes actions from the iterable passed in and yields results per action. For non-streaming usecases use :func:`~elasticsearch.helpers.async_bulk` which is a wrapper around streaming bulk that returns summary information about the bulk operation once the entire input is consumed and sent. If you specify ``max_retries`` it will also retry any documents that were rejected with a ``429`` status code. To do this it will wait (**by calling asyncio.sleep**) for ``initial_backoff`` seconds and then, every subsequent rejection for the same chunk, for double the time every time up to ``max_backoff`` seconds. :arg client: instance of :class:`~elasticsearch.AsyncElasticsearch` to use :arg actions: iterable or async iterable containing the actions to be executed :arg chunk_size: number of docs in one chunk sent to es (default: 500) :arg max_chunk_bytes: the maximum size of the request in bytes (default: 100MB) :arg raise_on_error: raise ``BulkIndexError`` containing errors (as `.errors`) from the execution of the last chunk when some occur. By default we raise. :arg raise_on_exception: if ``False`` then don't propagate exceptions from call to ``bulk`` and just report the items that failed as failed. :arg expand_action_callback: callback executed on each action passed in, should return a tuple containing the action line and the data line (`None` if data line should be omitted). :arg max_retries: maximum number of times a document will be retried when ``429`` is received, set to 0 (default) for no retries on ``429`` :arg initial_backoff: number of seconds we should wait before the first retry. 
Any subsequent retries will be powers of ``initial_backoff * 2**retry_number`` :arg max_backoff: maximum number of seconds a retry will wait :arg yield_ok: if set to False will skip successful documents in the output :arg ignore_status: list of HTTP status code that you want to ignore """ async def map_actions(): async for item in aiter(actions): yield expand_action_callback(item) async for bulk_data, bulk_actions in _chunk_actions( map_actions(), chunk_size, max_chunk_bytes, client.transport.serializer ): for attempt in range(max_retries + 1): to_retry, to_retry_data = [], [] if attempt: await asyncio.sleep( min(max_backoff, initial_backoff * 2 ** (attempt - 1)) ) try: async for data, (ok, info) in azip( bulk_data, _process_bulk_chunk( client, bulk_actions, bulk_data, raise_on_exception, raise_on_error, ignore_status, *args, **kwargs, ), ): if not ok: action, info = info.popitem() # retry if retries enabled, we get 429, and we are not # in the last attempt if ( max_retries and info["status"] == 429 and (attempt + 1) <= max_retries ): # _process_bulk_chunk expects strings so we need to # re-serialize the data to_retry.extend( map(client.transport.serializer.dumps, data) ) to_retry_data.append(data) else: yield ok, {action: info} elif yield_ok: yield ok, info except TransportError as e: # suppress 429 errors since we will retry them if attempt == max_retries or e.status_code != 429: raise else: if not to_retry: break # retry only subset of documents that didn't succeed bulk_actions, bulk_data = to_retry, to_retry_data async def async_bulk( client, actions, stats_only=False, ignore_status=(), *args, **kwargs ): """ Helper for the :meth:`~elasticsearch.AsyncElasticsearch.bulk` api that provides a more human friendly interface - it consumes an iterator of actions and sends them to elasticsearch in chunks. It returns a tuple with summary information - number of successfully executed actions and either list of errors or number of errors if ``stats_only`` is set to ``True``. Note that by default we raise a ``BulkIndexError`` when we encounter an error so options like ``stats_only`` only+ apply when ``raise_on_error`` is set to ``False``. When errors are being collected original document data is included in the error dictionary which can lead to an extra high memory usage. If you need to process a lot of data and want to ignore/collect errors please consider using the :func:`~elasticsearch.helpers.async_streaming_bulk` helper which will just return the errors and not store them in memory. :arg client: instance of :class:`~elasticsearch.AsyncElasticsearch` to use :arg actions: iterator containing the actions :arg stats_only: if `True` only report number of successful/failed operations instead of just number of successful and a list of error responses :arg ignore_status: list of HTTP status code that you want to ignore Any additional keyword arguments will be passed to :func:`~elasticsearch.helpers.async_streaming_bulk` which is used to execute the operation, see :func:`~elasticsearch.helpers.async_streaming_bulk` for more accepted parameters. 
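
    A minimal illustrative call (``es`` is a hypothetical
    :class:`~elasticsearch.AsyncElasticsearch` client and ``"my-index"`` a
    hypothetical index)::

        docs = ({"_index": "my-index", "_source": {"value": i}} for i in range(100))
        success, errors = await async_bulk(es, docs)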
""" success, failed = 0, 0 # list of errors to be collected is not stats_only errors = [] # make streaming_bulk yield successful results so we can count them kwargs["yield_ok"] = True async for ok, item in async_streaming_bulk( client, actions, ignore_status=ignore_status, *args, **kwargs ): # go through request-response pairs and detect failures if not ok: if not stats_only: errors.append(item) failed += 1 else: success += 1 return success, failed if stats_only else errors async def async_scan( client, query=None, scroll="5m", raise_on_error=True, preserve_order=False, size=1000, request_timeout=None, clear_scroll=True, scroll_kwargs=None, **kwargs ): """ Simple abstraction on top of the :meth:`~elasticsearch.AsyncElasticsearch.scroll` api - a simple iterator that yields all hits as returned by underlining scroll requests. By default scan does not return results in any pre-determined order. To have a standard order in the returned documents (either by score or explicit sort definition) when scrolling, use ``preserve_order=True``. This may be an expensive operation and will negate the performance benefits of using ``scan``. :arg client: instance of :class:`~elasticsearch.AsyncElasticsearch` to use :arg query: body for the :meth:`~elasticsearch.AsyncElasticsearch.search` api :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg raise_on_error: raises an exception (``ScanError``) if an error is encountered (some shards fail to execute). By default we raise. :arg preserve_order: don't set the ``search_type`` to ``scan`` - this will cause the scroll to paginate with preserving the order. Note that this can be an extremely expensive operation and can easily lead to unpredictable results, use with caution. :arg size: size (per shard) of the batch send at each iteration. :arg request_timeout: explicit timeout for each call to ``scan`` :arg clear_scroll: explicitly calls delete on the scroll id via the clear scroll API at the end of the method on completion or error, defaults to true. :arg scroll_kwargs: additional kwargs to be passed to :meth:`~elasticsearch.AsyncElasticsearch.scroll` Any additional keyword arguments will be passed to the initial :meth:`~elasticsearch.AsyncElasticsearch.search` call:: async_scan(es, query={"query": {"match": {"title": "python"}}}, index="orders-*", doc_type="books" ) """ scroll_kwargs = scroll_kwargs.copy() if scroll_kwargs else {} scroll_kwargs["scroll"] = scroll _add_helper_meta_to_kwargs(scroll_kwargs, "s") if not preserve_order: query = query.copy() if query else {} query["sort"] = "_doc" # Grab options that should be propagated to every # API call within this helper instead of just 'search()' transport_kwargs = {} for key in ("headers", "api_key", "http_auth"): if key in kwargs: transport_kwargs[key] = kwargs[key] # If the user is using 'scroll_kwargs' we want # to propagate there too, but to not break backwards # compatibility we'll not override anything already given. if scroll_kwargs is not None and transport_kwargs: for key, val in transport_kwargs.items(): scroll_kwargs.setdefault(key, val) # initial search search_kwargs = kwargs.copy() # Setting query={"from": ...} would make 'from' be used # as a keyword argument instead of 'from_'. We handle that here. 
if "from" in search_kwargs: search_kwargs["from_"] = search_kwargs.pop("from") if query: search_kwargs.update(query) if "from" in search_kwargs: search_kwargs["from_"] = search_kwargs.pop("from") search_kwargs["scroll"] = scroll search_kwargs["size"] = size search_kwargs["request_timeout"] = request_timeout _add_helper_meta_to_kwargs(search_kwargs, "s") resp = await client.search(**search_kwargs) scroll_id = resp.get("_scroll_id") try: while scroll_id and resp["hits"]["hits"]: for hit in resp["hits"]["hits"]: yield hit # Default to 0 if the value isn't included in the response shards_successful = resp["_shards"].get("successful", 0) shards_skipped = resp["_shards"].get("skipped", 0) shards_total = resp["_shards"].get("total", 0) # check if we have any errors if (shards_successful + shards_skipped) < shards_total: shards_message = "Scroll request has only succeeded on %d (+%d skipped) shards out of %d." logger.warning( shards_message, shards_successful, shards_skipped, shards_total, ) if raise_on_error: raise ScanError( scroll_id, shards_message % ( shards_successful, shards_skipped, shards_total, ), ) scroll_kwargs["scroll_id"] = scroll_id resp = await client.scroll(**scroll_kwargs) scroll_id = resp.get("_scroll_id") finally: if scroll_id and clear_scroll: await client.clear_scroll( scroll_id=scroll_id, **transport_kwargs, ignore=(404,), params={"__elastic_client_meta": (("h", "s"),)}, ) async def async_reindex( client, source_index, target_index, query=None, target_client=None, chunk_size=500, scroll="5m", op_type=None, scan_kwargs={}, bulk_kwargs={}, ): """ Reindex all documents from one index that satisfy a given query to another, potentially (if `target_client` is specified) on a different cluster. If you don't specify the query you will reindex all the documents. Since ``2.3`` a :meth:`~elasticsearch.AsyncElasticsearch.reindex` api is available as part of elasticsearch itself. It is recommended to use the api instead of this helper wherever possible. The helper is here mostly for backwards compatibility and for situations where more flexibility is needed. .. note:: This helper doesn't transfer mappings, just the data. :arg client: instance of :class:`~elasticsearch.AsyncElasticsearch` to use (for read if `target_client` is specified as well) :arg source_index: index (or list of indices) to read documents from :arg target_index: name of the index in the target cluster to populate :arg query: body for the :meth:`~elasticsearch.AsyncElasticsearch.search` api :arg target_client: optional, is specified will be used for writing (thus enabling reindex between clusters) :arg chunk_size: number of docs in one chunk sent to es (default: 500) :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg op_type: Explicit operation type. Defaults to '_index'. Data streams must be set to 'create'. If not specified, will auto-detect if target_index is a data stream. 
:arg scan_kwargs: additional kwargs to be passed to :func:`~elasticsearch.helpers.async_scan` :arg bulk_kwargs: additional kwargs to be passed to :func:`~elasticsearch.helpers.async_bulk` """ target_client = client if target_client is None else target_client docs = async_scan( client, query=query, index=source_index, scroll=scroll, **scan_kwargs ) async def _change_doc_index(hits, index, op_type): async for h in hits: h["_index"] = index if op_type is not None: h["_op_type"] = op_type if "fields" in h: h.update(h.pop("fields")) yield h kwargs = {"stats_only": True} kwargs.update(bulk_kwargs) is_data_stream = False try: # Verify if the target_index is data stream or index data_streams = await target_client.indices.get_data_stream( target_index, expand_wildcards="all" ) is_data_stream = any( data_stream["name"] == target_index for data_stream in data_streams["data_streams"] ) except (TransportError, KeyError, NotFoundError): # If its not data stream, might be index pass if is_data_stream: if op_type not in (None, "create"): raise ValueError("Data streams must have 'op_type' set to 'create'") else: op_type = "create" return await async_bulk( target_client, _change_doc_index(docs, target_index, op_type), chunk_size=chunk_size, **kwargs, ) elasticsearch-py-7.17.6/elasticsearch/_async/helpers.pyi000066400000000000000000000066131426163262700233460ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import logging from typing import ( Any, AsyncGenerator, AsyncIterable, Callable, Collection, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar, Union, ) from ..serializer import Serializer from .client import AsyncElasticsearch logger: logging.Logger T = TypeVar("T") def _chunk_actions( actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Serializer ) -> AsyncGenerator[Any, None]: ... def _process_bulk_chunk( client: AsyncElasticsearch, bulk_actions: Any, bulk_data: Any, raise_on_exception: bool = ..., raise_on_error: bool = ..., ignore_status: Optional[Union[int, Collection[int]]] = ..., *args: Any, **kwargs: Any ) -> AsyncGenerator[Tuple[bool, Any], None]: ... def aiter(x: Union[Iterable[T], AsyncIterable[T]]) -> AsyncGenerator[T, None]: ... def azip( *iterables: Union[Iterable[T], AsyncIterable[T]] ) -> AsyncGenerator[Tuple[T, ...], None]: ... 
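
# Illustrative sketch (comment only; type-checkers consume the stubs around
# it): ``async_streaming_bulk`` is typed as an async generator of
# ``(ok, item)`` pairs, so a hypothetical ``es``/``actions`` pair would be
# consumed as:
#
#     async for ok, item in async_streaming_bulk(es, actions, max_retries=3):
#         if not ok:
#             print("document failed:", item)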
def async_streaming_bulk( client: AsyncElasticsearch, actions: Union[Iterable[Any], AsyncIterable[Any]], chunk_size: int = ..., max_chunk_bytes: int = ..., raise_on_error: bool = ..., expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., raise_on_exception: bool = ..., max_retries: int = ..., initial_backoff: Union[float, int] = ..., max_backoff: Union[float, int] = ..., yield_ok: bool = ..., ignore_status: Optional[Union[int, Collection[int]]] = ..., *args: Any, **kwargs: Any ) -> AsyncGenerator[Tuple[bool, Any], None]: ... async def async_bulk( client: AsyncElasticsearch, actions: Union[Iterable[Any], AsyncIterable[Any]], stats_only: bool = ..., ignore_status: Optional[Union[int, Collection[int]]] = ..., *args: Any, **kwargs: Any ) -> Tuple[int, Union[int, List[Any]]]: ... def async_scan( client: AsyncElasticsearch, query: Optional[Any] = ..., scroll: str = ..., raise_on_error: bool = ..., preserve_order: bool = ..., size: int = ..., request_timeout: Optional[Union[float, int]] = ..., clear_scroll: bool = ..., scroll_kwargs: Optional[Mapping[str, Any]] = ..., **kwargs: Any ) -> AsyncGenerator[int, None]: ... async def async_reindex( client: AsyncElasticsearch, source_index: Union[str, Collection[str]], target_index: str, query: Any = ..., target_client: Optional[AsyncElasticsearch] = ..., chunk_size: int = ..., scroll: str = ..., op_type: str = ..., scan_kwargs: Optional[Mapping[str, Any]] = ..., bulk_kwargs: Optional[Mapping[str, Any]] = ..., ) -> Tuple[int, Union[int, List[Any]]]: ... elasticsearch-py-7.17.6/elasticsearch/_async/http_aiohttp.py000066400000000000000000000351731426163262700242450ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import asyncio import os import ssl import warnings import urllib3 # type: ignore from ..compat import reraise_exceptions, urlencode from ..connection.base import Connection from ..exceptions import ( ConnectionError, ConnectionTimeout, ImproperlyConfigured, SSLError, ) from ..utils import _client_meta_version from ._extra_imports import aiohttp, aiohttp_exceptions, yarl from .compat import get_running_loop # sentinel value for `verify_certs`. # This is used to detect if a user is passing in a value # for SSL kwargs if also using an SSLContext. 
VERIFY_CERTS_DEFAULT = object() SSL_SHOW_WARN_DEFAULT = object() CA_CERTS = None try: import certifi CA_CERTS = certifi.where() except ImportError: pass class AsyncConnection(Connection): """Base class for Async HTTP connection implementations""" async def perform_request( self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None, ): raise NotImplementedError() async def close(self): raise NotImplementedError() class AIOHttpConnection(AsyncConnection): HTTP_CLIENT_META = ("ai", _client_meta_version(aiohttp.__version__)) def __init__( self, host="localhost", port=None, url_prefix="", timeout=10, http_auth=None, use_ssl=False, verify_certs=VERIFY_CERTS_DEFAULT, ssl_show_warn=SSL_SHOW_WARN_DEFAULT, ca_certs=None, client_cert=None, client_key=None, ssl_version=None, ssl_assert_fingerprint=None, maxsize=10, headers=None, ssl_context=None, http_compress=None, cloud_id=None, api_key=None, opaque_id=None, loop=None, **kwargs, ): """ Default connection class for ``AsyncElasticsearch`` using the `aiohttp` library and the http protocol. :arg host: hostname of the node (default: localhost) :arg port: port to use (integer, default: 9200) :arg url_prefix: optional url prefix for elasticsearch :arg timeout: default timeout in seconds (float, default: 10) :arg http_auth: optional http auth information as either ':' separated string or a tuple :arg use_ssl: use ssl for the connection if `True` :arg verify_certs: whether to verify SSL certificates :arg ssl_show_warn: show warning when verify certs is disabled :arg ca_certs: optional path to CA bundle. See https://urllib3.readthedocs.io/en/latest/security.html#using-certifi-with-urllib3 for instructions how to get default set :arg client_cert: path to the file containing the private key and the certificate, or cert only if using client_key :arg client_key: path to the file containing the private key if using separate cert and key files (client_cert will contain only the cert) :arg ssl_version: version of the SSL protocol to use. Choices are: SSLv23 (default) SSLv2 SSLv3 TLSv1 (see ``PROTOCOL_*`` constants in the ``ssl`` module for exact options for your environment). :arg ssl_assert_hostname: use hostname verification if not `False` :arg ssl_assert_fingerprint: verify the supplied certificate fingerprint if not `None` :arg maxsize: the number of connections which will be kept open to this host. See https://urllib3.readthedocs.io/en/1.4/pools.html#api for more information. :arg headers: any custom http headers to be add to requests :arg http_compress: Use gzip compression :arg cloud_id: The Cloud ID from ElasticCloud. Convenient way to connect to cloud instances. Other host connection params will be ignored. :arg api_key: optional API Key authentication as either base64 encoded string or a tuple. :arg opaque_id: Send this value in the 'X-Opaque-Id' HTTP header For tracing all requests made by this transport. :arg loop: asyncio Event Loop to use with aiohttp. This is set by default to the currently running loop. 
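
        A minimal illustrative configuration (the CA path and credentials
        below are hypothetical)::

            conn = AIOHttpConnection(
                host="localhost",
                port=9200,
                use_ssl=True,
                verify_certs=True,
                ca_certs="/path/to/ca.pem",
                http_auth=("elastic", "changeme"),
            )

        In practice these keyword arguments are usually passed straight to
        ``AsyncElasticsearch`` rather than to the connection class itself.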
""" self.headers = {} super().__init__( host=host, port=port, url_prefix=url_prefix, timeout=timeout, use_ssl=use_ssl, headers=headers, http_compress=http_compress, cloud_id=cloud_id, api_key=api_key, opaque_id=opaque_id, **kwargs, ) if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = ":".join(http_auth) self.headers.update(urllib3.make_headers(basic_auth=http_auth)) # if providing an SSL context, raise error if any other SSL related flag is used if ssl_context and ( (verify_certs is not VERIFY_CERTS_DEFAULT) or (ssl_show_warn is not SSL_SHOW_WARN_DEFAULT) or ca_certs or client_cert or client_key or ssl_version ): warnings.warn( "When using `ssl_context`, all other SSL related kwargs are ignored" ) self.ssl_assert_fingerprint = ssl_assert_fingerprint if self.use_ssl and ssl_context is None: if ssl_version is None: ssl_context = ssl.create_default_context() else: ssl_context = ssl.SSLContext(ssl_version) # Convert all sentinel values to their actual default # values if not using an SSLContext. if verify_certs is VERIFY_CERTS_DEFAULT: verify_certs = True if ssl_show_warn is SSL_SHOW_WARN_DEFAULT: ssl_show_warn = True if verify_certs: ssl_context.verify_mode = ssl.CERT_REQUIRED ssl_context.check_hostname = True else: ssl_context.check_hostname = False ssl_context.verify_mode = ssl.CERT_NONE ca_certs = CA_CERTS if ca_certs is None else ca_certs if verify_certs: if not ca_certs: raise ImproperlyConfigured( "Root certificates are missing for certificate " "validation. Either pass them in using the ca_certs parameter or " "install certifi to use it automatically." ) else: if ssl_show_warn: warnings.warn( "Connecting to %s using SSL with verify_certs=False is insecure." % self.host ) if os.path.isfile(ca_certs): ssl_context.load_verify_locations(cafile=ca_certs) elif os.path.isdir(ca_certs): ssl_context.load_verify_locations(capath=ca_certs) else: raise ImproperlyConfigured("ca_certs parameter is not a path") # Use client_cert and client_key variables for SSL certificate configuration. if client_cert and not os.path.isfile(client_cert): raise ImproperlyConfigured("client_cert is not a path to a file") if client_key and not os.path.isfile(client_key): raise ImproperlyConfigured("client_key is not a path to a file") if client_cert and client_key: ssl_context.load_cert_chain(client_cert, client_key) elif client_cert: ssl_context.load_cert_chain(client_cert) self.headers.setdefault("connection", "keep-alive") self.loop = loop self.session = None # Parameters for creating an aiohttp.ClientSession later. self._limit = maxsize self._http_auth = http_auth self._ssl_context = ssl_context async def perform_request( self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None ): if self.session is None: await self._create_aiohttp_session() assert self.session is not None orig_body = body url_path = self.url_prefix + url if params: query_string = urlencode(params) url_target = "%s?%s" % (url_path, query_string) else: query_string = "" url_target = url_path # There is a bug in aiohttp that disables the re-use # of the connection in the pool when method=HEAD. # See: aio-libs/aiohttp#1769 is_head = False if method == "HEAD": method = "GET" is_head = True # Top-tier tip-toeing happening here. Basically # because Pip's old resolver is bad and wipes out # strict pins in favor of non-strict pins of extras # our [async] extra overrides aiohttp's pin of # yarl. 
yarl released breaking changes, aiohttp pinned # defensively afterwards, but our users don't get # that nice pin that aiohttp set. :( So to play around # this super-defensively we try to import yarl, if we can't # then we pass a string into ClientSession.request() instead. if yarl: # Provide correct URL object to avoid string parsing in low-level code url = yarl.URL.build( scheme=self.scheme, host=self.hostname, port=self.port, path=url_path, query_string=query_string, encoded=True, ) else: url = self.url_prefix + url if query_string: url = "%s?%s" % (url, query_string) url = self.host + url timeout = aiohttp.ClientTimeout( total=timeout if timeout is not None else self.timeout ) req_headers = self.headers.copy() if headers: req_headers.update(headers) if self.http_compress and body: body = self._gzip_compress(body) req_headers["content-encoding"] = "gzip" start = self.loop.time() try: async with self.session.request( method, url, data=body, headers=req_headers, timeout=timeout, fingerprint=self.ssl_assert_fingerprint, ) as response: response_headers = { header.lower(): value for header, value in response.headers.items() } if is_head: # We actually called 'GET' so throw away the data. await response.release() raw_data = "" else: raw_data = await response.read() content_type = response_headers.get("content-type", "") # The 'application/vnd.mapbox-vector-file' type shouldn't be # decoded into text, instead should be forwarded as bytes. if content_type != "application/vnd.mapbox-vector-tile": raw_data = raw_data.decode("utf-8", "surrogatepass") duration = self.loop.time() - start # We want to reraise a cancellation or recursion error. except reraise_exceptions: raise except Exception as e: self.log_request_fail( method, str(url), url_target, orig_body, self.loop.time() - start, exception=e, ) if isinstance(e, aiohttp_exceptions.ServerFingerprintMismatch): raise SSLError("N/A", str(e), e) if isinstance( e, (asyncio.TimeoutError, aiohttp_exceptions.ServerTimeoutError) ): raise ConnectionTimeout("TIMEOUT", str(e), e) raise ConnectionError("N/A", str(e), e) # raise warnings if any from the 'Warnings' header. warning_headers = response.headers.getall("warning", ()) self._raise_warnings(warning_headers) # raise errors based on http status codes, let the client handle those if needed if not (200 <= response.status < 300) and response.status not in ignore: self.log_request_fail( method, str(url), url_target, orig_body, duration, status_code=response.status, response=raw_data, ) self._raise_error(response.status, raw_data) self.log_request_success( method, str(url), url_target, orig_body, response.status, raw_data, duration ) return response.status, response_headers, raw_data async def close(self): """ Explicitly closes connection """ if self.session: await self.session.close() async def _create_aiohttp_session(self): """Creates an aiohttp.ClientSession(). 
This is delayed until the first call to perform_request() so that AsyncTransport has a chance to set AIOHttpConnection.loop """ if self.loop is None: self.loop = get_running_loop() self.session = aiohttp.ClientSession( headers=self.headers, skip_auto_headers=("accept", "accept-encoding", "user-agent"), auto_decompress=True, loop=self.loop, cookie_jar=aiohttp.DummyCookieJar(), response_class=ESClientResponse, connector=aiohttp.TCPConnector( limit=self._limit, use_dns_cache=True, enable_cleanup_closed=True, ssl=self._ssl_context, ), ) class ESClientResponse(aiohttp.ClientResponse): async def text(self, encoding=None, errors="strict"): if self._body is None: await self.read() return self._body.decode("utf-8", "surrogatepass") elasticsearch-py-7.17.6/elasticsearch/_async/http_aiohttp.pyi000066400000000000000000000046221426163262700244110ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, Mapping, MutableMapping, Optional, Tuple, Union from ..connection import Connection from ._extra_imports import aiohttp # type: ignore class AsyncConnection(Connection): async def perform_request( # type: ignore self, method: str, url: str, params: Optional[MutableMapping[str, Any]] = ..., body: Optional[bytes] = ..., timeout: Optional[Union[int, float]] = ..., ignore: Collection[int] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Tuple[int, Mapping[str, str], str]: ... async def close(self) -> None: ... class AIOHttpConnection(AsyncConnection): session: Optional[aiohttp.ClientSession] ssl_assert_fingerprint: Optional[str] def __init__( self, host: str = ..., port: Optional[int] = ..., url_prefix: str = ..., timeout: int = ..., http_auth: Optional[Any] = ..., use_ssl: bool = ..., verify_certs: bool = ..., ssl_show_warn: bool = ..., ca_certs: Optional[Any] = ..., client_cert: Optional[Any] = ..., client_key: Optional[Any] = ..., ssl_version: Optional[Any] = ..., ssl_assert_fingerprint: Optional[Any] = ..., maxsize: int = ..., headers: Optional[Mapping[str, str]] = ..., ssl_context: Optional[Any] = ..., http_compress: Optional[bool] = ..., cloud_id: Optional[str] = ..., api_key: Optional[Any] = ..., opaque_id: Optional[str] = ..., meta_header: bool = ..., loop: Any = ..., **kwargs: Any, ) -> None: ... elasticsearch-py-7.17.6/elasticsearch/_async/transport.py000066400000000000000000000536321426163262700235720ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import asyncio import logging import sys import warnings from itertools import chain from ..connection_pool import ConnectionPool from ..exceptions import ( AuthenticationException, AuthorizationException, ConnectionError, ConnectionTimeout, ElasticsearchWarning, SerializationError, TransportError, ) from ..serializer import JSONSerializer from ..transport import Transport, _ProductChecker, get_host_info from .compat import get_running_loop from .http_aiohttp import AIOHttpConnection logger = logging.getLogger("elasticsearch") class AsyncTransport(Transport): """ Encapsulation of transport-related to logic. Handles instantiation of the individual connections as well as creating a connection pool to hold them. Main interface is the `perform_request` method. """ DEFAULT_CONNECTION_CLASS = AIOHttpConnection def __init__( self, hosts, connection_class=None, connection_pool_class=ConnectionPool, host_info_callback=get_host_info, sniff_on_start=False, sniffer_timeout=None, sniff_timeout=0.1, sniff_on_connection_fail=False, serializer=JSONSerializer(), serializers=None, default_mimetype="application/json", max_retries=3, retry_on_status=(502, 503, 504), retry_on_timeout=False, send_get_body_as="GET", meta_header=True, **kwargs ): """ :arg hosts: list of dictionaries, each containing keyword arguments to create a `connection_class` instance :arg connection_class: subclass of :class:`~elasticsearch.Connection` to use :arg connection_pool_class: subclass of :class:`~elasticsearch.ConnectionPool` to use :arg host_info_callback: callback responsible for taking the node information from `/_cluster/nodes`, along with already extracted information, and producing a list of arguments (same as `hosts` parameter) :arg sniff_on_start: flag indicating whether to obtain a list of nodes from the cluster at startup time :arg sniffer_timeout: number of seconds between automatic sniffs :arg sniff_on_connection_fail: flag controlling if connection failure triggers a sniff :arg sniff_timeout: timeout used for the sniff request - it should be a fast api call and we are talking potentially to more nodes so we want to fail quickly. Not used during initial sniffing (if ``sniff_on_start`` is on) when the connection still isn't initialized. :arg serializer: serializer instance :arg serializers: optional dict of serializer instances that will be used for deserializing data coming from the server. (key is the mimetype) :arg default_mimetype: when no mimetype is specified by the server response assume this mimetype, defaults to `'application/json'` :arg max_retries: maximum number of retries before an exception is propagated :arg retry_on_status: set of HTTP status codes on which we should retry on a different node. defaults to ``(502, 503, 504)`` :arg retry_on_timeout: should timeout trigger a retry on different node? (default `False`) :arg send_get_body_as: for GET requests with body this option allows you to specify an alternate way of execution for environments that don't support passing bodies with GET requests. 
If you set this to 'POST' a POST method will be used instead, if to 'source' then the body will be serialized and passed as a query parameter `source`. :arg meta_header: If True will send the 'X-Elastic-Client-Meta' HTTP header containing simple client metadata. Setting to False will disable the header. Defaults to True. Any extra keyword arguments will be passed to the `connection_class` when creating and instance unless overridden by that connection's options provided as part of the hosts parameter. """ self.sniffing_task = None self.loop = None self._async_init_called = False self._sniff_on_start_event = None # type: asyncio.Event super(AsyncTransport, self).__init__( hosts=[], connection_class=connection_class, connection_pool_class=connection_pool_class, host_info_callback=host_info_callback, sniff_on_start=False, sniffer_timeout=sniffer_timeout, sniff_timeout=sniff_timeout, sniff_on_connection_fail=sniff_on_connection_fail, serializer=serializer, serializers=serializers, default_mimetype=default_mimetype, max_retries=max_retries, retry_on_status=retry_on_status, retry_on_timeout=retry_on_timeout, send_get_body_as=send_get_body_as, meta_header=meta_header, **kwargs, ) # Don't enable sniffing on Cloud instances. if kwargs.get("cloud_id", False): sniff_on_start = False # Since we defer connections / sniffing to not occur # within the constructor we never want to signal to # our parent to 'sniff_on_start' or non-empty 'hosts'. self.hosts = hosts self.sniff_on_start = sniff_on_start async def _async_init(self): """This is our stand-in for an async constructor. Everything that was deferred within __init__() should be done here now. This method will only be called once per AsyncTransport instance and is called from one of AsyncElasticsearch.__aenter__(), AsyncTransport.perform_request() or AsyncTransport.get_connection() """ # Detect the async loop we're running in and set it # on all already created HTTP connections. self.loop = get_running_loop() self.kwargs["loop"] = self.loop # Set our 'verified_once' implementation to one that # works with 'asyncio' instead of 'threading' self._verify_elasticsearch_lock = asyncio.Lock() # Now that we have a loop we can create all our HTTP connections... self.set_connections(self.hosts) self.seed_connections = list(self.connection_pool.connections[:]) # ... and we can start sniffing in the background. if self.sniffing_task is None and self.sniff_on_start: # Create an asyncio.Event for future calls to block on # until the initial sniffing task completes. self._sniff_on_start_event = asyncio.Event() try: self.last_sniff = self.loop.time() self.create_sniff_task(initial=True) # Since this is the first one we wait for it to complete # in case there's an error it'll get raised here. await self.sniffing_task # If the task gets cancelled here it likely means the # transport got closed. except asyncio.CancelledError: pass # Once we exit this section we want to unblock any _async_calls() # that are blocking on our initial sniff attempt regardless of it # was successful or not. finally: self._sniff_on_start_event.set() async def _async_call(self): """This method is called within any async method of AsyncTransport where the transport is not closing. 
This will check to see if we should call our _async_init() or create a new sniffing task """ if not self._async_init_called: self._async_init_called = True await self._async_init() # If the initial sniff_on_start hasn't returned yet # then we need to wait for node information to come back # or for the task to be cancelled via AsyncTransport.close() if self._sniff_on_start_event and not self._sniff_on_start_event.is_set(): # This is already a no-op if the event is set but we try to # avoid an 'await' by checking 'not event.is_set()' above first. await self._sniff_on_start_event.wait() if self.sniffer_timeout: if self.loop.time() >= self.last_sniff + self.sniffer_timeout: self.create_sniff_task() async def _get_node_info(self, conn, initial): try: # use small timeout for the sniffing request, should be a fast api call _, headers, node_info = await conn.perform_request( "GET", "/_nodes/_all/http", timeout=self.sniff_timeout if not initial else None, ) return self.deserializer.loads(node_info, headers.get("content-type")) except Exception: pass return None async def _get_sniff_data(self, initial=False): previous_sniff = self.last_sniff # reset last_sniff timestamp self.last_sniff = self.loop.time() # use small timeout for the sniffing request, should be a fast api call timeout = self.sniff_timeout if not initial else None def _sniff_request(conn): return self.loop.create_task( conn.perform_request("GET", "/_nodes/_all/http", timeout=timeout) ) # Go through all current connections as well as the # seed_connections for good measure tasks = [] for conn in self.connection_pool.connections: tasks.append(_sniff_request(conn)) for conn in self.seed_connections: # Ensure that we don't have any duplication within seed_connections. if conn in self.connection_pool.connections: continue tasks.append(_sniff_request(conn)) done = () try: while tasks: # The 'loop' keyword is deprecated in 3.8+ so don't # pass it to asyncio.wait() unless we're on <=3.7 wait_kwargs = {"loop": self.loop} if sys.version_info < (3, 8) else {} # execute sniff requests in parallel, wait for first to return done, tasks = await asyncio.wait( tasks, return_when=asyncio.FIRST_COMPLETED, **wait_kwargs ) # go through all the finished tasks for t in done: try: _, headers, node_info = t.result() # Lowercase all the header names for consistency in accessing them. headers = { header.lower(): value for header, value in headers.items() } node_info = self.deserializer.loads( node_info, headers.get("content-type") ) except (ConnectionError, SerializationError): continue node_info = list(node_info["nodes"].values()) return node_info else: # no task has finished completely raise TransportError("N/A", "Unable to sniff hosts.") except Exception: # keep the previous value on error self.last_sniff = previous_sniff raise finally: # Cancel all the pending tasks for task in chain(done, tasks): task.cancel() async def sniff_hosts(self, initial=False): """Either spawns a sniffing_task which does regular sniffing over time or does a single sniffing session and awaits the results. """ # Without a loop we can't do anything. if not self.loop: if initial: raise RuntimeError("Event loop not running on initial sniffing task") return node_info = await self._get_sniff_data(initial) hosts = list(filter(None, (self._get_host_info(n) for n in node_info))) # we weren't able to get any nodes, maybe using an incompatible # transport_schema or host_info_callback blocked all - raise error. 
if not hosts: raise TransportError( "N/A", "Unable to sniff hosts - no viable hosts found." ) # remember current live connections orig_connections = self.connection_pool.connections[:] self.set_connections(hosts) # close those connections that are not in use any more for c in orig_connections: if c not in self.connection_pool.connections: await c.close() def create_sniff_task(self, initial=False): """ Initiate a sniffing task. Make sure we only have one sniff request running at any given time. If a finished sniffing request is around, collect its result (which can raise its exception). """ if self.sniffing_task and self.sniffing_task.done(): try: if self.sniffing_task is not None: self.sniffing_task.result() finally: self.sniffing_task = None if self.sniffing_task is None: self.sniffing_task = self.loop.create_task(self.sniff_hosts(initial)) def mark_dead(self, connection): """ Mark a connection as dead (failed) in the connection pool. If sniffing on failure is enabled this will initiate the sniffing process. :arg connection: instance of :class:`~elasticsearch.Connection` that failed """ self.connection_pool.mark_dead(connection) if self.sniff_on_connection_fail: self.create_sniff_task() def get_connection(self): return self.connection_pool.get_connection() async def perform_request(self, method, url, headers=None, params=None, body=None): """ Perform the actual request. Retrieve a connection from the connection pool, pass all the information to it's perform_request method and return the data. If an exception was raised, mark the connection as failed and retry (up to `max_retries` times). If the operation was successful and the connection used was previously marked as dead, mark it as live, resetting it's failure count. :arg method: HTTP method to use :arg url: absolute url (without host) to target :arg headers: dictionary of headers, will be handed over to the underlying :class:`~elasticsearch.Connection` class :arg params: dictionary of query parameters, will be handed over to the underlying :class:`~elasticsearch.Connection` class for serialization :arg body: body of the request, will be serialized using serializer and passed to the connection """ await self._async_call() method, headers, params, body, ignore, timeout = self._resolve_request_args( method, headers, params, body ) # Before we make the actual API call we verify the Elasticsearch instance. if self._verified_elasticsearch is None: await self._do_verify_elasticsearch(headers=headers, timeout=timeout) # If '_verified_elasticsearch' isn't 'True' then we raise an error. if self._verified_elasticsearch is not True: _ProductChecker.raise_error(self._verified_elasticsearch) for attempt in range(self.max_retries + 1): connection = self.get_connection() try: status, headers_response, data = await connection.perform_request( method, url, params, body, headers=headers, ignore=ignore, timeout=timeout, ) # Lowercase all the header names for consistency in accessing them. headers_response = { header.lower(): value for header, value in headers_response.items() } except TransportError as e: if method == "HEAD" and e.status_code == 404: return False retry = False if isinstance(e, ConnectionTimeout): retry = self.retry_on_timeout elif isinstance(e, ConnectionError): retry = True elif e.status_code in self.retry_on_status: retry = True if retry: try: # only mark as dead if we are retrying self.mark_dead(connection) except TransportError: # If sniffing on failure, it could fail too. Catch the # exception not to interrupt the retries. 
pass # raise exception on last retry if attempt == self.max_retries: raise e else: raise e else: # connection didn't fail, confirm it's live status self.connection_pool.mark_live(connection) if method == "HEAD": return 200 <= status < 300 if data: data = self.deserializer.loads( data, headers_response.get("content-type") ) return data async def close(self): """ Explicitly closes connections """ if self.sniffing_task: try: self.sniffing_task.cancel() await self.sniffing_task except asyncio.CancelledError: pass self.sniffing_task = None for connection in self.connection_pool.connections: await connection.close() async def _do_verify_elasticsearch(self, headers, timeout): """Verifies that we're connected to an Elasticsearch cluster. This is done at least once before the first actual API call and makes a single request to the 'GET /' API endpoint and check version along with other details of the response. If we're unable to verify we're talking to Elasticsearch but we're also unable to rule it out due to a permission error we instead emit an 'ElasticsearchWarning'. """ # Ensure that there's only one async exec within this section # at a time to not emit unnecessary index API calls. async with self._verify_elasticsearch_lock: # Product check has already been completed while we were # waiting our turn, no need to do again. if self._verified_elasticsearch is not None: return headers = { header.lower(): value for header, value in (headers or {}).items() } # We know we definitely want JSON so request it via 'accept' headers.setdefault("accept", "application/json") info_headers = {} info_response = {} error = None attempted_conns = [] for conn in chain(self.connection_pool.connections, self.seed_connections): # Only attempt once per connection max. if conn in attempted_conns: continue attempted_conns.append(conn) try: _, info_headers, info_response = await conn.perform_request( "GET", "/", headers=headers, timeout=timeout ) # Lowercase all the header names for consistency in accessing them. info_headers = { header.lower(): value for header, value in info_headers.items() } info_response = self.deserializer.loads( info_response, mimetype="application/json" ) break # Previous versions of 7.x Elasticsearch required a specific # permission so if we receive HTTP 401/403 we should warn # instead of erroring out. except (AuthenticationException, AuthorizationException): warnings.warn( ( "The client is unable to verify that the server is " "Elasticsearch due security privileges on the server side" ), ElasticsearchWarning, stacklevel=4, ) self._verified_elasticsearch = True return # This connection didn't work, we'll try another. except (ConnectionError, SerializationError, TransportError) as err: if error is None: error = err # If we received a connection error and weren't successful # anywhere then we re-raise the more appropriate error. if error and not info_response: raise error # Check the information we got back from the index request. self._verified_elasticsearch = _ProductChecker.check_product( info_headers, info_response ) elasticsearch-py-7.17.6/elasticsearch/_async/transport.pyi000066400000000000000000000061061426163262700237350ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Callable, Collection, Dict, List, Mapping, Optional, Type, Union from ..connection import Connection from ..connection_pool import ConnectionPool from ..serializer import Deserializer, Serializer class AsyncTransport(object): DEFAULT_CONNECTION_CLASS: Type[Connection] connection_pool: ConnectionPool deserializer: Deserializer max_retries: int retry_on_timeout: bool retry_on_status: Collection[int] send_get_body_as: str serializer: Serializer connection_pool_class: Type[ConnectionPool] connection_class: Type[Connection] kwargs: Any hosts: Optional[List[Dict[str, Any]]] seed_connections: List[Connection] sniffer_timeout: Optional[float] sniff_on_start: bool sniff_on_connection_fail: bool last_sniff: float sniff_timeout: Optional[float] host_info_callback: Callable[ [Dict[str, Any], Optional[Dict[str, Any]]], Dict[str, Any] ] def __init__( self, hosts: Any, connection_class: Optional[Type[Any]] = ..., connection_pool_class: Type[ConnectionPool] = ..., host_info_callback: Callable[ [Dict[str, Any], Dict[str, Any]], Optional[Dict[str, Any]] ] = ..., sniff_on_start: bool = ..., sniffer_timeout: Optional[float] = ..., sniff_timeout: float = ..., sniff_on_connection_fail: bool = ..., serializer: Serializer = ..., serializers: Optional[Mapping[str, Serializer]] = ..., default_mimetype: str = ..., max_retries: int = ..., retry_on_status: Collection[int] = ..., retry_on_timeout: bool = ..., send_get_body_as: str = ..., meta_header: bool = ..., **kwargs: Any ) -> None: ... def add_connection(self, host: Any) -> None: ... def set_connections(self, hosts: Collection[Any]) -> None: ... def get_connection(self) -> Connection: ... def sniff_hosts(self, initial: bool = ...) -> None: ... def mark_dead(self, connection: Connection) -> None: ... async def perform_request( self, method: str, url: str, headers: Optional[Mapping[str, str]] = ..., params: Optional[Mapping[str, Any]] = ..., body: Optional[Any] = ..., ) -> Union[bool, Any]: ... async def close(self) -> None: ... elasticsearch-py-7.17.6/elasticsearch/_version.py000066400000000000000000000014561426163262700221030ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
__versionstr__ = "7.17.6" elasticsearch-py-7.17.6/elasticsearch/client/000077500000000000000000000000001426163262700211555ustar00rootroot00000000000000elasticsearch-py-7.17.6/elasticsearch/client/__init__.py000066400000000000000000003125021426163262700232710ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import unicode_literals import logging from ..transport import Transport, TransportError from .async_search import AsyncSearchClient from .autoscaling import AutoscalingClient from .cat import CatClient from .ccr import CcrClient from .cluster import ClusterClient from .dangling_indices import DanglingIndicesClient from .data_frame import Data_FrameClient from .deprecation import DeprecationClient from .enrich import EnrichClient from .eql import EqlClient from .features import FeaturesClient from .fleet import FleetClient from .graph import GraphClient from .ilm import IlmClient from .indices import IndicesClient from .ingest import IngestClient from .license import LicenseClient from .logstash import LogstashClient from .migration import MigrationClient from .ml import MlClient from .monitoring import MonitoringClient from .nodes import NodesClient from .remote import RemoteClient from .rollup import RollupClient from .searchable_snapshots import SearchableSnapshotsClient from .security import SecurityClient from .shutdown import ShutdownClient from .slm import SlmClient from .snapshot import SnapshotClient from .sql import SqlClient from .ssl import SslClient from .tasks import TasksClient from .text_structure import TextStructureClient from .transform import TransformClient from .utils import SKIP_IN_PATH, _bulk_body, _make_path, _normalize_hosts, query_params from .watcher import WatcherClient from .xpack import XPackClient logger = logging.getLogger("elasticsearch") class Elasticsearch(object): """ Elasticsearch low-level client. Provides a straightforward mapping from Python to ES REST endpoints. The instance has attributes ``cat``, ``cluster``, ``indices``, ``ingest``, ``nodes``, ``snapshot`` and ``tasks`` that provide access to instances of :class:`~elasticsearch.client.CatClient`, :class:`~elasticsearch.client.ClusterClient`, :class:`~elasticsearch.client.IndicesClient`, :class:`~elasticsearch.client.IngestClient`, :class:`~elasticsearch.client.NodesClient`, :class:`~elasticsearch.client.SnapshotClient` and :class:`~elasticsearch.client.TasksClient` respectively. This is the preferred (and only supported) way to get access to those classes and their methods. 
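    For illustration, a minimal round trip might look like this (the host,
    index name, and document are assumptions made for the example, not
    defaults of this class)::

        es = Elasticsearch(["localhost:9200"])
        # index a document, then read it back
        es.index(index="my-index", id="1", body={"title": "hello"})
        doc = es.get(index="my-index", id="1")
        print(doc["_source"])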
You can specify your own connection class which should be used by providing the ``connection_class`` parameter:: # create connection to localhost using the ThriftConnection es = Elasticsearch(connection_class=ThriftConnection) If you want to turn on :ref:`sniffing` you have several options (described in :class:`~elasticsearch.Transport`):: # create connection that will automatically inspect the cluster to get # the list of active nodes. Start with nodes running on 'esnode1' and # 'esnode2' es = Elasticsearch( ['esnode1', 'esnode2'], # sniff before doing anything sniff_on_start=True, # refresh nodes after a node fails to respond sniff_on_connection_fail=True, # and also every 60 seconds sniffer_timeout=60 ) Different hosts can have different parameters, use a dictionary per node to specify those:: # connect to localhost directly and another node using SSL on port 443 # and an url_prefix. Note that ``port`` needs to be an int. es = Elasticsearch([ {'host': 'localhost'}, {'host': 'othernode', 'port': 443, 'url_prefix': 'es', 'use_ssl': True}, ]) If using SSL, there are several parameters that control how we deal with certificates (see :class:`~elasticsearch.Urllib3HttpConnection` for detailed description of the options):: es = Elasticsearch( ['localhost:443', 'other_host:443'], # turn on SSL use_ssl=True, # make sure we verify SSL certificates verify_certs=True, # provide a path to CA certs on disk ca_certs='/path/to/CA_certs' ) If using SSL, but don't verify the certs, a warning message is showed optionally (see :class:`~elasticsearch.Urllib3HttpConnection` for detailed description of the options):: es = Elasticsearch( ['localhost:443', 'other_host:443'], # turn on SSL use_ssl=True, # no verify SSL certificates verify_certs=False, # don't show warnings about ssl certs verification ssl_show_warn=False ) SSL client authentication is supported (see :class:`~elasticsearch.Urllib3HttpConnection` for detailed description of the options):: es = Elasticsearch( ['localhost:443', 'other_host:443'], # turn on SSL use_ssl=True, # make sure we verify SSL certificates verify_certs=True, # provide a path to CA certs on disk ca_certs='/path/to/CA_certs', # PEM formatted SSL client certificate client_cert='/path/to/clientcert.pem', # PEM formatted SSL client key client_key='/path/to/clientkey.pem' ) Alternatively you can use RFC-1738 formatted URLs, as long as they are not in conflict with other options:: es = Elasticsearch( [ 'http://user:secret@localhost:9200/', 'https://user:secret@other_host:443/production' ], verify_certs=True ) By default, `JSONSerializer `_ is used to encode all outgoing requests. However, you can implement your own custom serializer:: from elasticsearch.serializer import JSONSerializer class SetEncoder(JSONSerializer): def default(self, obj): if isinstance(obj, set): return list(obj) if isinstance(obj, Something): return 'CustomSomethingRepresentation' return JSONSerializer.default(self, obj) es = Elasticsearch(serializer=SetEncoder()) """ def __init__(self, hosts=None, transport_class=Transport, **kwargs): """ :arg hosts: list of nodes, or a single node, we should connect to. Node should be a dictionary ({"host": "localhost", "port": 9200}), the entire dictionary will be passed to the :class:`~elasticsearch.Connection` class as kwargs, or a string in the format of ``host[:port]`` which will be translated to a dictionary automatically. If no value is given the :class:`~elasticsearch.Connection` class defaults will be used. 
:arg transport_class: :class:`~elasticsearch.Transport` subclass to use. :arg kwargs: any additional arguments will be passed on to the :class:`~elasticsearch.Transport` class and, subsequently, to the :class:`~elasticsearch.Connection` instances. """ self.transport = transport_class(_normalize_hosts(hosts), **kwargs) # namespaced clients for compatibility with API names self.async_search = AsyncSearchClient(self) self.autoscaling = AutoscalingClient(self) self.cat = CatClient(self) self.ccr = CcrClient(self) self.cluster = ClusterClient(self) self.dangling_indices = DanglingIndicesClient(self) self.data_frame = Data_FrameClient(self) self.deprecation = DeprecationClient(self) self.enrich = EnrichClient(self) self.eql = EqlClient(self) self.features = FeaturesClient(self) self.fleet = FleetClient(self) self.graph = GraphClient(self) self.ilm = IlmClient(self) self.indices = IndicesClient(self) self.ingest = IngestClient(self) self.license = LicenseClient(self) self.logstash = LogstashClient(self) self.migration = MigrationClient(self) self.ml = MlClient(self) self.monitoring = MonitoringClient(self) self.nodes = NodesClient(self) self.remote = RemoteClient(self) self.rollup = RollupClient(self) self.searchable_snapshots = SearchableSnapshotsClient(self) self.security = SecurityClient(self) self.shutdown = ShutdownClient(self) self.slm = SlmClient(self) self.snapshot = SnapshotClient(self) self.sql = SqlClient(self) self.ssl = SslClient(self) self.tasks = TasksClient(self) self.text_structure = TextStructureClient(self) self.transform = TransformClient(self) self.watcher = WatcherClient(self) self.xpack = XPackClient(self) def __repr__(self): try: # get a list of all connections cons = self.transport.hosts # truncate to 5 if there are too many if len(cons) > 5: cons = cons[:5] + ["..."] return "<{cls}({cons})>".format(cls=self.__class__.__name__, cons=cons) except Exception: # probably operating on custom transport and connection_pool, ignore return super(Elasticsearch, self).__repr__() def __enter__(self): if hasattr(self.transport, "_async_call"): self.transport._async_call() return self def __exit__(self, *_): self.close() def close(self): """Closes the Transport and all internal connections""" self.transport.close() # AUTO-GENERATED-API-DEFINITIONS # @query_params( response_mimetypes=["application/json"], ) def ping(self, params=None, headers=None): """ Returns whether the cluster is running. ``_ """ try: return self.transport.perform_request( "HEAD", "/", params=params, headers=headers ) except TransportError: return False @query_params( response_mimetypes=["application/json"], ) def info(self, params=None, headers=None): """ Returns basic information about the cluster. ``_ """ return self.transport.perform_request( "GET", "/", params=params, headers=headers ) @query_params( "pipeline", "refresh", "routing", "timeout", "version", "version_type", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], body_name="document", ) def create(self, index, id, body, doc_type=None, params=None, headers=None): """ Creates a new document in the index. Returns a 409 response when a document with a same ID already exists in the index. 
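        A minimal sketch (``es`` is an assumed client instance; the index, ID,
        and document are illustrative)::

            es.create(index="my-index", id="1", body={"title": "hello"})
            # a second create() with the same ID raises ConflictError (HTTP 409)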
``_ :arg index: The name of the index :arg id: Document ID :arg document: The document :arg doc_type: The type of the document :arg pipeline: The pipeline id to preprocess incoming documents with :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes. Valid choices: true, false, wait_for :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg version: Explicit version number for concurrency control :arg version_type: Specific version type Valid choices: internal, external, external_gte :arg wait_for_active_shards: Sets the number of shard copies that must be active before proceeding with the index operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) """ for param in (index, id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type in SKIP_IN_PATH: path = _make_path(index, "_create", id) else: path = _make_path(index, doc_type, id, "_create") return self.transport.perform_request( "PUT", path, params=params, headers=headers, body=body ) @query_params( "if_primary_term", "if_seq_no", "op_type", "pipeline", "refresh", "require_alias", "routing", "timeout", "version", "version_type", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], body_name="document", ) def index(self, index, body, doc_type=None, id=None, params=None, headers=None): """ Creates or updates a document in an index. ``_ :arg index: The name of the index :arg document: The document :arg doc_type: The type of the document :arg id: Document ID :arg if_primary_term: only perform the index operation if the last operation that has changed the document has the specified primary term :arg if_seq_no: only perform the index operation if the last operation that has changed the document has the specified sequence number :arg op_type: Explicit operation type. Defaults to `index` for requests with an explicit document ID, and to `create`for requests without an explicit document ID Valid choices: index, create :arg pipeline: The pipeline id to preprocess incoming documents with :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes. Valid choices: true, false, wait_for :arg require_alias: When true, requires destination to be an alias. Default is false :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg version: Explicit version number for concurrency control :arg version_type: Specific version type Valid choices: internal, external, external_gte :arg wait_for_active_shards: Sets the number of shard copies that must be active before proceeding with the index operation. Defaults to 1, meaning the primary shard only. 
Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) """ for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type is None: doc_type = "_doc" return self.transport.perform_request( "POST" if id in SKIP_IN_PATH else "PUT", _make_path(index, doc_type, id), params=params, headers=headers, body=body, ) @query_params( "_source", "_source_excludes", "_source_includes", "pipeline", "refresh", "require_alias", "routing", "timeout", "wait_for_active_shards", request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) def bulk(self, body, index=None, doc_type=None, params=None, headers=None): """ Allows to perform multiple index/update/delete operations in a single request. ``_ :arg body: The operation definition and data (action-data pairs), separated by newlines :arg index: Default index for items which don't provide one :arg doc_type: Default document type for items which don't provide one :arg _source: True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub- request :arg _source_excludes: Default list of fields to exclude from the returned _source field, can be overridden on each sub-request :arg _source_includes: Default list of fields to extract and return from the _source field, can be overridden on each sub-request :arg pipeline: The pipeline id to preprocess incoming documents with :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes. Valid choices: true, false, wait_for :arg require_alias: Sets require_alias for all incoming documents. Defaults to unset (false) :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Sets the number of shard copies that must be active before proceeding with the bulk operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return self.transport.perform_request( "POST", _make_path(index, doc_type, "_bulk"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json", "text/plain"], response_mimetypes=["application/json"], body_params=["scroll_id"], ) def clear_scroll(self, body=None, scroll_id=None, params=None, headers=None): """ Explicitly clears the search context for a scroll. 
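        For illustration, releasing a scroll context once iteration is
        finished (the index name and scroll flow are assumptions)::

            resp = es.search(index="my-index", scroll="2m", size=100)
            sid = resp["_scroll_id"]
            # ... consume resp["hits"]["hits"] ...
            es.clear_scroll(scroll_id=sid)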
``_ :arg body: A comma-separated list of scroll IDs to clear if none was specified via the scroll_id parameter :arg scroll_id: A comma-separated list of scroll IDs to clear """ if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'scroll_id'.") elif scroll_id and not body: body = {"scroll_id": [scroll_id]} elif scroll_id: params["scroll_id"] = scroll_id return self.transport.perform_request( "DELETE", "/_search/scroll", params=params, headers=headers, body=body ) @query_params( "allow_no_indices", "analyze_wildcard", "analyzer", "default_operator", "df", "expand_wildcards", "ignore_throttled", "ignore_unavailable", "lenient", "min_score", "preference", "q", "routing", "terminate_after", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def count(self, body=None, index=None, doc_type=None, params=None, headers=None): """ Returns number of documents matching a query. ``_ :arg body: A query to restrict the results specified with the Query DSL (optional) :arg index: A comma-separated list of indices to restrict the results :arg doc_type: A comma-separated list of types to restrict the results :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg default_operator: The default operator for query string query (AND or OR) Valid choices: AND, OR Default: OR :arg df: The field to use as default where no field prefix is given in the query string :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_throttled: Whether specified concrete, expanded or aliased indices should be ignored when throttled :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg min_score: Include only documents with a specific `_score` value in the result :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg q: Query in the Lucene query string syntax :arg routing: A comma-separated list of specific routing values :arg terminate_after: The maximum count for each shard, upon reaching which the query execution will terminate early """ return self.transport.perform_request( "POST", _make_path(index, doc_type, "_count"), params=params, headers=headers, body=body, ) @query_params( "if_primary_term", "if_seq_no", "refresh", "routing", "timeout", "version", "version_type", "wait_for_active_shards", response_mimetypes=["application/json"], ) def delete(self, index, id, doc_type=None, params=None, headers=None): """ Removes a document from the index. 
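        A minimal sketch (assumed client ``es`` and illustrative index/ID)::

            es.delete(index="my-index", id="1")
            # deleting a missing document raises NotFoundError
            # unless ignore=404 is passed
            es.delete(index="my-index", id="1", ignore=404)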
``_ :arg index: The name of the index :arg id: The document ID :arg doc_type: The type of the document :arg if_primary_term: only perform the delete operation if the last operation that has changed the document has the specified primary term :arg if_seq_no: only perform the delete operation if the last operation that has changed the document has the specified sequence number :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes. Valid choices: true, false, wait_for :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg version: Explicit version number for concurrency control :arg version_type: Specific version type Valid choices: internal, external, external_gte, force :arg wait_for_active_shards: Sets the number of shard copies that must be active before proceeding with the delete operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type in SKIP_IN_PATH: doc_type = "_doc" return self.transport.perform_request( "DELETE", _make_path(index, doc_type, id), params=params, headers=headers ) @query_params( "allow_no_indices", "analyze_wildcard", "analyzer", "conflicts", "default_operator", "df", "expand_wildcards", "from_", "ignore_unavailable", "lenient", "max_docs", "preference", "q", "refresh", "request_cache", "requests_per_second", "routing", "scroll", "scroll_size", "search_timeout", "search_type", "size", "slices", "sort", "stats", "terminate_after", "timeout", "version", "wait_for_active_shards", "wait_for_completion", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def delete_by_query(self, index, body, doc_type=None, params=None, headers=None): """ Deletes documents matching the provided query. ``_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg body: The search definition using the Query DSL :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg conflicts: What to do when the delete by query hits version conflicts? Valid choices: abort, proceed Default: abort :arg default_operator: The default operator for query string query (AND or OR) Valid choices: AND, OR Default: OR :arg df: The field to use as default where no field prefix is given in the query string :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, hidden, none, all Default: open :arg from_: Starting offset (default: 0) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg max_docs: Maximum number of documents to process (default: all documents) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg q: Query in the Lucene query string syntax :arg refresh: Should the effected indexes be refreshed? :arg request_cache: Specify if request cache should be used for this request or not, defaults to index level setting :arg requests_per_second: The throttle for this request in sub- requests per second. -1 means no throttle. :arg routing: A comma-separated list of specific routing values :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg scroll_size: Size on the scroll request powering the delete by query Default: 100 :arg search_timeout: Explicit timeout for each search request. Defaults to no timeout. :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg size: Deprecated, please use `max_docs` instead :arg slices: The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be set to `auto`. Default: 1 :arg sort: A comma-separated list of : pairs :arg stats: Specific 'tag' of the request for logging and statistical purposes :arg terminate_after: The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early. :arg timeout: Time each individual bulk request should wait for shards that are unavailable. Default: 1m :arg version: Specify whether to return document version as part of a hit :arg wait_for_active_shards: Sets the number of shard copies that must be active before proceeding with the delete by query operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) :arg wait_for_completion: Should the request should block until the delete by query is complete. Default: True """ if "from_" in params: params["from"] = params.pop("from_") for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "POST", _make_path(index, doc_type, "_delete_by_query"), params=params, headers=headers, body=body, ) @query_params( "requests_per_second", response_mimetypes=["application/json"], ) def delete_by_query_rethrottle(self, task_id, params=None, headers=None): """ Changes the number of requests per second for a particular Delete By Query operation. ``_ :arg task_id: The task id to rethrottle :arg requests_per_second: The throttle to set on this request in floating sub-requests per second. -1 means set no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") return self.transport.perform_request( "POST", _make_path("_delete_by_query", task_id, "_rethrottle"), params=params, headers=headers, ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) def delete_script(self, id, params=None, headers=None): """ Deletes a script. 
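        A minimal sketch of a store-then-delete round trip (the script ID and
        source are illustrative)::

            es.put_script(
                id="my-stored-script",
                body={"script": {"lang": "painless", "source": "doc['f'].value * 2"}},
            )
            es.delete_script(id="my-stored-script")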
``_ :arg id: Script ID :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "DELETE", _make_path("_scripts", id), params=params, headers=headers ) @query_params( "_source", "_source_excludes", "_source_includes", "preference", "realtime", "refresh", "routing", "stored_fields", "version", "version_type", response_mimetypes=["application/json"], ) def exists(self, index, id, doc_type=None, params=None, headers=None): """ Returns information about whether a document exists in an index. ``_ :arg index: The name of the index :arg id: The document ID :arg doc_type: The type of the document (use `_all` to fetch the first document matching the ID across all types) :arg _source: True or false to return the _source field or not, or a list of fields to return :arg _source_excludes: A list of fields to exclude from the returned _source field :arg _source_includes: A list of fields to extract and return from the _source field :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg realtime: Specify whether to perform the operation in realtime or search mode :arg refresh: Refresh the shard containing the document before performing the operation :arg routing: Specific routing value :arg stored_fields: A comma-separated list of stored fields to return in the response :arg version: Explicit version number for concurrency control :arg version_type: Specific version type Valid choices: internal, external, external_gte, force """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type in SKIP_IN_PATH: doc_type = "_doc" return self.transport.perform_request( "HEAD", _make_path(index, doc_type, id), params=params, headers=headers ) @query_params( "_source", "_source_excludes", "_source_includes", "preference", "realtime", "refresh", "routing", "version", "version_type", response_mimetypes=["application/json"], ) def exists_source(self, index, id, doc_type=None, params=None, headers=None): """ Returns information about whether a document source exists in an index. 
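        A minimal sketch; as a HEAD request this returns ``True``/``False``
        rather than raising (index and ID are illustrative)::

            if es.exists_source(index="my-index", id="1"):
                src = es.get_source(index="my-index", id="1")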
        ``_

        :arg index: The name of the index
        :arg id: The document ID
        :arg doc_type: The type of the document; deprecated and optional
            starting with 7.0
        :arg _source: True or false to return the _source field or not, or a
            list of fields to return
        :arg _source_excludes: A list of fields to exclude from the returned
            _source field
        :arg _source_includes: A list of fields to extract and return from the
            _source field
        :arg preference: Specify the node or shard the operation should be
            performed on (default: random)
        :arg realtime: Specify whether to perform the operation in realtime or
            search mode
        :arg refresh: Refresh the shard containing the document before
            performing the operation
        :arg routing: Specific routing value
        :arg version: Explicit version number for concurrency control
        :arg version_type: Specific version type  Valid choices: internal,
            external, external_gte, force
        """
        for param in (index, id):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        if doc_type in SKIP_IN_PATH:
            path = _make_path(index, "_source", id)
        else:
            path = _make_path(index, doc_type, id, "_source")

        return self.transport.perform_request(
            "HEAD", path, params=params, headers=headers
        )

    @query_params(
        "_source",
        "_source_excludes",
        "_source_includes",
        "analyze_wildcard",
        "analyzer",
        "default_operator",
        "df",
        "lenient",
        "preference",
        "q",
        "routing",
        "stored_fields",
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def explain(self, index, id, body=None, doc_type=None, params=None, headers=None):
        """
        Returns information about why a specific document matches (or doesn't
        match) a query.

        ``_

        :arg index: The name of the index
        :arg id: The document ID
        :arg body: The query definition using the Query DSL
        :arg doc_type: The type of the document
        :arg _source: True or false to return the _source field or not, or a
            list of fields to return
        :arg _source_excludes: A list of fields to exclude from the returned
            _source field
        :arg _source_includes: A list of fields to extract and return from the
            _source field
        :arg analyze_wildcard: Specify whether wildcards and prefix queries in
            the query string query should be analyzed (default: false)
        :arg analyzer: The analyzer for the query string query
        :arg default_operator: The default operator for query string query (AND
            or OR)  Valid choices: AND, OR  Default: OR
        :arg df: The default field for query string query (default: _all)
        :arg lenient: Specify whether format-based query failures (such as
            providing text to a numeric field) should be ignored
        :arg preference: Specify the node or shard the operation should be
            performed on (default: random)
        :arg q: Query in the Lucene query string syntax
        :arg routing: Specific routing value
        :arg stored_fields: A comma-separated list of stored fields to return
            in the response
        """
        for param in (index, id):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        if doc_type in SKIP_IN_PATH:
            path = _make_path(index, "_explain", id)
        else:
            path = _make_path(index, doc_type, id, "_explain")

        return self.transport.perform_request(
            "POST", path, params=params, headers=headers, body=body
        )

    @query_params(
        "allow_no_indices",
        "expand_wildcards",
        "fields",
        "ignore_unavailable",
        "include_unmapped",
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def field_caps(self, body=None, index=None, params=None, headers=None):
        """
        Returns information about the capabilities of fields among multiple
        indices.
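        A minimal sketch (the index name and field patterns are
        illustrative)::

            caps = es.field_caps(index="my-index", fields="title,*_date")
            print(caps["fields"].keys())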
``_ :arg body: An index filter specified with the Query DSL :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg fields: A comma-separated list of field names :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg include_unmapped: Indicates whether unmapped fields should be included in the response. """ return self.transport.perform_request( "POST", _make_path(index, "_field_caps"), params=params, headers=headers, body=body, ) @query_params( "_source", "_source_excludes", "_source_includes", "preference", "realtime", "refresh", "routing", "stored_fields", "version", "version_type", response_mimetypes=["application/json"], ) def get(self, index, id, doc_type=None, params=None, headers=None): """ Returns a document. ``_ :arg index: Name of the index that contains the document. :arg id: Unique identifier of the document. :arg doc_type: The type of the document (use `_all` to fetch the first document matching the ID across all types) :arg _source: True or false to return the _source field or not, or a list of fields to return. :arg _source_excludes: A comma-separated list of source fields to exclude in the response. :arg _source_includes: A comma-separated list of source fields to include in the response. :arg preference: Specifies the node or shard the operation should be performed on. Random by default. :arg realtime: Boolean) If true, the request is real-time as opposed to near-real-time. Default: True :arg refresh: If true, Elasticsearch refreshes the affected shards to make this operation visible to search. If false, do nothing with refreshes. :arg routing: Target the specified primary shard. :arg stored_fields: A comma-separated list of stored fields to return in the response :arg version: Explicit version number for concurrency control. The specified version must match the current version of the document for the request to succeed. :arg version_type: Specific version type: internal, external, external_gte. Valid choices: internal, external, external_gte, force """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type in SKIP_IN_PATH: doc_type = "_doc" return self.transport.perform_request( "GET", _make_path(index, doc_type, id), params=params, headers=headers ) @query_params( "master_timeout", response_mimetypes=["application/json"], ) def get_script(self, id, params=None, headers=None): """ Returns a script. ``_ :arg id: Script ID :arg master_timeout: Specify timeout for connection to master """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "GET", _make_path("_scripts", id), params=params, headers=headers ) @query_params( "_source", "_source_excludes", "_source_includes", "preference", "realtime", "refresh", "routing", "version", "version_type", response_mimetypes=["application/json"], ) def get_source(self, index, id, doc_type=None, params=None, headers=None): """ Returns the source of a document. 
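        A minimal sketch; unlike ``get``, this returns only the ``_source``
        mapping itself (index and ID are illustrative)::

            src = es.get_source(index="my-index", id="1")
            print(src)  # e.g. {"title": "hello"}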
``_ :arg index: The name of the index :arg id: The document ID :arg doc_type: The type of the document; deprecated and optional starting with 7.0 :arg _source: True or false to return the _source field or not, or a list of fields to return :arg _source_excludes: A list of fields to exclude from the returned _source field :arg _source_includes: A list of fields to extract and return from the _source field :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg realtime: Specify whether to perform the operation in realtime or search mode :arg refresh: Refresh the shard containing the document before performing the operation :arg routing: Specific routing value :arg version: Explicit version number for concurrency control :arg version_type: Specific version type Valid choices: internal, external, external_gte, force """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type in SKIP_IN_PATH: path = _make_path(index, "_source", id) else: path = _make_path(index, doc_type, id, "_source") return self.transport.perform_request( "GET", path, params=params, headers=headers ) @query_params( "_source", "_source_excludes", "_source_includes", "preference", "realtime", "refresh", "routing", "stored_fields", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def mget(self, body, index=None, doc_type=None, params=None, headers=None): """ Allows to get multiple documents in one request. ``_ :arg body: Document identifiers; can be either `docs` (containing full document information) or `ids` (when index and type is provided in the URL. :arg index: The name of the index :arg doc_type: The type of the document :arg _source: True or false to return the _source field or not, or a list of fields to return :arg _source_excludes: A list of fields to exclude from the returned _source field :arg _source_includes: A list of fields to extract and return from the _source field :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg realtime: Specify whether to perform the operation in realtime or search mode :arg refresh: Refresh the shard containing the document before performing the operation :arg routing: Specific routing value :arg stored_fields: A comma-separated list of stored fields to return in the response """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", _make_path(index, doc_type, "_mget"), params=params, headers=headers, body=body, ) @query_params( "ccs_minimize_roundtrips", "max_concurrent_searches", "max_concurrent_shard_requests", "pre_filter_shard_size", "rest_total_hits_as_int", "search_type", "typed_keys", request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) def msearch(self, body, index=None, doc_type=None, params=None, headers=None): """ Allows to execute several search operations in one request. 
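        For illustration, the body alternates header and search lines, and
        this client also accepts them as a list of dicts that it serializes
        to NDJSON (the index names and queries are assumptions)::

            body = [
                {"index": "my-index"},
                {"query": {"match_all": {}}},
                {"index": "other-index"},
                {"query": {"match": {"title": "hello"}}},
            ]
            resp = es.msearch(body=body)
            for r in resp["responses"]:
                print(r.get("hits", {}).get("total"))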
``_ :arg body: The request definitions (metadata-search request definition pairs), separated by newlines :arg index: A comma-separated list of index names to use as default :arg doc_type: A comma-separated list of document types to use as default :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests execution Default: true :arg max_concurrent_searches: Controls the maximum number of concurrent searches the multi search api will execute :arg max_concurrent_shard_requests: The number of concurrent shard requests each sub search executes concurrently per node. This value should be used to limit the impact of the search on the cluster in order to limit the number of concurrent shard requests Default: 5 :arg pre_filter_shard_size: A threshold that enforces a pre- filter roundtrip to prefilter search shards based on query rewriting if the number of shards the search request expands to exceeds the threshold. This filter roundtrip can limit the number of shards significantly if for instance a shard can not match any documents based on its rewrite method ie. if date filters are mandatory to match but the shard bounds and the query are disjoint. :arg rest_total_hits_as_int: Indicates whether hits.total should be rendered as an integer or an object in the rest search response :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return self.transport.perform_request( "POST", _make_path(index, doc_type, "_msearch"), params=params, headers=headers, body=body, ) @query_params( "ccs_minimize_roundtrips", "max_concurrent_searches", "rest_total_hits_as_int", "search_type", "typed_keys", request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) def msearch_template( self, body, index=None, doc_type=None, params=None, headers=None ): """ Allows to execute several search template operations in one request. 
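        A minimal sketch, assuming a stored search template named
        ``my-template`` already exists (the template ID, index, and params
        are illustrative)::

            body = [
                {"index": "my-index"},
                {"id": "my-template", "params": {"query_string": "hello"}},
            ]
            resp = es.msearch_template(body=body)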
``_ :arg body: The request definitions (metadata-search request definition pairs), separated by newlines :arg index: A comma-separated list of index names to use as default :arg doc_type: A comma-separated list of document types to use as default :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests execution Default: true :arg max_concurrent_searches: Controls the maximum number of concurrent searches the multi search api will execute :arg rest_total_hits_as_int: Indicates whether hits.total should be rendered as an integer or an object in the rest search response :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return self.transport.perform_request( "POST", _make_path(index, doc_type, "_msearch", "template"), params=params, headers=headers, body=body, ) @query_params( "field_statistics", "fields", "ids", "offsets", "payloads", "positions", "preference", "realtime", "routing", "term_statistics", "version", "version_type", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def mtermvectors( self, body=None, index=None, doc_type=None, params=None, headers=None ): """ Returns multiple termvectors in one request. ``_ :arg body: Define ids, documents, parameters or a list of parameters per document here. You must at least provide a list of document ids. See documentation. :arg index: The index in which the document resides. :arg doc_type: The type of the document. :arg field_statistics: Specifies if document count, sum of document frequencies and sum of total term frequencies should be returned. Applies to all returned documents unless otherwise specified in body "params" or "docs". Default: True :arg fields: A comma-separated list of fields to return. Applies to all returned documents unless otherwise specified in body "params" or "docs". :arg ids: A comma-separated list of documents ids. You must define ids as parameter or set "ids" or "docs" in the request body :arg offsets: Specifies if term offsets should be returned. Applies to all returned documents unless otherwise specified in body "params" or "docs". Default: True :arg payloads: Specifies if term payloads should be returned. Applies to all returned documents unless otherwise specified in body "params" or "docs". Default: True :arg positions: Specifies if term positions should be returned. Applies to all returned documents unless otherwise specified in body "params" or "docs". Default: True :arg preference: Specify the node or shard the operation should be performed on (default: random) .Applies to all returned documents unless otherwise specified in body "params" or "docs". :arg realtime: Specifies if requests are real-time as opposed to near-real-time (default: true). :arg routing: Specific routing value. Applies to all returned documents unless otherwise specified in body "params" or "docs". :arg term_statistics: Specifies if total term frequency and document frequency should be returned. Applies to all returned documents unless otherwise specified in body "params" or "docs". 
:arg version: Explicit version number for concurrency control :arg version_type: Specific version type Valid choices: internal, external, external_gte, force """ if doc_type in SKIP_IN_PATH: path = _make_path(index, "_mtermvectors") else: path = _make_path(index, doc_type, "_mtermvectors") return self.transport.perform_request( "POST", path, params=params, headers=headers, body=body ) @query_params( "master_timeout", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_script(self, id, body, context=None, params=None, headers=None): """ Creates or updates a script. ``_ :arg id: Script ID :arg body: The document :arg context: Script context :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ for param in (id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_scripts", id, context), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "search_type", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def rank_eval(self, body, index=None, params=None, headers=None): """ Allows to evaluate the quality of ranked search results over a set of typical search queries ``_ :arg body: The ranking evaluation search definition, including search requests, document ratings and ranking metric definition. :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", _make_path(index, "_rank_eval"), params=params, headers=headers, body=body, ) @query_params( "max_docs", "refresh", "requests_per_second", "scroll", "slices", "timeout", "wait_for_active_shards", "wait_for_completion", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def reindex(self, body, params=None, headers=None): """ Allows to copy documents from one index to another, optionally filtering the source documents by a query, changing the destination index settings, or fetching the documents from a remote cluster. ``_ :arg body: The search definition using the Query DSL and the prototype for the index request. :arg max_docs: Maximum number of documents to process (default: all documents) :arg refresh: Should the affected indexes be refreshed? :arg requests_per_second: The throttle to set on this request in sub-requests per second. -1 means no throttle. :arg scroll: Control how long to keep the search context alive Default: 5m :arg slices: The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be set to `auto`. 
Default: 1 :arg timeout: Time each individual bulk request should wait for shards that are unavailable. Default: 1m :arg wait_for_active_shards: Sets the number of shard copies that must be active before proceeding with the reindex operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) :arg wait_for_completion: Should the request should block until the reindex is complete. Default: True """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_reindex", params=params, headers=headers, body=body ) @query_params( "requests_per_second", response_mimetypes=["application/json"], ) def reindex_rethrottle(self, task_id, params=None, headers=None): """ Changes the number of requests per second for a particular Reindex operation. ``_ :arg task_id: The task id to rethrottle :arg requests_per_second: The throttle to set on this request in floating sub-requests per second. -1 means set no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") return self.transport.perform_request( "POST", _make_path("_reindex", task_id, "_rethrottle"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def render_search_template(self, body=None, id=None, params=None, headers=None): """ Allows to use the Mustache language to pre-render a search definition. ``_ :arg body: The search definition template and its params :arg id: The id of the stored search template """ return self.transport.perform_request( "POST", _make_path("_render", "template", id), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def scripts_painless_execute(self, body=None, params=None, headers=None): """ Allows an arbitrary script to be executed and a result to be returned ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg body: The script to execute """ return self.transport.perform_request( "POST", "/_scripts/painless/_execute", params=params, headers=headers, body=body, ) @query_params( "rest_total_hits_as_int", "scroll", "scroll_id", request_mimetypes=["application/json"], response_mimetypes=["application/json"], body_params=["scroll", "scroll_id"], ) def scroll(self, body=None, scroll_id=None, params=None, headers=None): """ Allows to retrieve a large numbers of results from a single search request. ``_ :arg body: The scroll ID if not passed by URL or query parameter. :arg scroll_id: The scroll ID :arg rest_total_hits_as_int: If true, the API response’s hit.total property is returned as an integer. If false, the API response’s hit.total property is returned as an object. :arg scroll: Period to retain the search context for scrolling. 
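        For illustration, a paging loop (the index name, page size, and the
        ``handle_hits`` helper are assumptions, not part of this API)::

            resp = es.search(index="my-index", scroll="2m", size=1000)
            while resp["hits"]["hits"]:
                handle_hits(resp["hits"]["hits"])  # hypothetical per-page callback
                resp = es.scroll(scroll_id=resp["_scroll_id"], scroll="2m")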
""" if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'scroll_id'.") elif scroll_id and not body: body = {"scroll_id": scroll_id} elif scroll_id: params["scroll_id"] = scroll_id return self.transport.perform_request( "POST", "/_search/scroll", params=params, headers=headers, body=body ) @query_params( "_source", "_source_excludes", "_source_includes", "allow_no_indices", "allow_partial_search_results", "analyze_wildcard", "analyzer", "batched_reduce_size", "ccs_minimize_roundtrips", "default_operator", "df", "docvalue_fields", "expand_wildcards", "explain", "from_", "ignore_throttled", "ignore_unavailable", "lenient", "max_concurrent_shard_requests", "min_compatible_shard_node", "pre_filter_shard_size", "preference", "q", "request_cache", "rest_total_hits_as_int", "routing", "scroll", "search_type", "seq_no_primary_term", "size", "sort", "stats", "stored_fields", "suggest_field", "suggest_mode", "suggest_size", "suggest_text", "terminate_after", "timeout", "track_scores", "track_total_hits", "typed_keys", "version", request_mimetypes=["application/json"], response_mimetypes=["application/json"], body_params=[ "_source", "aggregations", "aggs", "collapse", "docvalue_fields", "explain", "fields", "from_", "highlight", "indices_boost", "min_score", "pit", "post_filter", "profile", "query", "rescore", "runtime_mappings", "script_fields", "search_after", "seq_no_primary_term", "size", "slice", "sort", "stats", "stored_fields", "suggest", "terminate_after", "timeout", "track_scores", "track_total_hits", "version", ], ) def search(self, body=None, index=None, doc_type=None, params=None, headers=None): """ Returns results matching a query. ``_ :arg body: The search definition using the Query DSL :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg _source: Indicates which source fields are returned for matching documents. These fields are returned in the hits._source property of the search response. :arg _source_excludes: A list of fields to exclude from the returned _source field :arg _source_includes: A list of fields to extract and return from the _source field :arg aggregations: :arg aggs: :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg allow_partial_search_results: Indicate if an error should be returned if there is a partial search failure or timeout Default: True :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg batched_reduce_size: The number of shard results that should be reduced at once on the coordinating node. This value should be used as a protection mechanism to reduce the memory overhead per search request if the potential number of shards in the request can be large. 
Default: 512 :arg ccs_minimize_roundtrips: Indicates whether network round-trips should be minimized as part of cross-cluster search requests execution Default: true :arg collapse: :arg default_operator: The default operator for query string query (AND or OR) Valid choices: AND, OR Default: OR :arg df: The field to use as default where no field prefix is given in the query string :arg docvalue_fields: Array of wildcard (*) patterns. The request returns doc values for field names matching these patterns in the hits.fields property of the response. :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg explain: If true, returns detailed information about score computation as part of a hit. :arg fields: Array of wildcard (*) patterns. The request returns values for field names matching these patterns in the hits.fields property of the response. :arg from_: Starting document offset. By default, you cannot page through more than 10,000 hits using the from and size parameters. To page through more hits, use the search_after parameter. :arg highlight: :arg ignore_throttled: Whether specified concrete, expanded or aliased indices should be ignored when throttled :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg indices_boost: Boosts the _score of documents from specified indices. :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg max_concurrent_shard_requests: The number of concurrent shard requests per node this search executes concurrently. This value should be used to limit the impact of the search on the cluster in order to limit the number of concurrent shard requests Default: 5 :arg min_compatible_shard_node: The minimum compatible version that all shards involved in search should have for this request to be successful :arg min_score: Minimum _score for matching documents. Documents with a lower _score are not included in the search results. :arg pit: Limits the search to a point in time (PIT). If you provide a PIT, you cannot specify an <index> in the request path. :arg post_filter: :arg pre_filter_shard_size: A threshold that enforces a pre-filter roundtrip to prefilter search shards based on query rewriting if the number of shards the search request expands to exceeds the threshold. This filter roundtrip can limit the number of shards significantly if, for instance, a shard cannot match any documents based on its rewrite method, i.e. if date filters are mandatory to match but the shard bounds and the query are disjoint. :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg profile: :arg q: Query in the Lucene query string syntax :arg query: Defines the search definition using the Query DSL. :arg request_cache: Specify if request cache should be used for this request or not, defaults to index level setting :arg rescore: :arg rest_total_hits_as_int: Indicates whether hits.total should be rendered as an integer or an object in the rest search response :arg routing: A comma-separated list of specific routing values :arg runtime_mappings: Defines one or more runtime fields in the search request. These fields take precedence over mapped fields with the same name. :arg script_fields: Retrieve a script evaluation (based on different fields) for each hit.
:arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg search_after: :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg seq_no_primary_term: If true, returns sequence number and primary term of the last modification of each hit. See Optimistic concurrency control. :arg size: The number of hits to return. By default, you cannot page through more than 10,000 hits using the from and size parameters. To page through more hits, use the search_after parameter. :arg slice: :arg sort: :arg stats: Stats groups to associate with the search. Each group maintains a statistics aggregation for its associated searches. You can retrieve these stats using the indices stats API. :arg stored_fields: List of stored fields to return as part of a hit. If no fields are specified, no stored fields are included in the response. If this field is specified, the _source parameter defaults to false. You can pass _source: true to return both source fields and stored fields in the search response. :arg suggest: :arg suggest_field: Specifies which field to use for suggestions. :arg suggest_mode: Specify suggest mode Valid choices: missing, popular, always Default: missing :arg suggest_size: How many suggestions to return in response :arg suggest_text: The source text for which the suggestions should be returned. :arg terminate_after: Maximum number of documents to collect for each shard. If a query reaches this limit, Elasticsearch terminates the query early. Elasticsearch collects documents before sorting. Defaults to 0, which does not terminate query execution early. :arg timeout: Specifies the period of time to wait for a response from each shard. If no response is received before the timeout expires, the request fails and returns an error. Defaults to no timeout. :arg track_scores: If true, calculate and return document scores, even if the scores are not used for sorting. :arg track_total_hits: Number of hits matching the query to count accurately. If true, the exact number of hits is returned at the cost of some performance. If false, the response does not include the total number of hits matching the query. Defaults to 10,000 hits. :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response :arg version: If true, returns document version as part of a hit. """ if "from_" in params: params["from"] = params.pop("from_") return self.transport.perform_request( "POST", _make_path(index, doc_type, "_search"), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "local", "preference", "routing", response_mimetypes=["application/json"], ) def search_shards(self, index=None, params=None, headers=None): """ Returns information about the indices and shards that a search request would be executed against. ``_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg routing: Specific routing value """ return self.transport.perform_request( "GET", _make_path(index, "_search_shards"), params=params, headers=headers )
@query_params( "allow_no_indices", "ccs_minimize_roundtrips", "expand_wildcards", "explain", "ignore_throttled", "ignore_unavailable", "preference", "profile", "rest_total_hits_as_int", "routing", "scroll", "search_type", "typed_keys", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def search_template( self, body, index=None, doc_type=None, params=None, headers=None ): """ Allows using the Mustache language to pre-render a search definition. ``_ :arg body: The search definition template and its params :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg ccs_minimize_roundtrips: Indicates whether network round-trips should be minimized as part of cross-cluster search requests execution Default: true :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg explain: Specify whether to return detailed information about score computation as part of a hit :arg ignore_throttled: Whether specified concrete, expanded or aliased indices should be ignored when throttled :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg profile: Specify whether to profile the query execution :arg rest_total_hits_as_int: Indicates whether hits.total should be rendered as an integer or an object in the rest search response :arg routing: A comma-separated list of specific routing values :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", _make_path(index, doc_type, "_search", "template"), params=params, headers=headers, body=body, )
@query_params( "field_statistics", "fields", "offsets", "payloads", "positions", "preference", "realtime", "routing", "term_statistics", "version", "version_type", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def termvectors( self, index, body=None, doc_type=None, id=None, params=None, headers=None ): """ Returns information and statistics about terms in the fields of a particular document.
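A usage sketch (illustrative only; assumes ``es`` is a connected ``Elasticsearch`` client and that the hypothetical ``my-index`` holds a document ``1`` with a ``text`` field)::

    tv = es.termvectors(index="my-index", id="1", fields=["text"], term_statistics=True)
    terms = tv["term_vectors"]["text"]["terms"]  # per-term frequencies and statistics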
``_ :arg index: The index in which the document resides. :arg body: Define parameters and/or supply a document to get termvectors for. See documentation. :arg doc_type: The type of the document. :arg id: The id of the document; when not specified, a doc param should be supplied. :arg field_statistics: Specifies if document count, sum of document frequencies and sum of total term frequencies should be returned. Default: True :arg fields: A comma-separated list of fields to return. :arg offsets: Specifies if term offsets should be returned. Default: True :arg payloads: Specifies if term payloads should be returned. Default: True :arg positions: Specifies if term positions should be returned. Default: True :arg preference: Specify the node or shard the operation should be performed on (default: random). :arg realtime: Specifies if request is real-time as opposed to near-real-time (default: true). :arg routing: Specific routing value. :arg term_statistics: Specifies if total term frequency and document frequency should be returned. :arg version: Explicit version number for concurrency control :arg version_type: Specific version type Valid choices: internal, external, external_gte, force """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") if doc_type in SKIP_IN_PATH: path = _make_path(index, "_termvectors", id) else: path = _make_path(index, doc_type, id, "_termvectors") return self.transport.perform_request( "POST", path, params=params, headers=headers, body=body )
@query_params( "_source", "_source_excludes", "_source_includes", "if_primary_term", "if_seq_no", "lang", "refresh", "require_alias", "retry_on_conflict", "routing", "timeout", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], body_params=[ "_source", "detect_noop", "doc", "doc_as_upsert", "script", "scripted_upsert", "upsert", ], body_required=True, ) def update(self, index, id, body, doc_type=None, params=None, headers=None): """ Updates a document with a script or partial document. ``_ :arg index: The name of the index :arg id: Document ID :arg body: The request definition requires either `script` or partial `doc` :arg doc_type: The type of the document :arg _source: Set to false to disable source retrieval. You can also specify a comma-separated list of the fields you want to retrieve. :arg _source_excludes: Specify the source fields you want to exclude. :arg _source_includes: Specify the source fields you want to retrieve. :arg detect_noop: Set to false to disable setting 'result' in the response to 'noop' if no change to the document occurred. :arg doc: A partial update to an existing document. :arg doc_as_upsert: Set to true to use the contents of 'doc' as the value of 'upsert'. :arg if_primary_term: Only perform the operation if the document has this primary term. :arg if_seq_no: Only perform the operation if the document has this sequence number. :arg lang: The script language. Default: painless :arg refresh: If 'true', Elasticsearch refreshes the affected shards to make this operation visible to search, if 'wait_for' then wait for a refresh to make this operation visible to search, if 'false' do nothing with refreshes. Valid choices: true, false, wait_for Default: false :arg require_alias: If true, the destination must be an index alias. :arg retry_on_conflict: Specify how many times the operation should be retried when a conflict occurs. :arg routing: Custom value used to route operations to a specific shard.
:arg script: Script to execute to update the document. :arg scripted_upsert: Set to true to execute the script whether or not the document exists. :arg timeout: Period to wait for dynamic mapping updates and active shards. This guarantees Elasticsearch waits for at least the timeout before failing. The actual wait time could be longer, particularly when multiple waits occur. Default: 1m :arg upsert: If the document does not already exist, the contents of 'upsert' are inserted as a new document. If the document exists, the 'script' is executed. :arg wait_for_active_shards: The number of shard copies that must be active before proceeding with the operations. Set to 'all' or any positive integer up to the total number of shards in the index (number_of_replicas+1). Defaults to 1 meaning the primary shard. Default: 1 """ for param in (index, id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") if doc_type in SKIP_IN_PATH: path = _make_path(index, "_update", id) else: path = _make_path(index, doc_type, id, "_update") return self.transport.perform_request( "POST", path, params=params, headers=headers, body=body ) @query_params( "allow_no_indices", "analyze_wildcard", "analyzer", "conflicts", "default_operator", "df", "expand_wildcards", "from_", "ignore_unavailable", "lenient", "max_docs", "pipeline", "preference", "q", "refresh", "request_cache", "requests_per_second", "routing", "scroll", "scroll_size", "search_timeout", "search_type", "size", "slices", "sort", "stats", "terminate_after", "timeout", "version", "version_type", "wait_for_active_shards", "wait_for_completion", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def update_by_query( self, index, body=None, doc_type=None, params=None, headers=None ): """ Performs an update on every document in the index without changing the source, for example to pick up a mapping change. ``_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg body: The search definition using the Query DSL :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg conflicts: What to do when the update by query hits version conflicts? Valid choices: abort, proceed Default: abort :arg default_operator: The default operator for query string query (AND or OR) Valid choices: AND, OR Default: OR :arg df: The field to use as default where no field prefix is given in the query string :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg from_: Starting offset (default: 0) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg max_docs: Maximum number of documents to process (default: all documents) :arg pipeline: Ingest pipeline to set on index requests made by this action. 
(default: none) :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg q: Query in the Lucene query string syntax :arg refresh: Should the affected indexes be refreshed? :arg request_cache: Specify if request cache should be used for this request or not, defaults to index level setting :arg requests_per_second: The throttle to set on this request in sub-requests per second. -1 means no throttle. :arg routing: A comma-separated list of specific routing values :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg scroll_size: Size on the scroll request powering the update by query Default: 100 :arg search_timeout: Explicit timeout for each search request. Defaults to no timeout. :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg size: Deprecated, please use `max_docs` instead :arg slices: The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be set to `auto`. Default: 1 :arg sort: A comma-separated list of <field>:<direction> pairs :arg stats: Specific 'tag' of the request for logging and statistical purposes :arg terminate_after: The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early. :arg timeout: Time each individual bulk request should wait for shards that are unavailable. Default: 1m :arg version: Specify whether to return document version as part of a hit :arg version_type: Should the document increment the version number (internal) on hit or not (reindex) :arg wait_for_active_shards: Sets the number of shard copies that must be active before proceeding with the update by query operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) :arg wait_for_completion: Whether the request should block until the update by query operation is complete. Default: True """ if "from_" in params: params["from"] = params.pop("from_") if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, doc_type, "_update_by_query"), params=params, headers=headers, body=body, )
@query_params( "requests_per_second", response_mimetypes=["application/json"], ) def update_by_query_rethrottle(self, task_id, params=None, headers=None): """ Changes the number of requests per second for a particular Update By Query operation. ``_ :arg task_id: The task id to rethrottle :arg requests_per_second: The throttle to set on this request in floating sub-requests per second. -1 means set no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") return self.transport.perform_request( "POST", _make_path("_update_by_query", task_id, "_rethrottle"), params=params, headers=headers, )
@query_params( response_mimetypes=["application/json"], ) def get_script_context(self, params=None, headers=None): """ Returns all script contexts.
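For example (a sketch; ``es`` is an assumed ``Elasticsearch`` client)::

    for ctx in es.get_script_context()["contexts"]:
        print(ctx["name"])  # context names such as "aggs", "ingest", "update"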
``_ """ return self.transport.perform_request( "GET", "/_script_context", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_script_languages(self, params=None, headers=None): """ Returns available script types, languages and contexts ``_ """ return self.transport.perform_request( "GET", "/_script_language", params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def close_point_in_time(self, body=None, params=None, headers=None): """ Close a point in time ``_ :arg body: a point-in-time id to close """ return self.transport.perform_request( "DELETE", "/_pit", params=params, headers=headers, body=body ) @query_params( "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing", response_mimetypes=["application/json"], ) def open_point_in_time(self, index, params=None, headers=None): """ Open a point in time that can be used in subsequent searches ``_ :arg index: A comma-separated list of index names to open point in time; use `_all` or empty string to perform the operation on all indices :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg keep_alive: Specific the time to live for the point in time :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg routing: Specific routing value """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_pit"), params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def terms_enum(self, index, body=None, params=None, headers=None): """ The terms enum API can be used to discover terms in the index that begin with the provided string. It is designed for low-latency look-ups used in auto- complete scenarios. ``_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg body: field name, string which is the prefix expected in matching terms, timeout and size for max number of results """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_terms_enum"), params=params, headers=headers, body=body, ) @query_params( "exact_bounds", "extent", "grid_precision", "grid_type", "size", request_mimetypes=["application/json"], response_mimetypes=["application/vnd.mapbox-vector-tile"], body_params=[ "aggs", "exact_bounds", "extent", "fields", "grid_precision", "grid_type", "query", "runtime_mappings", "size", "sort", ], ) def search_mvt( self, index, field, zoom, x, y, body=None, params=None, headers=None ): """ Searches a vector tile for geospatial values. Returns results as a binary Mapbox vector tile. ``_ .. 
warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: Comma-separated list of data streams, indices, or aliases to search :arg field: Field containing geospatial data to return :arg zoom: Zoom level for the vector tile to search :arg x: X coordinate for the vector tile to search :arg y: Y coordinate for the vector tile to search :arg body: Search request body. :arg aggs: Sub-aggregations for the geotile_grid. Supports the following aggregation types: - avg - cardinality - max - min - sum :arg exact_bounds: If false, the meta layer’s feature is the bounding box of the tile. If true, the meta layer’s feature is a bounding box resulting from a geo_bounds aggregation. The aggregation runs on values that intersect the <zoom>/<x>/<y> tile with wrap_longitude set to false. The resulting bounding box may be larger than the vector tile. :arg extent: Size, in pixels, of a side of the tile. Vector tiles are square with equal sides. :arg fields: Fields to return in the `hits` layer. Supports wildcards (`*`). This parameter does not support fields with array values. Fields with array values may return inconsistent results. :arg grid_precision: Additional zoom levels available through the aggs layer. For example, if <zoom> is 7 and grid_precision is 8, you can zoom in up to level 15. Accepts 0-8. If 0, results don’t include the aggs layer. :arg grid_type: Determines the geometry type for features in the aggs layer. In the aggs layer, each feature represents a geotile_grid cell. If 'grid', each feature is a Polygon of the cell's bounding box. If 'point', each feature is a Point that is the centroid of the cell. :arg query: Query DSL used to filter documents for the search. :arg runtime_mappings: Defines one or more runtime fields in the search request. These fields take precedence over mapped fields with the same name. :arg size: Maximum number of features to return in the hits layer. Accepts 0-10000. If 0, results don’t include the hits layer. :arg sort: Sorts features in the hits layer. By default, the API calculates a bounding box for each feature. It sorts features based on this box’s diagonal length, from longest to shortest. """ for param in (index, field, zoom, x, y): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "POST", _make_path(index, "_mvt", field, zoom, x, y), params=params, headers=headers, body=body, )
elasticsearch-py-7.17.6/elasticsearch/client/__init__.pyi000066400000000000000000001474231426163262700234500ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License.
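# Usage sketch (orientation only, not part of the stubs; assumes a cluster
# reachable at the hypothetical URL below):
#
#     from elasticsearch import Elasticsearch
#     es = Elasticsearch(hosts=["http://localhost:9200"])
#     if es.ping():  # declared below as returning bool
#         print(es.info()["version"]["number"])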
from __future__ import unicode_literals import logging from typing import ( Any, Collection, Dict, List, Mapping, MutableMapping, Optional, Sequence, Tuple, Type, Union, ) from typing_extensions import Literal from ..transport import Transport from .async_search import AsyncSearchClient from .autoscaling import AutoscalingClient from .cat import CatClient from .ccr import CcrClient from .cluster import ClusterClient from .dangling_indices import DanglingIndicesClient from .enrich import EnrichClient from .eql import EqlClient from .features import FeaturesClient from .graph import GraphClient from .ilm import IlmClient from .indices import IndicesClient from .ingest import IngestClient from .license import LicenseClient from .logstash import LogstashClient from .migration import MigrationClient from .ml import MlClient from .monitoring import MonitoringClient from .nodes import NodesClient from .remote import RemoteClient from .rollup import RollupClient from .searchable_snapshots import SearchableSnapshotsClient from .security import SecurityClient from .shutdown import ShutdownClient from .slm import SlmClient from .snapshot import SnapshotClient from .sql import SqlClient from .ssl import SslClient from .tasks import TasksClient from .text_structure import TextStructureClient from .transform import TransformClient from .watcher import WatcherClient from .xpack import XPackClient logger: logging.Logger class Elasticsearch(object): transport: Transport async_search: AsyncSearchClient autoscaling: AutoscalingClient cat: CatClient ccr: CcrClient cluster: ClusterClient dangling_indices: DanglingIndicesClient enrich: EnrichClient eql: EqlClient features: FeaturesClient graph: GraphClient ilm: IlmClient indices: IndicesClient ingest: IngestClient license: LicenseClient logstash: LogstashClient migration: MigrationClient ml: MlClient monitoring: MonitoringClient nodes: NodesClient remote: RemoteClient rollup: RollupClient searchable_snapshots: SearchableSnapshotsClient security: SecurityClient shutdown: ShutdownClient slm: SlmClient snapshot: SnapshotClient sql: SqlClient ssl: SslClient tasks: TasksClient text_structure: TextStructureClient transform: TransformClient watcher: WatcherClient xpack: XPackClient def __init__( self, hosts: Any = ..., transport_class: Type[Transport] = ..., **kwargs: Any, ) -> None: ... def __repr__(self) -> str: ... def __enter__(self) -> "Elasticsearch": ... def __exit__(self, *_: Any) -> None: ... def close(self) -> None: ... # AUTO-GENERATED-API-DEFINITIONS # def ping( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... 
def info( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def create( self, *, index: str, id: str, document: Any, doc_type: Optional[str] = ..., pipeline: Optional[str] = ..., refresh: Optional[Union[Literal["false", "true", "wait_for"], bool, str]] = ..., routing: Optional[str] = ..., timeout: Optional[Union[int, str]] = ..., version: Optional[int] = ..., version_type: Optional[ Union[Literal["external", "external_gte", "force", "internal"], str] ] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def index( self, *, index: str, document: Any, doc_type: Optional[str] = ..., id: Optional[str] = ..., if_primary_term: Optional[int] = ..., if_seq_no: Optional[int] = ..., op_type: Optional[Union[Literal["create", "index"], str]] = ..., pipeline: Optional[str] = ..., refresh: Optional[Union[Literal["false", "true", "wait_for"], bool, str]] = ..., require_alias: Optional[bool] = ..., routing: Optional[str] = ..., timeout: Optional[Union[int, str]] = ..., version: Optional[int] = ..., version_type: Optional[ Union[Literal["external", "external_gte", "force", "internal"], str] ] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def bulk( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], index: Optional[Any] = ..., doc_type: Optional[Any] = ..., _source: Optional[Any] = ..., _source_excludes: Optional[Any] = ..., _source_includes: Optional[Any] = ..., pipeline: Optional[Any] = ..., refresh: Optional[Any] = ..., require_alias: Optional[bool] = ..., routing: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def clear_scroll( self, *, body: Optional[Mapping[str, Any]] = ..., scroll_id: Optional[Union[List[str], str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def count( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Any] = ..., doc_type: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[Any] = ..., default_operator: Optional[Any] = ..., df: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., lenient: Optional[bool] = ..., min_score: Optional[Any] = ..., preference: Optional[Any] = ..., q: Optional[Any] = ..., routing: Optional[Any] = ..., terminate_after: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def delete( self, *, index: str, id: str, doc_type: Optional[str] = ..., if_primary_term: Optional[int] = ..., if_seq_no: Optional[int] = ..., refresh: Optional[Union[Literal["false", "true", "wait_for"], bool, str]] = ..., routing: Optional[str] = ..., timeout: Optional[Union[int, str]] = ..., version: Optional[int] = ..., version_type: Optional[ Union[Literal["external", "external_gte", "force", "internal"], str] ] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_by_query( self, *, index: Any, body: Mapping[str, Any], doc_type: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[Any] = ..., conflicts: Optional[Any] = ..., default_operator: Optional[Any] = ..., df: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., from_: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., lenient: Optional[bool] = ..., max_docs: Optional[Any] = ..., preference: Optional[Any] = ..., q: Optional[Any] = ..., refresh: Optional[bool] = ..., request_cache: Optional[bool] = ..., requests_per_second: Optional[Any] = ..., routing: Optional[Any] = ..., scroll: Optional[Any] = ..., scroll_size: Optional[Any] = ..., search_timeout: Optional[Any] = ..., search_type: Optional[Any] = ..., size: Optional[Any] = ..., slices: Optional[Any] = ..., sort: Optional[Any] = ..., stats: Optional[Any] = ..., terminate_after: Optional[Any] = ..., timeout: Optional[Any] = ..., version: Optional[bool] = ..., wait_for_active_shards: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_by_query_rethrottle( self, *, task_id: Any, requests_per_second: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def delete_script( self, *, id: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def exists( self, *, index: str, id: str, doc_type: Optional[str] = ..., _source: Optional[Union[Union[List[str], str], bool]] = ..., _source_excludes: Optional[Union[List[str], str]] = ..., _source_includes: Optional[Union[List[str], str]] = ..., preference: Optional[str] = ..., realtime: Optional[bool] = ..., refresh: Optional[bool] = ..., routing: Optional[str] = ..., stored_fields: Optional[Union[List[str], str]] = ..., version: Optional[int] = ..., version_type: Optional[ Union[Literal["external", "external_gte", "force", "internal"], str] ] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... def exists_source( self, *, index: Any, id: Any, doc_type: Optional[Any] = ..., _source: Optional[Any] = ..., _source_excludes: Optional[Any] = ..., _source_includes: Optional[Any] = ..., preference: Optional[Any] = ..., realtime: Optional[bool] = ..., refresh: Optional[bool] = ..., routing: Optional[Any] = ..., version: Optional[Any] = ..., version_type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... 
def explain( self, *, index: Any, id: Any, body: Optional[Mapping[str, Any]] = ..., doc_type: Optional[Any] = ..., _source: Optional[Any] = ..., _source_excludes: Optional[Any] = ..., _source_includes: Optional[Any] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[Any] = ..., default_operator: Optional[Any] = ..., df: Optional[Any] = ..., lenient: Optional[bool] = ..., preference: Optional[Any] = ..., q: Optional[Any] = ..., routing: Optional[Any] = ..., stored_fields: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def field_caps( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., fields: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., include_unmapped: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get( self, *, index: str, id: str, doc_type: Optional[str] = ..., _source: Optional[Union[Union[List[str], str], bool]] = ..., _source_excludes: Optional[Union[List[str], str]] = ..., _source_includes: Optional[Union[List[str], str]] = ..., preference: Optional[str] = ..., realtime: Optional[bool] = ..., refresh: Optional[bool] = ..., routing: Optional[str] = ..., stored_fields: Optional[Union[List[str], str]] = ..., version: Optional[int] = ..., version_type: Optional[ Union[Literal["external", "external_gte", "force", "internal"], str] ] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def get_script( self, *, id: Any, master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_source( self, *, index: Any, id: Any, doc_type: Optional[Any] = ..., _source: Optional[Any] = ..., _source_excludes: Optional[Any] = ..., _source_includes: Optional[Any] = ..., preference: Optional[Any] = ..., realtime: Optional[bool] = ..., refresh: Optional[bool] = ..., routing: Optional[Any] = ..., version: Optional[Any] = ..., version_type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def mget( self, *, body: Mapping[str, Any], index: Optional[Any] = ..., doc_type: Optional[Any] = ..., _source: Optional[Any] = ..., _source_excludes: Optional[Any] = ..., _source_includes: Optional[Any] = ..., preference: Optional[Any] = ..., realtime: Optional[bool] = ..., refresh: Optional[bool] = ..., routing: Optional[Any] = ..., stored_fields: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def msearch( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], index: Optional[Any] = ..., doc_type: Optional[Any] = ..., ccs_minimize_roundtrips: Optional[bool] = ..., max_concurrent_searches: Optional[Any] = ..., max_concurrent_shard_requests: Optional[Any] = ..., pre_filter_shard_size: Optional[Any] = ..., rest_total_hits_as_int: Optional[bool] = ..., search_type: Optional[Any] = ..., typed_keys: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def msearch_template( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], index: Optional[Any] = ..., doc_type: Optional[Any] = ..., ccs_minimize_roundtrips: Optional[bool] = ..., max_concurrent_searches: Optional[Any] = ..., rest_total_hits_as_int: Optional[bool] = ..., search_type: Optional[Any] = ..., typed_keys: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def mtermvectors( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Any] = ..., doc_type: Optional[Any] = ..., field_statistics: Optional[bool] = ..., fields: Optional[Any] = ..., ids: Optional[Any] = ..., offsets: Optional[bool] = ..., payloads: Optional[bool] = ..., positions: Optional[bool] = ..., preference: Optional[Any] = ..., realtime: Optional[bool] = ..., routing: Optional[Any] = ..., term_statistics: Optional[bool] = ..., version: Optional[Any] = ..., version_type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_script( self, *, id: Any, body: Mapping[str, Any], context: Optional[Any] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def rank_eval( self, *, body: Mapping[str, Any], index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., search_type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def reindex( self, *, body: Mapping[str, Any], max_docs: Optional[Any] = ..., refresh: Optional[bool] = ..., requests_per_second: Optional[Any] = ..., scroll: Optional[Any] = ..., slices: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def reindex_rethrottle( self, *, task_id: Any, requests_per_second: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def render_search_template( self, *, body: Optional[Mapping[str, Any]] = ..., id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def scripts_painless_execute( self, *, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def scroll( self, *, body: Optional[Mapping[str, Any]] = ..., rest_total_hits_as_int: Optional[bool] = ..., scroll: Optional[Union[int, str]] = ..., scroll_id: Optional[str] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def search( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Union[List[str], str]] = ..., doc_type: Optional[Union[List[str], str]] = ..., _source: Optional[Union[Union[List[str], str], bool]] = ..., _source_excludes: Optional[Union[List[str], str]] = ..., _source_includes: Optional[Union[List[str], str]] = ..., aggregations: Optional[Mapping[str, Mapping[str, Any]]] = ..., aggs: Optional[Mapping[str, Mapping[str, Any]]] = ..., allow_no_indices: Optional[bool] = ..., allow_partial_search_results: Optional[bool] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[str] = ..., batched_reduce_size: Optional[int] = ..., ccs_minimize_roundtrips: Optional[bool] = ..., collapse: Optional[Mapping[str, Any]] = ..., default_operator: Optional[Union[Literal["and", "or"], str]] = ..., df: Optional[str] = ..., docvalue_fields: Optional[Union[List[str], str]] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., explain: Optional[bool] = ..., fields: Optional[List[Mapping[str, Any]]] = ..., from_: Optional[int] = ..., highlight: Optional[Mapping[str, Any]] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., indices_boost: Optional[List[Mapping[str, float]]] = ..., lenient: Optional[bool] = ..., max_concurrent_shard_requests: Optional[int] = ..., min_compatible_shard_node: Optional[str] = ..., min_score: Optional[float] = ..., pit: Optional[Mapping[str, Any]] = ..., post_filter: Optional[Mapping[str, Any]] = ..., pre_filter_shard_size: Optional[int] = ..., preference: Optional[str] = ..., profile: Optional[bool] = ..., q: Optional[str] = ..., query: Optional[Mapping[str, Any]] = ..., request_cache: Optional[bool] = ..., rescore: Optional[Union[List[Mapping[str, Any]], Mapping[str, Any]]] = ..., rest_total_hits_as_int: Optional[bool] = ..., routing: Optional[str] = ..., runtime_mappings: Optional[Mapping[str, Mapping[str, Any]]] = ..., script_fields: Optional[Mapping[str, Mapping[str, Any]]] = ..., scroll: Optional[Union[int, str]] = ..., search_after: Optional[List[Union[None, float, int, str]]] = ..., search_type: Optional[ Union[Literal["dfs_query_then_fetch", "query_then_fetch"], str] ] = ..., seq_no_primary_term: Optional[bool] = ..., size: Optional[int] = ..., slice: Optional[Mapping[str, Any]] = ..., sort: Optional[Union[List[str], str]] = ..., stats: Optional[List[str]] = ..., stored_fields: Optional[Union[List[str], str]] = ..., suggest: Optional[Mapping[str, Any]] = ..., suggest_field: Optional[str] = ..., suggest_mode: Optional[ Union[Literal["always", "missing", "popular"], str] ] = ..., suggest_size: Optional[int] = ..., suggest_text: Optional[str] = ..., terminate_after: Optional[int] = ..., timeout: Optional[Union[int, str]] = ..., track_scores: Optional[bool] = ..., track_total_hits: Optional[Union[bool, int]] = ..., typed_keys: Optional[bool] = ..., version: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] 
= ..., ) -> Dict[str, Any]: ... def search_shards( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., local: Optional[bool] = ..., preference: Optional[Any] = ..., routing: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def search_template( self, *, body: Mapping[str, Any], index: Optional[Any] = ..., doc_type: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., ccs_minimize_roundtrips: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., explain: Optional[bool] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., preference: Optional[Any] = ..., profile: Optional[bool] = ..., rest_total_hits_as_int: Optional[bool] = ..., routing: Optional[Any] = ..., scroll: Optional[Any] = ..., search_type: Optional[Any] = ..., typed_keys: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def termvectors( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., doc_type: Optional[Any] = ..., id: Optional[Any] = ..., field_statistics: Optional[bool] = ..., fields: Optional[Any] = ..., offsets: Optional[bool] = ..., payloads: Optional[bool] = ..., positions: Optional[bool] = ..., preference: Optional[Any] = ..., realtime: Optional[bool] = ..., routing: Optional[Any] = ..., term_statistics: Optional[bool] = ..., version: Optional[Any] = ..., version_type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
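# A minimal usage sketch for the search() signature typed above, showing the
# top-level body parameters (query, size, sort) passed as keywords instead of
# a raw body. Assumptions: a local unsecured node at http://localhost:9200;
# the index name "my-index" and the "title" field are hypothetical.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
resp = es.search(
    index="my-index",
    query={"match": {"title": "python"}},  # Query DSL as a keyword argument
    size=10,                               # maximum number of hits to return
    sort=["_score"],                       # sort specification
)
for hit in resp["hits"]["hits"]:
    print(hit["_id"], hit["_score"])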
def update( self, *, index: str, id: str, body: Mapping[str, Any], doc_type: Optional[str] = ..., _source: Optional[Union[Union[List[str], str], bool]] = ..., _source_excludes: Optional[Union[List[str], str]] = ..., _source_includes: Optional[Union[List[str], str]] = ..., detect_noop: Optional[bool] = ..., doc: Optional[Any] = ..., doc_as_upsert: Optional[bool] = ..., if_primary_term: Optional[int] = ..., if_seq_no: Optional[int] = ..., lang: Optional[str] = ..., refresh: Optional[Union[Literal["false", "true", "wait_for"], bool, str]] = ..., require_alias: Optional[bool] = ..., retry_on_conflict: Optional[int] = ..., routing: Optional[str] = ..., script: Optional[Mapping[str, Any]] = ..., scripted_upsert: Optional[bool] = ..., timeout: Optional[Union[int, str]] = ..., upsert: Optional[Any] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def update_by_query( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., doc_type: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[Any] = ..., conflicts: Optional[Any] = ..., default_operator: Optional[Any] = ..., df: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., from_: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., lenient: Optional[bool] = ..., max_docs: Optional[Any] = ..., pipeline: Optional[Any] = ..., preference: Optional[Any] = ..., q: Optional[Any] = ..., refresh: Optional[bool] = ..., request_cache: Optional[bool] = ..., requests_per_second: Optional[Any] = ..., routing: Optional[Any] = ..., scroll: Optional[Any] = ..., scroll_size: Optional[Any] = ..., search_timeout: Optional[Any] = ..., search_type: Optional[Any] = ..., size: Optional[Any] = ..., slices: Optional[Any] = ..., sort: Optional[Any] = ..., stats: Optional[Any] = ..., terminate_after: Optional[Any] = ..., timeout: Optional[Any] = ..., version: Optional[bool] = ..., version_type: Optional[bool] = ..., wait_for_active_shards: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
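# A usage sketch for update() and update_by_query() as typed above. The index,
# document id, field names, and script are hypothetical; in 7.x the
# script/upsert payload travels in `body`.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

# Scripted partial update with an upsert fallback; retry_on_conflict re-runs
# the update a few times if another writer causes a version conflict.
es.update(
    index="my-index",
    id="1",
    body={
        "script": {
            "source": "ctx._source.counter += params.n",
            "params": {"n": 1},
        },
        "upsert": {"counter": 1},  # document to index if id "1" does not exist
    },
    retry_on_conflict=3,
)

# In-place rewrite of every document matching a query. With
# wait_for_completion=False the call returns a task descriptor instead of
# blocking until the rewrite finishes.
task = es.update_by_query(
    index="my-index",
    body={
        "query": {"term": {"status": "stale"}},
        "script": {"source": "ctx._source.status = 'fresh'"},
    },
    conflicts="proceed",  # skip, rather than abort on, version conflicts
    wait_for_completion=False,
)
print(task.get("task"))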
def update_by_query_rethrottle( self, *, task_id: Any, requests_per_second: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_script_context( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_script_languages( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def close_point_in_time( self, *, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def open_point_in_time( self, *, index: Any, expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., keep_alive: Optional[Any] = ..., preference: Optional[Any] = ..., routing: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
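# A sketch of the point-in-time (PIT) workflow behind the stubs above: open a
# PIT, page through a frozen view of the index with search_after, then close
# it. Index name, page size, and keep_alive values are illustrative.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

pit = es.open_point_in_time(index="my-index", keep_alive="1m")
pit_id = pit["id"]

search_after = None
while True:
    resp = es.search(
        size=1000,
        query={"match_all": {}},
        pit={"id": pit_id, "keep_alive": "1m"},  # no index= when a PIT is used
        sort=["_shard_doc"],                     # tiebreaker sort for paging
        search_after=search_after,               # None on the first page
    )
    hits = resp["hits"]["hits"]
    if not hits:
        break
    search_after = hits[-1]["sort"]  # cursor for the next page

es.close_point_in_time(body={"id": pit_id})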
def terms_enum( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def search_mvt( self, *, index: Union[List[str], str], field: str, zoom: int, x: int, y: int, body: Optional[Mapping[str, Any]] = ..., aggs: Optional[Mapping[str, Mapping[str, Any]]] = ..., exact_bounds: Optional[bool] = ..., extent: Optional[int] = ..., fields: Optional[Union[List[str], str]] = ..., grid_precision: Optional[int] = ..., grid_type: Optional[Union[Literal["centroid", "grid", "point"], str]] = ..., query: Optional[Mapping[str, Any]] = ..., runtime_mappings: Optional[Mapping[str, Mapping[str, Any]]] = ..., size: Optional[int] = ..., sort: Optional[ Union[List[Union[Mapping[str, Any], str]], Union[Mapping[str, Any], str]] ] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bytes: ... elasticsearch-py-7.17.6/elasticsearch/client/async_search.py000066400000000000000000000242721426163262700242000ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class AsyncSearchClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def delete(self, id, params=None, headers=None): """ Deletes an async search by ID. If the search is still running, the search request will be cancelled. Otherwise, the saved search results are deleted. 
``_ :arg id: The async search ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "DELETE", _make_path("_async_search", id), params=params, headers=headers ) @query_params( "keep_alive", "typed_keys", "wait_for_completion_timeout", response_mimetypes=["application/json"], ) def get(self, id, params=None, headers=None): """ Retrieves the results of a previously submitted async search request given its ID. ``_ :arg id: The async search ID :arg keep_alive: Specify the time interval in which the results (partial or final) for this search will be available :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response :arg wait_for_completion_timeout: Specify the time that the request should block waiting for the final response """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "GET", _make_path("_async_search", id), params=params, headers=headers ) @query_params( "_source", "_source_excludes", "_source_includes", "allow_no_indices", "allow_partial_search_results", "analyze_wildcard", "analyzer", "batched_reduce_size", "default_operator", "df", "docvalue_fields", "expand_wildcards", "explain", "from_", "ignore_throttled", "ignore_unavailable", "keep_alive", "keep_on_completion", "lenient", "max_concurrent_shard_requests", "preference", "q", "request_cache", "routing", "search_type", "seq_no_primary_term", "size", "sort", "stats", "stored_fields", "suggest_field", "suggest_mode", "suggest_size", "suggest_text", "terminate_after", "timeout", "track_scores", "track_total_hits", "typed_keys", "version", "wait_for_completion_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def submit(self, body=None, index=None, params=None, headers=None): """ Executes a search request asynchronously. ``_ :arg body: The search definition using the Query DSL :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg _source: True or false to return the _source field or not, or a list of fields to return :arg _source_excludes: A list of fields to exclude from the returned _source field :arg _source_includes: A list of fields to extract and return from the _source field :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg allow_partial_search_results: Indicate if an error should be returned if there is a partial search failure or timeout Default: True :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg batched_reduce_size: The number of shard results that should be reduced at once on the coordinating node. This value should be used as the granularity at which progress results will be made available. Default: 5 :arg default_operator: The default operator for query string query (AND or OR) Valid choices: AND, OR Default: OR :arg df: The field to use as default where no field prefix is given in the query string :arg docvalue_fields: A comma-separated list of fields to return as the docvalue representation of a field for each hit :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, hidden, none, all Default: open :arg explain: Specify whether to return detailed information about score computation as part of a hit :arg from_: Starting offset (default: 0) :arg ignore_throttled: Whether specified concrete, expanded or aliased indices should be ignored when throttled :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg keep_alive: Update the time interval in which the results (partial or final) for this search will be available Default: 5d :arg keep_on_completion: Control whether the response should be stored in the cluster if it completed within the provided [wait_for_completion] time (default: false) :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg max_concurrent_shard_requests: The number of concurrent shard requests per node this search executes concurrently. This value should be used to limit the impact of the search on the cluster in order to limit the number of concurrent shard requests Default: 5 :arg preference: Specify the node or shard the operation should be performed on (default: random) :arg q: Query in the Lucene query string syntax :arg request_cache: Specify if request cache should be used for this request or not, defaults to true :arg routing: A comma-separated list of specific routing values :arg search_type: Search operation type Valid choices: query_then_fetch, dfs_query_then_fetch :arg seq_no_primary_term: Specify whether to return sequence number and primary term of the last modification of each hit :arg size: Number of hits to return (default: 10) :arg sort: A comma-separated list of : pairs :arg stats: Specific 'tag' of the request for logging and statistical purposes :arg stored_fields: A comma-separated list of stored fields to return as part of a hit :arg suggest_field: Specify which field to use for suggestions :arg suggest_mode: Specify suggest mode Valid choices: missing, popular, always Default: missing :arg suggest_size: How many suggestions to return in response :arg suggest_text: The source text for which the suggestions should be returned :arg terminate_after: The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early. :arg timeout: Explicit operation timeout :arg track_scores: Whether to calculate and return scores even if they are not used for sorting :arg track_total_hits: Indicate if the number of documents that match the query should be tracked :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response :arg version: Specify whether to return document version as part of a hit :arg wait_for_completion_timeout: Specify the time that the request should block waiting for the final response Default: 1s """ if "from_" in params: params["from"] = params.pop("from_") return self.transport.perform_request( "POST", _make_path(index, "_async_search"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def status(self, id, params=None, headers=None): """ Retrieves the status of a previously submitted async search request given its ID. 
``_ :arg id: The async search ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "GET", _make_path("_async_search", "status", id), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/client/async_search.pyi000066400000000000000000000133321426163262700243440ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class AsyncSearchClient(NamespacedClient): def delete( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get( self, *, id: Any, keep_alive: Optional[Any] = ..., typed_keys: Optional[bool] = ..., wait_for_completion_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
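# A usage sketch tying the AsyncSearchClient methods together: submit a search
# that may outlive the request, poll its status, fetch results, and clean up.
# The index pattern, query, and timeout values are hypothetical.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

# Block up to 2s for a final answer; if the search is still running after
# that, keep it (and its eventual results) alive server-side.
resp = es.async_search.submit(
    index="logs-*",
    body={"query": {"match": {"message": "error"}}},
    wait_for_completion_timeout="2s",
    keep_on_completion=True,
)

if resp["is_running"]:
    search_id = resp["id"]
    status = es.async_search.status(id=search_id)  # cheap progress check
    print(status["_shards"])
    final = es.async_search.get(id=search_id)      # possibly partial results
    es.async_search.delete(id=search_id)           # free the stored results
else:
    print(resp["response"]["hits"]["total"])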
def submit( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Any] = ..., _source: Optional[Any] = ..., _source_excludes: Optional[Any] = ..., _source_includes: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., allow_partial_search_results: Optional[bool] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[Any] = ..., batched_reduce_size: Optional[Any] = ..., default_operator: Optional[Any] = ..., df: Optional[Any] = ..., docvalue_fields: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., explain: Optional[bool] = ..., from_: Optional[Any] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., keep_alive: Optional[Any] = ..., keep_on_completion: Optional[bool] = ..., lenient: Optional[bool] = ..., max_concurrent_shard_requests: Optional[Any] = ..., preference: Optional[Any] = ..., q: Optional[Any] = ..., request_cache: Optional[bool] = ..., routing: Optional[Any] = ..., search_type: Optional[Any] = ..., seq_no_primary_term: Optional[bool] = ..., size: Optional[Any] = ..., sort: Optional[Any] = ..., stats: Optional[Any] = ..., stored_fields: Optional[Any] = ..., suggest_field: Optional[Any] = ..., suggest_mode: Optional[Any] = ..., suggest_size: Optional[Any] = ..., suggest_text: Optional[Any] = ..., terminate_after: Optional[Any] = ..., timeout: Optional[Any] = ..., track_scores: Optional[bool] = ..., track_total_hits: Optional[bool] = ..., typed_keys: Optional[bool] = ..., version: Optional[bool] = ..., wait_for_completion_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def status( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/autoscaling.py000066400000000000000000000076351426163262700240530ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class AutoscalingClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def delete_autoscaling_policy(self, name, params=None, headers=None): """ Deletes an autoscaling policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ :arg name: the name of the autoscaling policy """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "DELETE", _make_path("_autoscaling", "policy", name), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def get_autoscaling_policy(self, name, params=None, headers=None): """ Retrieves an autoscaling policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ :arg name: the name of the autoscaling policy """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "GET", _make_path("_autoscaling", "policy", name), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_autoscaling_policy(self, name, body, params=None, headers=None): """ Creates a new autoscaling policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ :arg name: the name of the autoscaling policy :arg body: the specification of the autoscaling policy """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_autoscaling", "policy", name), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def get_autoscaling_capacity(self, params=None, headers=None): """ Gets the current autoscaling capacity based on the configured autoscaling policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ """ return self.transport.perform_request( "GET", "/_autoscaling/capacity", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/autoscaling.pyi000066400000000000000000000075371426163262700242250ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
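# A sketch of the AutoscalingClient round trip implemented above. As the
# docstrings note, these APIs are designed for ECE/ESS/ECK orchestration; the
# policy name, role, and the "fixed" decider below are illustrative
# assumptions, not a recommended production policy.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

es.autoscaling.put_autoscaling_policy(
    name="my-hot-policy",
    body={"roles": ["data_hot"], "deciders": {"fixed": {}}},
)
print(es.autoscaling.get_autoscaling_policy(name="my-hot-policy"))
print(es.autoscaling.get_autoscaling_capacity())  # currently required capacity
es.autoscaling.delete_autoscaling_policy(name="my-hot-policy")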
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class AutoscalingClient(NamespacedClient): def delete_autoscaling_policy( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_autoscaling_policy( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_autoscaling_policy( self, *, name: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_autoscaling_capacity( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/cat.py000066400000000000000000001063661426163262700223120ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
from .utils import NamespacedClient, _make_path, query_params class CatClient(NamespacedClient): @query_params( "expand_wildcards", "format", "h", "help", "local", "s", "v", response_mimetypes=["text/plain", "application/json"], ) def aliases(self, name=None, params=None, headers=None): """ Shows information about currently configured aliases to indices including filter and routing infos. ``_ :arg name: A comma-separated list of alias names to return :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: all :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "aliases", name), params=params, headers=headers ) @query_params( "bytes", "format", "h", "help", "local", "master_timeout", "s", "v", response_mimetypes=["text/plain", "application/json"], ) def allocation(self, node_id=None, params=None, headers=None): """ Provides a snapshot of how many shards are allocated to each data node and how much disk space they are using. ``_ :arg node_id: A comma-separated list of node IDs or names to limit the returned information :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "allocation", node_id), params=params, headers=headers, ) @query_params( "format", "h", "help", "s", "v", response_mimetypes=["text/plain", "application/json"], ) def count(self, index=None, params=None, headers=None): """ Provides quick access to the document count of the entire cluster, or individual indices. ``_ :arg index: A comma-separated list of index names to limit the returned information :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "count", index), params=params, headers=headers ) @query_params( "format", "h", "help", "s", "time", "ts", "v", response_mimetypes=["text/plain", "application/json"], ) def health(self, params=None, headers=None): """ Returns a concise representation of the cluster health. ``_ :arg format: a short version of the Accept header, e.g. 
json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg ts: Set to false to disable timestamping Default: True :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", "/_cat/health", params=params, headers=headers ) @query_params( "help", "s", response_mimetypes=["text/plain"], ) def help(self, params=None, headers=None): """ Returns help for the Cat APIs. ``_ :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by """ return self.transport.perform_request( "GET", "/_cat", params=params, headers=headers ) @query_params( "bytes", "expand_wildcards", "format", "h", "health", "help", "include_unloaded_segments", "local", "master_timeout", "pri", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) def indices(self, index=None, params=None, headers=None): """ Returns information about indices: number of primaries and replicas, document counts, disk size, ... ``_ :arg index: A comma-separated list of index names to limit the returned information :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: all :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg health: A health status ("green", "yellow", or "red" to filter only indices matching the specified health status Valid choices: green, yellow, red :arg help: Return help information :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into memory :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg pri: Set to true to return stats only for primary shards :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "indices", index), params=params, headers=headers ) @query_params( "format", "h", "help", "local", "master_timeout", "s", "v", response_mimetypes=["text/plain", "application/json"], ) def master(self, params=None, headers=None): """ Returns information about the master node. ``_ :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. 
Display column headers """ return self.transport.perform_request( "GET", "/_cat/master", params=params, headers=headers ) @query_params( "bytes", "format", "full_id", "h", "help", "include_unloaded_segments", "local", "master_timeout", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) def nodes(self, params=None, headers=None): """ Returns basic statistics about performance of cluster nodes. ``_ :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg full_id: Return the full node ID instead of the shortened version (default: false) :arg h: Comma-separated list of column names to display :arg help: Return help information :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into memory :arg local: Calculate the selected nodes using the local cluster state rather than the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", "/_cat/nodes", params=params, headers=headers ) @query_params( "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) def recovery(self, index=None, params=None, headers=None): """ Returns information about index shard recoveries, both on-going completed. ``_ :arg index: Comma-separated list or wildcard expression of index names to limit the returned information :arg active_only: If `true`, the response only includes ongoing shard recoveries :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg detailed: If `true`, the response includes detailed information about shard recoveries :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "recovery", index), params=params, headers=headers ) @query_params( "bytes", "format", "h", "help", "local", "master_timeout", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) def shards(self, index=None, params=None, headers=None): """ Provides a detailed view of shard allocation on nodes. ``_ :arg index: A comma-separated list of index names to limit the returned information :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. 
json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "shards", index), params=params, headers=headers ) @query_params( "bytes", "format", "h", "help", "s", "v", response_mimetypes=["text/plain", "application/json"], ) def segments(self, index=None, params=None, headers=None): """ Provides low-level information about the segments in the shards of an index. ``_ :arg index: A comma-separated list of index names to limit the returned information :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "segments", index), params=params, headers=headers ) @query_params( "format", "h", "help", "local", "master_timeout", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) def pending_tasks(self, params=None, headers=None): """ Returns a concise representation of the cluster pending tasks. ``_ :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", "/_cat/pending_tasks", params=params, headers=headers ) @query_params( "format", "h", "help", "local", "master_timeout", "s", "size", "v", response_mimetypes=["text/plain", "application/json"], ) def thread_pool(self, thread_pool_patterns=None, params=None, headers=None): """ Returns cluster-wide thread pool statistics per node. By default the active, queue and rejected statistics are returned for all thread pools. ``_ :arg thread_pool_patterns: A comma-separated list of regular- expressions to filter the thread pools in the output :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg size: The multiplier in which to display values Valid choices: , k, m, g, t, p :arg v: Verbose mode. 
Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "thread_pool", thread_pool_patterns), params=params, headers=headers, ) @query_params( "bytes", "format", "h", "help", "s", "v", response_mimetypes=["text/plain", "application/json"], ) def fielddata(self, fields=None, params=None, headers=None): """ Shows how much heap memory is currently being used by fielddata on every data node in the cluster. ``_ :arg fields: A comma-separated list of fields to return the fielddata size :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "fielddata", fields), params=params, headers=headers, ) @query_params( "format", "h", "help", "include_bootstrap", "local", "master_timeout", "s", "v", response_mimetypes=["text/plain", "application/json"], ) def plugins(self, params=None, headers=None): """ Returns information about installed plugins across nodes node. ``_ :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg include_bootstrap: Include bootstrap plugins in the response :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", "/_cat/plugins", params=params, headers=headers ) @query_params( "format", "h", "help", "local", "master_timeout", "s", "v", response_mimetypes=["text/plain", "application/json"], ) def nodeattrs(self, params=None, headers=None): """ Returns information about custom node attributes. ``_ :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", "/_cat/nodeattrs", params=params, headers=headers ) @query_params( "format", "h", "help", "local", "master_timeout", "s", "v", response_mimetypes=["text/plain", "application/json"], ) def repositories(self, params=None, headers=None): """ Returns information about snapshot repositories registered in the cluster. ``_ :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. 
Display column headers """ return self.transport.perform_request( "GET", "/_cat/repositories", params=params, headers=headers ) @query_params( "format", "h", "help", "ignore_unavailable", "master_timeout", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) def snapshots(self, repository=None, params=None, headers=None): """ Returns all snapshots in a specific repository. ``_ :arg repository: Name of repository from which to fetch the snapshot information :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg ignore_unavailable: Set to true to ignore unavailable snapshots :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "snapshots", repository), params=params, headers=headers, ) @query_params( "actions", "detailed", "format", "h", "help", "nodes", "parent_task_id", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) def tasks(self, params=None, headers=None): """ Returns information about the tasks currently executing on one or more nodes in the cluster. ``_ :arg actions: A comma-separated list of actions that should be returned. Leave empty to return all. :arg detailed: Return detailed task information (default: false) :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg nodes: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg parent_task_id: Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all. :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", "/_cat/tasks", params=params, headers=headers ) @query_params( "format", "h", "help", "local", "master_timeout", "s", "v", response_mimetypes=["text/plain", "application/json"], ) def templates(self, name=None, params=None, headers=None): """ Returns information about existing templates. ``_ :arg name: A pattern that returned template names must match :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg s: Comma-separated list of column names or column aliases to sort by :arg v: Verbose mode. 
Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "templates", name), params=params, headers=headers ) @query_params( "allow_no_match", "bytes", "format", "h", "help", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) def ml_data_frame_analytics(self, id=None, params=None, headers=None): """ Gets configuration and usage information about data frame analytics jobs. ``_ :arg id: The ID of the data frame analytics to fetch :arg allow_no_match: Whether to ignore if a wildcard expression matches no configs. (This includes `_all` string or when no configs have been specified) :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "ml", "data_frame", "analytics", id), params=params, headers=headers, ) @query_params( "allow_no_datafeeds", "allow_no_match", "format", "h", "help", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) def ml_datafeeds(self, datafeed_id=None, params=None, headers=None): """ Gets configuration and usage information about datafeeds. ``_ :arg datafeed_id: The ID of the datafeeds stats to fetch :arg allow_no_datafeeds: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "ml", "datafeeds", datafeed_id), params=params, headers=headers, ) @query_params( "allow_no_jobs", "allow_no_match", "bytes", "format", "h", "help", "s", "time", "v", response_mimetypes=["text/plain", "application/json"], ) def ml_jobs(self, job_id=None, params=None, headers=None): """ Gets configuration and usage information about anomaly detection jobs. ``_ :arg job_id: The ID of the jobs stats to fetch :arg allow_no_jobs: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. 
Display column headers """ return self.transport.perform_request( "GET", _make_path("_cat", "ml", "anomaly_detectors", job_id), params=params, headers=headers, ) @query_params( "allow_no_match", "bytes", "format", "from_", "h", "help", "s", "size", "time", "v", response_mimetypes=["text/plain", "application/json"], ) def ml_trained_models(self, model_id=None, params=None, headers=None): """ Gets configuration and usage information about inference trained models. ``_ :arg model_id: The ID of the trained models stats to fetch :arg allow_no_match: Whether to ignore if a wildcard expression matches no trained models. (This includes `_all` string or when no trained models have been specified) Default: True :arg bytes: The unit in which to display byte values Valid choices: b, k, kb, m, mb, g, gb, t, tb, p, pb :arg format: a short version of the Accept header, e.g. json, yaml :arg from_: skips a number of trained models :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg size: specifies a max number of trained models to get Default: 100 :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ if "from_" in params: params["from"] = params.pop("from_") return self.transport.perform_request( "GET", _make_path("_cat", "ml", "trained_models", model_id), params=params, headers=headers, ) @query_params( "allow_no_match", "format", "from_", "h", "help", "s", "size", "time", "v", response_mimetypes=["text/plain", "application/json"], ) def transforms(self, transform_id=None, params=None, headers=None): """ Gets configuration and usage information about transforms. ``_ :arg transform_id: The id of the transform for which to get stats. '_all' or '*' implies all transforms :arg allow_no_match: Whether to ignore if a wildcard expression matches no transforms. (This includes `_all` string or when no transforms have been specified) :arg format: a short version of the Accept header, e.g. json, yaml :arg from_: skips a number of transform configs, defaults to 0 :arg h: Comma-separated list of column names to display :arg help: Return help information :arg s: Comma-separated list of column names or column aliases to sort by :arg size: specifies a max number of transforms to get, defaults to 100 :arg time: The unit in which to display time values Valid choices: d, h, m, s, ms, micros, nanos :arg v: Verbose mode. Display column headers """ if "from_" in params: params["from"] = params.pop("from_") return self.transport.perform_request( "GET", _make_path("_cat", "transforms", transform_id), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/client/cat.pyi000066400000000000000000000616771426163262700224700ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class CatClient(NamespacedClient): def aliases( self, *, name: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def allocation( self, *, node_id: Optional[Any] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def count( self, *, index: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def health( self, *, format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., ts: Optional[bool] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... 
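# NOTE (editor's sketch, not part of the original stubs): a hypothetical
# call of ``health`` above, assuming an already-configured ``Elasticsearch``
# client bound to the name ``es``; the parameter values are illustrative.
#
#     es.cat.health(format="json", v=True)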
def help( self, *, help: Optional[bool] = ..., s: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> str: ... def indices( self, *, index: Optional[Any] = ..., bytes: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., health: Optional[Any] = ..., help: Optional[bool] = ..., include_unloaded_segments: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pri: Optional[bool] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def master( self, *, format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def nodes( self, *, bytes: Optional[Any] = ..., format: Optional[Any] = ..., full_id: Optional[bool] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., include_unloaded_segments: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... 
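# NOTE (editor's sketch, not part of the original stubs): hypothetical usage
# of ``nodes`` above, assuming a client ``es``; the column names are
# illustrative. ``h`` picks the columns to display and ``s`` sorts by a
# column name or alias, mirroring the runtime docstrings:
#
#     es.cat.nodes(h="name,heap.percent,cpu", s="name", v=True)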
def recovery( self, *, index: Optional[Any] = ..., active_only: Optional[bool] = ..., bytes: Optional[Any] = ..., detailed: Optional[bool] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def shards( self, *, index: Optional[Any] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def segments( self, *, index: Optional[Any] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def pending_tasks( self, *, format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... 
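# NOTE (editor's sketch, not part of the original stubs): hypothetical usage
# of ``pending_tasks`` above, assuming a client ``es``. ``time`` selects the
# unit used for the time columns:
#
#     es.cat.pending_tasks(time="ms", v=True)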
def thread_pool( self, *, thread_pool_patterns: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., size: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def fielddata( self, *, fields: Optional[Any] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def plugins( self, *, format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., include_bootstrap: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def nodeattrs( self, *, format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... 
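# NOTE (editor's sketch, not part of the original stubs): hypothetical usage
# of ``thread_pool`` from this file, assuming a client ``es``; the pool
# names are illustrative. ``thread_pool_patterns`` narrows the listing to
# matching pools:
#
#     es.cat.thread_pool(thread_pool_patterns="write,search", v=True)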
def repositories( self, *, format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def snapshots( self, *, repository: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def tasks( self, *, actions: Optional[Any] = ..., detailed: Optional[bool] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., nodes: Optional[Any] = ..., parent_task_id: Optional[Any] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def templates( self, *, name: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., s: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... 
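# NOTE (editor's sketch, not part of the original stubs): hypothetical usage
# of ``templates`` above, assuming a client ``es``; the wildcard is
# illustrative. ``name`` filters templates by pattern and ``format="json"``
# requests structured output instead of the plain-text table:
#
#     es.cat.templates(name="logs-*", format="json")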
def ml_data_frame_analytics( self, *, id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def ml_datafeeds( self, *, datafeed_id: Optional[Any] = ..., allow_no_datafeeds: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def ml_jobs( self, *, job_id: Optional[Any] = ..., allow_no_jobs: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... def ml_trained_models( self, *, model_id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., bytes: Optional[Any] = ..., format: Optional[Any] = ..., from_: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., size: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... 
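# NOTE (editor's sketch, not part of the original stubs): hypothetical usage
# of ``ml_trained_models`` above, assuming a client ``es``. As the runtime
# implementation shows, ``from_`` is renamed to the ``from`` query parameter
# before the request is sent:
#
#     es.cat.ml_trained_models(from_=0, size=50, format="json")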
def transforms( self, *, transform_id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., format: Optional[Any] = ..., from_: Optional[Any] = ..., h: Optional[Any] = ..., help: Optional[bool] = ..., s: Optional[Any] = ..., size: Optional[Any] = ..., time: Optional[Any] = ..., v: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Union[Dict[str, Any], str]: ... elasticsearch-py-7.17.6/elasticsearch/client/ccr.py000066400000000000000000000267201426163262700223050ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class CcrClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def delete_auto_follow_pattern(self, name, params=None, headers=None): """ Deletes auto-follow patterns. ``_ :arg name: The name of the auto follow pattern. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "DELETE", _make_path("_ccr", "auto_follow", name), params=params, headers=headers, ) @query_params( "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def follow(self, index, body, params=None, headers=None): """ Creates a new follower index configured to follow the referenced leader index. ``_ :arg index: The name of the follower index :arg body: The name of the leader index and other optional ccr related parameters :arg wait_for_active_shards: Sets the number of shard copies that must be active before returning. Defaults to 0. 
Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1) Default: 0 """ for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path(index, "_ccr", "follow"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def follow_info(self, index, params=None, headers=None): """ Retrieves information about all follower indices, including parameters and status for each follower index ``_ :arg index: A comma-separated list of index patterns; use `_all` to perform the operation on all indices """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "GET", _make_path(index, "_ccr", "info"), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def follow_stats(self, index, params=None, headers=None): """ Retrieves follower stats. Returns shard-level stats about the following tasks associated with each shard for the specified indices. ``_ :arg index: A comma-separated list of index patterns; use `_all` to perform the operation on all indices """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "GET", _make_path(index, "_ccr", "stats"), params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def forget_follower(self, index, body, params=None, headers=None): """ Removes the follower retention leases from the leader. ``_ :arg index: the name of the leader index for which specified follower retention leases should be removed :arg body: the name and UUID of the follower index, the name of the cluster containing the follower index, and the alias from the perspective of that cluster for the remote cluster containing the leader index """ for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "POST", _make_path(index, "_ccr", "forget_follower"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def get_auto_follow_pattern(self, name=None, params=None, headers=None): """ Gets configured auto-follow patterns. Returns the specified auto-follow pattern collection. ``_ :arg name: The name of the auto follow pattern. """ return self.transport.perform_request( "GET", _make_path("_ccr", "auto_follow", name), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def pause_follow(self, index, params=None, headers=None): """ Pauses a follower index. The follower index will not fetch any additional operations from the leader index. ``_ :arg index: The name of the follower index that should pause following its leader index.
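Example (editor's illustration, not part of the original docstring;
the client instance ``es`` and the index name are assumed)::

    es.ccr.pause_follow(index="follower-index")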
""" if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_ccr", "pause_follow"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_auto_follow_pattern(self, name, body, params=None, headers=None): """ Creates a new named collection of auto-follow patterns against a specified remote cluster. Newly created indices on the remote cluster matching any of the specified patterns will be automatically configured as follower indices. ``_ :arg name: The name of the auto follow pattern. :arg body: The specification of the auto follow pattern """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_ccr", "auto_follow", name), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def resume_follow(self, index, body=None, params=None, headers=None): """ Resumes a follower index that has been paused ``_ :arg index: The name of the follow index to resume following. :arg body: The name of the leader index and other optional ccr related parameters """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_ccr", "resume_follow"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def stats(self, params=None, headers=None): """ Gets all stats related to cross-cluster replication. ``_ """ return self.transport.perform_request( "GET", "/_ccr/stats", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def unfollow(self, index, params=None, headers=None): """ Stops the following task associated with a follower index and removes index metadata and settings associated with cross-cluster replication. ``_ :arg index: The name of the follower index that should be turned into a regular index. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_ccr", "unfollow"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def pause_auto_follow_pattern(self, name, params=None, headers=None): """ Pauses an auto-follow pattern ``_ :arg name: The name of the auto follow pattern that should pause discovering new indices to follow. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "POST", _make_path("_ccr", "auto_follow", name, "pause"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def resume_auto_follow_pattern(self, name, params=None, headers=None): """ Resumes an auto-follow pattern that has been paused ``_ :arg name: The name of the auto follow pattern to resume discovering new indices to follow. 
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "POST", _make_path("_ccr", "auto_follow", name, "resume"), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/client/ccr.pyi000066400000000000000000000245521426163262700224570ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class CcrClient(NamespacedClient): def delete_auto_follow_pattern( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def follow( self, *, index: Any, body: Mapping[str, Any], wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def follow_info( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def follow_stats( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def forget_follower( self, *, index: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_auto_follow_pattern( self, *, name: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def pause_follow( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_auto_follow_pattern( self, *, name: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
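# NOTE (editor's sketch, not part of the original stubs): a hypothetical
# call of ``put_auto_follow_pattern`` above, assuming a client ``es``; the
# remote-cluster alias and index patterns are illustrative, with the body
# keys shaped after the CCR auto-follow API:
#
#     es.ccr.put_auto_follow_pattern(
#         name="my-pattern",
#         body={
#             "remote_cluster": "leader",
#             "leader_index_patterns": ["logs-*"],
#             "follow_index_pattern": "{{leader_index}}-copy",
#         },
#     )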
def resume_follow( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stats( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def unfollow( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def pause_auto_follow_pattern( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def resume_auto_follow_pattern( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/cluster.py000066400000000000000000000421701426163262700232140ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class ClusterClient(NamespacedClient): @query_params( "expand_wildcards", "level", "local", "master_timeout", "timeout", "wait_for_active_shards", "wait_for_events", "wait_for_no_initializing_shards", "wait_for_no_relocating_shards", "wait_for_nodes", "wait_for_status", response_mimetypes=["application/json"], ) def health(self, index=None, params=None, headers=None): """ Returns basic information about the health of the cluster. ``_ :arg index: Limit the information returned to a specific index :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: all :arg level: Specify the level of detail for returned information Valid choices: cluster, indices, shards Default: cluster :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Wait until the specified number of shards is active :arg wait_for_events: Wait until all currently queued events with the given priority are processed Valid choices: immediate, urgent, high, normal, low, languid :arg wait_for_no_initializing_shards: Whether to wait until there are no initializing shards in the cluster :arg wait_for_no_relocating_shards: Whether to wait until there are no relocating shards in the cluster :arg wait_for_nodes: Wait until the specified number of nodes is available :arg wait_for_status: Wait until cluster is in a specific state Valid choices: green, yellow, red """ return self.transport.perform_request( "GET", _make_path("_cluster", "health", index), params=params, headers=headers, ) @query_params( "local", "master_timeout", response_mimetypes=["application/json"], ) def pending_tasks(self, params=None, headers=None): """ Returns a list of any cluster-level changes (e.g. create index, update mapping, allocate or fail shard) which have not yet been executed. ``_ :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Specify timeout for connection to master """ return self.transport.perform_request( "GET", "/_cluster/pending_tasks", params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "flat_settings", "ignore_unavailable", "local", "master_timeout", "wait_for_metadata_version", "wait_for_timeout", response_mimetypes=["application/json"], ) def state(self, metric=None, index=None, params=None, headers=None): """ Returns comprehensive information about the state of the cluster. ``_ :arg metric: Limit the information returned to the specified metrics Valid choices: _all, blocks, metadata, nodes, routing_table, routing_nodes, master_node, version :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices.
(This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg flat_settings: Return settings in flat format (default: false) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Specify timeout for connection to master :arg wait_for_metadata_version: Wait for the metadata version to be equal or greater than the specified metadata version :arg wait_for_timeout: The maximum time to wait for wait_for_metadata_version before timing out """ if index and metric in SKIP_IN_PATH: metric = "_all" return self.transport.perform_request( "GET", _make_path("_cluster", "state", metric, index), params=params, headers=headers, ) @query_params( "flat_settings", "timeout", response_mimetypes=["application/json"], ) def stats(self, node_id=None, params=None, headers=None): """ Returns a high-level overview of cluster statistics. ``_ :arg node_id: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg flat_settings: Return settings in flat format (default: false) :arg timeout: Explicit operation timeout """ return self.transport.perform_request( "GET", "/_cluster/stats" if node_id in SKIP_IN_PATH else _make_path("_cluster", "stats", "nodes", node_id), params=params, headers=headers, ) @query_params( "dry_run", "explain", "master_timeout", "metric", "retry_failed", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def reroute(self, body=None, params=None, headers=None): """ Allows manual changes to the allocation of individual shards in the cluster. ``_ :arg body: The definition of `commands` to perform (`move`, `cancel`, `allocate`) :arg dry_run: Simulate the operation only and return the resulting state :arg explain: Return an explanation of why the commands can or cannot be executed :arg master_timeout: Explicit operation timeout for connection to master node :arg metric: Limit the information returned to the specified metrics. Defaults to all but metadata Valid choices: _all, blocks, metadata, nodes, routing_table, master_node, version :arg retry_failed: Retries allocation of shards that are blocked due to too many subsequent allocation failures :arg timeout: Explicit operation timeout """ return self.transport.perform_request( "POST", "/_cluster/reroute", params=params, headers=headers, body=body ) @query_params( "flat_settings", "include_defaults", "master_timeout", "timeout", response_mimetypes=["application/json"], ) def get_settings(self, params=None, headers=None): """ Returns cluster settings. ``_ :arg flat_settings: Return settings in flat format (default: false) :arg include_defaults: Whether to return all default cluster settings.
:arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ return self.transport.perform_request( "GET", "/_cluster/settings", params=params, headers=headers ) @query_params( "flat_settings", "master_timeout", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_settings(self, body, params=None, headers=None): """ Updates the cluster settings. ``_ :arg body: The settings to be updated. Can be either `transient` or `persistent` (survives cluster restart). :arg flat_settings: Return settings in flat format (default: false) :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PUT", "/_cluster/settings", params=params, headers=headers, body=body ) @query_params( response_mimetypes=["application/json"], ) def remote_info(self, params=None, headers=None): """ Returns the information about configured remote clusters. ``_ """ return self.transport.perform_request( "GET", "/_remote/info", params=params, headers=headers ) @query_params( "include_disk_info", "include_yes_decisions", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def allocation_explain(self, body=None, params=None, headers=None): """ Provides explanations for shard allocations in the cluster. ``_ :arg body: The index, shard, and primary flag to explain. Empty means 'explain a randomly-chosen unassigned shard' :arg include_disk_info: Return information about disk usage and shard sizes (default: false) :arg include_yes_decisions: Return 'YES' decisions in explanation (default: false) """ return self.transport.perform_request( "POST", "/_cluster/allocation/explain", params=params, headers=headers, body=body, ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) def delete_component_template(self, name, params=None, headers=None): """ Deletes a component template ``_ :arg name: The name of the template :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "DELETE", _make_path("_component_template", name), params=params, headers=headers, ) @query_params( "local", "master_timeout", response_mimetypes=["application/json"], ) def get_component_template(self, name=None, params=None, headers=None): """ Returns one or more component templates ``_ :arg name: The comma separated names of the component templates :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ return self.transport.perform_request( "GET", _make_path("_component_template", name), params=params, headers=headers, ) @query_params( "create", "master_timeout", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_component_template(self, name, body, params=None, headers=None): """ Creates or updates a component template ``_ :arg name: The name of the template :arg body: The template definition :arg create: Whether the index template should only be added if new or can also replace an existing one :arg master_timeout: Specify timeout for 
connection to master :arg timeout: Explicit operation timeout """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_component_template", name), params=params, headers=headers, body=body, ) @query_params( "local", "master_timeout", response_mimetypes=["application/json"], ) def exists_component_template(self, name, params=None, headers=None): """ Returns information about whether a particular component template exists ``_ :arg name: The name of the template :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "HEAD", _make_path("_component_template", name), params=params, headers=headers, ) @query_params( "wait_for_removal", response_mimetypes=["application/json"], ) def delete_voting_config_exclusions(self, params=None, headers=None): """ Clears cluster voting config exclusions. ``_ :arg wait_for_removal: Specifies whether to wait for all excluded nodes to be removed from the cluster before clearing the voting configuration exclusions list. Default: True """ return self.transport.perform_request( "DELETE", "/_cluster/voting_config_exclusions", params=params, headers=headers, ) @query_params( "node_ids", "node_names", "timeout", response_mimetypes=["application/json"], ) def post_voting_config_exclusions(self, params=None, headers=None): """ Updates the cluster voting config exclusions by node ids or node names. ``_ :arg node_ids: A comma-separated list of the persistent ids of the nodes to exclude from the voting configuration. If specified, you may not also specify ?node_names. :arg node_names: A comma-separated list of the names of the nodes to exclude from the voting configuration. If specified, you may not also specify ?node_ids. :arg timeout: Explicit operation timeout Default: 30s """ return self.transport.perform_request( "POST", "/_cluster/voting_config_exclusions", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/cluster.pyi000066400000000000000000000335721426163262700233670ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License.
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class ClusterClient(NamespacedClient): def health( self, *, index: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., level: Optional[Any] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., wait_for_events: Optional[Any] = ..., wait_for_no_initializing_shards: Optional[bool] = ..., wait_for_no_relocating_shards: Optional[bool] = ..., wait_for_nodes: Optional[Any] = ..., wait_for_status: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def pending_tasks( self, *, local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def state( self, *, metric: Optional[Any] = ..., index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., flat_settings: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., wait_for_metadata_version: Optional[Any] = ..., wait_for_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stats( self, *, node_id: Optional[Any] = ..., flat_settings: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
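# NOTE (editor's sketch, not part of the original stubs): hypothetical usage
# of ``health`` from this file, assuming a client ``es``; the values are
# illustrative. The call blocks until the cluster reaches at least yellow
# status or the timeout elapses:
#
#     es.cluster.health(wait_for_status="yellow", timeout="30s")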
def reroute( self, *, body: Optional[Mapping[str, Any]] = ..., dry_run: Optional[bool] = ..., explain: Optional[bool] = ..., master_timeout: Optional[Any] = ..., metric: Optional[Any] = ..., retry_failed: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_settings( self, *, flat_settings: Optional[bool] = ..., include_defaults: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_settings( self, *, body: Mapping[str, Any], flat_settings: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def remote_info( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def allocation_explain( self, *, body: Optional[Mapping[str, Any]] = ..., include_disk_info: Optional[bool] = ..., include_yes_decisions: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
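# NOTE (editor's sketch, not part of the original stubs): a hypothetical
# call of ``put_settings`` above, assuming a client ``es``; the setting and
# value are illustrative. Transient settings take effect immediately but do
# not survive a full cluster restart:
#
#     es.cluster.put_settings(
#         body={"transient": {"cluster.routing.allocation.enable": "primaries"}}
#     )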
def delete_component_template( self, *, name: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_component_template( self, *, name: Optional[Any] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_component_template( self, *, name: Any, body: Mapping[str, Any], create: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def exists_component_template( self, *, name: Any, local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... def delete_voting_config_exclusions( self, *, wait_for_removal: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
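# --- Usage sketch (illustrative; not part of the library source) ----------
# A round trip through the component-template methods stubbed above; the
# template name and settings are hypothetical.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # hypothetical local node
es.cluster.put_component_template(
    name="common-settings",
    body={"template": {"settings": {"index.number_of_shards": 1}}},
)
if es.cluster.exists_component_template(name="common-settings"):
    tpl = es.cluster.get_component_template(name="common-settings")
    print(tpl["component_templates"][0]["name"])
# ---------------------------------------------------------------------------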
def post_voting_config_exclusions( self, *, node_ids: Optional[Any] = ..., node_names: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/dangling_indices.py000066400000000000000000000063441426163262700250170ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class DanglingIndicesClient(NamespacedClient): @query_params( "accept_data_loss", "master_timeout", "timeout", response_mimetypes=["application/json"], ) def delete_dangling_index(self, index_uuid, params=None, headers=None): """ Deletes the specified dangling index ``_ :arg index_uuid: The UUID of the dangling index :arg accept_data_loss: Must be set to true in order to delete the dangling index :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if index_uuid in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index_uuid'.") return self.transport.perform_request( "DELETE", _make_path("_dangling", index_uuid), params=params, headers=headers, ) @query_params( "accept_data_loss", "master_timeout", "timeout", response_mimetypes=["application/json"], ) def import_dangling_index(self, index_uuid, params=None, headers=None): """ Imports the specified dangling index ``_ :arg index_uuid: The UUID of the dangling index :arg accept_data_loss: Must be set to true in order to import the dangling index :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if index_uuid in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index_uuid'.") return self.transport.perform_request( "POST", _make_path("_dangling", index_uuid), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def list_dangling_indices(self, params=None, headers=None): """ Returns all dangling indices. ``_ """ return self.transport.perform_request( "GET", "/_dangling", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/dangling_indices.pyi000066400000000000000000000065041426163262700251660ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. 
under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class DanglingIndicesClient(NamespacedClient): def delete_dangling_index( self, *, index_uuid: Any, accept_data_loss: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def import_dangling_index( self, *, index_uuid: Any, accept_data_loss: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def list_dangling_indices( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/data_frame.py000066400000000000000000000135161426163262700236200ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class Data_FrameClient(NamespacedClient): @query_params() def delete_data_frame_transform(self, transform_id, params=None, headers=None): """ ``_ :arg transform_id: The id of the transform to delete """ if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return self.transport.perform_request( "DELETE", _make_path("_data_frame", "transforms", transform_id), params=params, headers=headers, ) @query_params("from_", "size") def get_data_frame_transform(self, transform_id=None, params=None, headers=None): """ ``_ :arg transform_id: The id or comma delimited list of id expressions of the transforms to get, '_all' or '*' implies get all transforms :arg from_: skips a number of transform configs, defaults to 0 :arg size: specifies a max number of transforms to get, defaults to 100 """ return self.transport.perform_request( "GET", _make_path("_data_frame", "transforms", transform_id), params=params, headers=headers, ) @query_params() def get_data_frame_transform_stats( self, transform_id=None, params=None, headers=None ): """ ``_ :arg transform_id: The id of the transform for which to get stats. '_all' or '*' implies all transforms """ return self.transport.perform_request( "GET", _make_path("_data_frame", "transforms", transform_id, "_stats"), params=params, headers=headers, ) @query_params() def preview_data_frame_transform(self, body, params=None, headers=None): """ ``_ :arg body: The definition for the data_frame transform to preview """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_data_frame/transforms/_preview", params=params, headers=headers, body=body, ) @query_params() def put_data_frame_transform(self, transform_id, body, params=None, headers=None): """ ``_ :arg transform_id: The id of the new transform. :arg body: The data frame transform definition """ for param in (transform_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_data_frame", "transforms", transform_id), params=params, headers=headers, body=body, ) @query_params("timeout") def start_data_frame_transform(self, transform_id, params=None, headers=None): """ ``_ :arg transform_id: The id of the transform to start :arg timeout: Controls the time to wait for the transform to start """ if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return self.transport.perform_request( "POST", _make_path("_data_frame", "transforms", transform_id, "_start"), params=params, headers=headers, ) @query_params("timeout", "wait_for_completion") def stop_data_frame_transform(self, transform_id, params=None, headers=None): """ ``_ :arg transform_id: The id of the transform to stop :arg timeout: Controls the time to wait until the transform has stopped. 
Defaults to 30 seconds :arg wait_for_completion: Whether to wait for the transform to fully stop before returning or not. Defaults to false """ if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return self.transport.perform_request( "POST", _make_path("_data_frame", "transforms", transform_id, "_stop"), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/client/data_frame.pyi000066400000000000000000000126441426163262700237700ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class Data_FrameClient(NamespacedClient): def delete_data_frame_transform( self, transform_id: Any, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... def get_data_frame_transform( self, *, transform_id: Optional[Any] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... def get_data_frame_transform_stats( self, transform_id: Optional[Any] = ..., *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ...
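# --- Usage sketch (illustrative; not part of the library source) ----------
# An end-to-end flow for the data-frame transform API above, assuming the
# client exposes this namespace as `es.data_frame`; the transform id,
# indices, and field names are hypothetical.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # hypothetical local node
transform_body = {
    "source": {"index": "ecommerce"},
    "dest": {"index": "ecommerce-by-customer"},
    "pivot": {
        "group_by": {"customer_id": {"terms": {"field": "customer_id"}}},
        "aggregations": {"total": {"sum": {"field": "taxful_total_price"}}},
    },
}
es.data_frame.put_data_frame_transform("ecommerce-by-customer", body=transform_body)
es.data_frame.start_data_frame_transform("ecommerce-by-customer", timeout="30s")
stats = es.data_frame.get_data_frame_transform_stats("ecommerce-by-customer")
# ---------------------------------------------------------------------------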
def preview_data_frame_transform( self, *, body: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... def put_data_frame_transform( self, transform_id: Any, *, body: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... def start_data_frame_transform( self, transform_id: Any, *, timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... def stop_data_frame_transform( self, transform_id: Any, *, timeout: Optional[Any] = ..., wait_for_completion: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... elasticsearch-py-7.17.6/elasticsearch/client/deprecation.py000066400000000000000000000024541426163262700240310ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, _make_path, query_params class DeprecationClient(NamespacedClient): @query_params() def info(self, index=None, params=None, headers=None): """ ``_ :arg index: Index pattern """ return self.transport.perform_request( "GET", _make_path(index, "_xpack", "migration", "deprecations"), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/client/deprecation.pyi000066400000000000000000000031761426163262700242040ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. 
under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class DeprecationClient(NamespacedClient): def info( self, *, index: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Any: ... elasticsearch-py-7.17.6/elasticsearch/client/enrich.py000066400000000000000000000102461426163262700230020ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class EnrichClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def delete_policy(self, name, params=None, headers=None): """ Deletes an existing enrich policy and its enrich index. ``_ :arg name: The name of the enrich policy """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "DELETE", _make_path("_enrich", "policy", name), params=params, headers=headers, ) @query_params( "wait_for_completion", response_mimetypes=["application/json"], ) def execute_policy(self, name, params=None, headers=None): """ Creates the enrich index for an existing enrich policy. ``_ :arg name: The name of the enrich policy :arg wait_for_completion: Should the request block until the execution is complete.
Default: True """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "PUT", _make_path("_enrich", "policy", name, "_execute"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def get_policy(self, name=None, params=None, headers=None): """ Gets information about an enrich policy. ``_ :arg name: A comma-separated list of enrich policy names """ return self.transport.perform_request( "GET", _make_path("_enrich", "policy", name), params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_policy(self, name, body, params=None, headers=None): """ Creates a new enrich policy. ``_ :arg name: The name of the enrich policy :arg body: The enrich policy to register """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_enrich", "policy", name), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def stats(self, params=None, headers=None): """ Gets enrich coordinator statistics and information about enrich policies that are currently executing. ``_ """ return self.transport.perform_request( "GET", "/_enrich/_stats", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/enrich.pyi000066400000000000000000000110701426163262700231470ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class EnrichClient(NamespacedClient): def delete_policy( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
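# --- Usage sketch (illustrative; not part of the library source) ----------
# Creating and executing an enrich policy with the client methods above; the
# policy name, source index, and field names are hypothetical.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # hypothetical local node
es.enrich.put_policy(
    name="users-policy",
    body={
        "match": {
            "indices": "users",
            "match_field": "email",
            "enrich_fields": ["first_name", "last_name"],
        }
    },
)
# Build the policy's enrich index; wait_for_completion=True blocks until done.
es.enrich.execute_policy(name="users-policy", wait_for_completion=True)
# ---------------------------------------------------------------------------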
def execute_policy( self, *, name: Any, wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_policy( self, *, name: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_policy( self, *, name: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stats( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/eql.py000066400000000000000000000112301426163262700223050ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
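# --- Usage sketch (illustrative; not part of the library source) ----------
# Running an EQL event query with the search method defined below; the index
# name, query string, and document fields are hypothetical.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # hypothetical local node
resp = es.eql.search(
    index="my-security-logs",
    body={"query": 'process where process.name == "regsvr32.exe"'},
)
# Plain (non-sequence) EQL queries return matches under hits.events.
for event in resp["hits"]["events"]:
    print(event["_source"])
# ---------------------------------------------------------------------------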
from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class EqlClient(NamespacedClient): @query_params( "keep_alive", "keep_on_completion", "wait_for_completion_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def search(self, index, body, params=None, headers=None): """ Returns results matching a query expressed in Event Query Language (EQL) ``_ :arg index: The name of the index to scope the operation :arg body: Eql request body. Use the `query` to limit the query scope. :arg keep_alive: Update the time interval in which the results (partial or final) for this search will be available Default: 5d :arg keep_on_completion: Control whether the response should be stored in the cluster if it completed within the provided [wait_for_completion] time (default: false) :arg wait_for_completion_timeout: Specify the time that the request should block waiting for the final response """ for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "POST", _make_path(index, "_eql", "search"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def delete(self, id, params=None, headers=None): """ Deletes an async EQL search by ID. If the search is still running, the search request will be cancelled. Otherwise, the saved search results are deleted. ``_ :arg id: The async search ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "DELETE", _make_path("_eql", "search", id), params=params, headers=headers ) @query_params( "keep_alive", "wait_for_completion_timeout", response_mimetypes=["application/json"], ) def get(self, id, params=None, headers=None): """ Returns async results from previously executed Event Query Language (EQL) search ``_ :arg id: The async search ID :arg keep_alive: Update the time interval in which the results (partial or final) for this search will be available Default: 5d :arg wait_for_completion_timeout: Specify the time that the request should block waiting for the final response """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "GET", _make_path("_eql", "search", id), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_status(self, id, params=None, headers=None): """ Returns the status of a previously submitted async or stored Event Query Language (EQL) search ``_ :arg id: The async search ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "GET", _make_path("_eql", "search", "status", id), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/client/eql.pyi000066400000000000000000000100311426163262700224540ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class EqlClient(NamespacedClient): def search( self, *, index: Any, body: Mapping[str, Any], keep_alive: Optional[Any] = ..., keep_on_completion: Optional[bool] = ..., wait_for_completion_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get( self, *, id: Any, keep_alive: Optional[Any] = ..., wait_for_completion_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_status( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/features.py000066400000000000000000000041021426163262700233420ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, query_params class FeaturesClient(NamespacedClient): @query_params( "master_timeout", response_mimetypes=["application/json"], ) def get_features(self, params=None, headers=None): """ Gets a list of features which can be included in snapshots using the feature_states field when creating a snapshot ``_ :arg master_timeout: Explicit operation timeout for connection to master node """ return self.transport.perform_request( "GET", "/_features", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def reset_features(self, params=None, headers=None): """ Resets the internal state of features, usually by deleting system indices ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version """ return self.transport.perform_request( "POST", "/_features/_reset", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/features.pyi000066400000000000000000000045341426163262700235240ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class FeaturesClient(NamespacedClient): def get_features( self, *, master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
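# --- Usage sketch (illustrative; not part of the library source) ----------
# Listing the features that can be captured in snapshot feature states via
# the get_features method above.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # hypothetical local node
for feature in es.features.get_features()["features"]:
    print(feature["name"])
# ---------------------------------------------------------------------------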
def reset_features( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/fleet.py000066400000000000000000000114201426163262700226240ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _bulk_body, _make_path, query_params class FleetClient(NamespacedClient): @query_params( "checkpoints", "timeout", "wait_for_advance", "wait_for_index", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def global_checkpoints(self, index, params=None, headers=None): """ Returns the current global checkpoints for an index. This API is designed for internal use by the fleet server project. ``_ :arg index: The name of the index. :arg checkpoints: Comma separated list of checkpoints :arg timeout: Timeout to wait for global checkpoint to advance Default: 30s :arg wait_for_advance: Whether to wait for the global checkpoint to advance past the specified current checkpoints Default: false :arg wait_for_index: Whether to wait for the target index to exist and all primary shards to be active Default: false """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "GET", _make_path(index, "_fleet", "global_checkpoints"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) def msearch(self, body, index=None, params=None, headers=None): """ Multi Search API where the search will only be executed after specified checkpoints are available due to a refresh. This API is designed for internal use by the fleet server project. ..
warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg body: The request definitions (metadata-fleet search request definition pairs), separated by newlines :arg index: The index name to use as the default """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return self.transport.perform_request( "POST", _make_path(index, "_fleet", "_fleet_msearch"), params=params, headers=headers, body=body, ) @query_params( "allow_partial_search_results", "wait_for_checkpoints", "wait_for_checkpoints_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def search(self, index, body=None, params=None, headers=None): """ Search API where the search will only be executed after specified checkpoints are available due to a refresh. This API is designed for internal use by the fleet server project. .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: The index name to search. :arg body: The search definition using the Query DSL :arg allow_partial_search_results: Indicate if an error should be returned if there is a partial search failure or timeout Default: True :arg wait_for_checkpoints: Comma separated list of checkpoints, one per shard :arg wait_for_checkpoints_timeout: Explicit wait_for_checkpoints timeout """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_fleet", "_fleet_search"), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/client/fleet.pyi000066400000000000000000000070611426163262700230030ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Sequence, Tuple, Union, ) from .utils import NamespacedClient class FleetClient(NamespacedClient): def global_checkpoints( self, *, index: Any, checkpoints: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_advance: Optional[bool] = ..., wait_for_index: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
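# --- Usage sketch (illustrative; not part of the library source) ----------
# Polling global checkpoints with the method stubbed above; the index name
# and checkpoint value are hypothetical.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # hypothetical local node
resp = es.fleet.global_checkpoints(
    index="my-fleet-index",
    wait_for_advance=True,
    wait_for_index=True,
    checkpoints="0",  # comma separated list of current checkpoints
    timeout="30s",
)
print(resp["global_checkpoints"])
# ---------------------------------------------------------------------------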
def msearch( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], index: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def search( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., allow_partial_search_results: Optional[bool] = ..., wait_for_checkpoints: Optional[Any] = ..., wait_for_checkpoints_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/graph.py000066400000000000000000000041141426163262700226300ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class GraphClient(NamespacedClient): @query_params( "routing", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def explore(self, index, body=None, doc_type=None, params=None, headers=None): """ Explore extracted and summarized information about the documents and terms in an index. ``_ :arg index: A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices :arg body: Graph Query DSL :arg doc_type: A comma-separated list of document types to search; leave empty to perform the operation on all types :arg routing: Specific routing value :arg timeout: Explicit operation timeout """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, doc_type, "_graph", "explore"), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/client/graph.pyi000066400000000000000000000035161426163262700230060ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. 
under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class GraphClient(NamespacedClient): def explore( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., doc_type: Optional[Any] = ..., routing: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/ilm.py000066400000000000000000000207511426163262700223150ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IlmClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def delete_lifecycle(self, policy, params=None, headers=None): """ Deletes the specified lifecycle policy definition. A currently used policy cannot be deleted. ``_ :arg policy: The name of the index lifecycle policy """ if policy in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'policy'.") return self.transport.perform_request( "DELETE", _make_path("_ilm", "policy", policy), params=params, headers=headers, ) @query_params( "only_errors", "only_managed", response_mimetypes=["application/json"], ) def explain_lifecycle(self, index, params=None, headers=None): """ Retrieves information about the index's current lifecycle state, such as the currently executing phase, action, and step. 
``_ :arg index: The name of the index to explain :arg only_errors: filters the indices included in the response to ones in an ILM error state, implies only_managed :arg only_managed: filters the indices included in the response to ones managed by ILM """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "GET", _make_path(index, "_ilm", "explain"), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_lifecycle(self, policy=None, params=None, headers=None): """ Returns the specified policy definition. Includes the policy version and last modified date. ``_ :arg policy: The name of the index lifecycle policy """ return self.transport.perform_request( "GET", _make_path("_ilm", "policy", policy), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_status(self, params=None, headers=None): """ Retrieves the current index lifecycle management (ILM) status. ``_ """ return self.transport.perform_request( "GET", "/_ilm/status", params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def move_to_step(self, index, body=None, params=None, headers=None): """ Manually moves an index into the specified step and executes that step. ``_ :arg index: The name of the index whose lifecycle step is to change :arg body: The new lifecycle step to move to """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path("_ilm", "move", index), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_lifecycle(self, policy, body=None, params=None, headers=None): """ Creates a lifecycle policy ``_ :arg policy: The name of the index lifecycle policy :arg body: The lifecycle policy definition to register """ if policy in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'policy'.") return self.transport.perform_request( "PUT", _make_path("_ilm", "policy", policy), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def remove_policy(self, index, params=None, headers=None): """ Removes the assigned lifecycle policy and stops managing the specified index ``_ :arg index: The name of the index to remove policy on """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_ilm", "remove"), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def retry(self, index, params=None, headers=None): """ Retries executing the policy for an index that is in the ERROR step. ``_ :arg index: The name of the indices (comma-separated) whose failed lifecycle step is to be retried """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_ilm", "retry"), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def start(self, params=None, headers=None): """ Start the index lifecycle management (ILM) plugin.
``_ """ return self.transport.perform_request( "POST", "/_ilm/start", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def stop(self, params=None, headers=None): """ Halts all lifecycle management operations and stops the index lifecycle management (ILM) plugin ``_ """ return self.transport.perform_request( "POST", "/_ilm/stop", params=params, headers=headers ) @query_params( "dry_run", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def migrate_to_data_tiers(self, body=None, params=None, headers=None): """ Migrates the indices and ILM policies away from custom node attribute allocation routing to data tiers routing ``_ :arg body: Optionally specify a legacy index template name to delete and optionally specify a node attribute name used for index shard routing (defaults to "data") :arg dry_run: If set to true it will simulate the migration, providing a way to retrieve the ILM policies and indices that need to be migrated. The default is false """ return self.transport.perform_request( "POST", "/_ilm/migrate_to_data_tiers", params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/client/ilm.pyi000066400000000000000000000216501426163262700224650ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class IlmClient(NamespacedClient): def delete_lifecycle( self, *, policy: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def explain_lifecycle( self, *, index: Any, only_errors: Optional[bool] = ..., only_managed: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def get_lifecycle( self, *, policy: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_status( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def move_to_step( self, *, index: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_lifecycle( self, *, policy: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def remove_policy( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def retry( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def start( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stop( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def migrate_to_data_tiers( self, *, body: Optional[Mapping[str, Any]] = ..., dry_run: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/indices.py000066400000000000000000002352671426163262700231640ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
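# Usage sketch (illustrative, not part of the original module): the methods in
# this module are reached through a configured top-level client, e.g.
#
#     from elasticsearch import Elasticsearch
#     es = Elasticsearch("http://localhost:9200")  # URL is an assumption
#     es.indices.refresh(index="my-index")  # "my-index" is hypothetical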
from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndicesClient(NamespacedClient): @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def analyze(self, body=None, index=None, params=None, headers=None): """ Performs the analysis process on a text and returns the token breakdown of the text. ``_ :arg body: Define analyzer/tokenizer parameters and the text on which the analysis should be performed :arg index: The name of the index to scope the operation """ return self.transport.perform_request( "POST", _make_path(index, "_analyze"), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", response_mimetypes=["application/json"], ) def refresh(self, index=None, params=None, headers=None): """ Performs the refresh operation on one or more indices. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) """ return self.transport.perform_request( "POST", _make_path(index, "_refresh"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "force", "ignore_unavailable", "wait_if_ongoing", response_mimetypes=["application/json"], ) def flush(self, index=None, params=None, headers=None): """ Performs the flush operation on one or more indices. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string for all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg force: Whether a flush should be forced even if it is not necessarily needed, i.e. if no changes will be committed to the index. This is useful if transaction log IDs should be incremented even if no uncommitted changes are present. (This setting can be considered internal) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg wait_if_ongoing: If set to true the flush operation will block until the flush can be executed if another flush operation is already executing. The default is true. If set to false the flush will be skipped if another flush operation is already running. """ return self.transport.perform_request( "POST", _make_path(index, "_flush"), params=params, headers=headers ) @query_params( "include_type_name", "master_timeout", "timeout", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], body_params=["aliases", "mappings", "settings"], ) def create(self, index, body=None, params=None, headers=None): """ Creates an index with optional settings and mappings.
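Example (a minimal sketch, not part of the original docstring; ``es`` and the index name are assumptions)::

    es.indices.create(
        index="my-index",
        body={
            "settings": {"number_of_shards": 1},
            "mappings": {"properties": {"title": {"type": "text"}}},
        },
    )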
``_ :arg index: The name of the index :arg body: The configuration for the index (`settings` and `mappings`) :arg aliases: :arg include_type_name: Whether a type should be expected in the body of the mappings. :arg mappings: Mapping for fields in the index. If specified, this mapping can include: - Field names - Field data types - Mapping parameters :arg master_timeout: Specify timeout for connection to master :arg settings: :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Set the number of active shards to wait for before the operation returns. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "PUT", _make_path(index), params=params, headers=headers, body=body ) @query_params( "master_timeout", "timeout", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def clone(self, index, target, body=None, params=None, headers=None): """ Clones an index ``_ :arg index: The name of the source index to clone :arg target: The name of the target index to clone into :arg body: The configuration for the target index (`settings` and `aliases`) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Set the number of active shards to wait for on the cloned index before the operation returns. """ for param in (index, target): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path(index, "_clone", target), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "include_type_name", "local", "master_timeout", response_mimetypes=["application/json"], ) def get(self, index, params=None, headers=None): """ Returns information about one or more indices. ``_ :arg index: Comma-separated list of data streams, indices, and index aliases used to limit the request. Wildcard expressions (*) are supported. :arg allow_no_indices: Ignore if a wildcard expression resolves to no concrete indices (default: false) Default: True :arg expand_wildcards: Type of index that wildcard expressions can match. If the request can target data streams, this argument determines whether wildcard expressions match hidden data streams. Supports comma-separated values, such as open,hidden. Valid choices: open, closed, hidden, none, all Default: open :arg flat_settings: If true, returns settings in flat format. :arg ignore_unavailable: If false, requests that target a missing index return an error. :arg include_defaults: If true, return all default settings in the response. :arg include_type_name: If true, a mapping type is expected in the body of mappings. :arg local: If true, the request retrieves information from the local node only. Defaults to false, which means information is retrieved from the master node. :arg master_timeout: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. 
Default: 30s """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "GET", _make_path(index), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "master_timeout", "timeout", "wait_for_active_shards", response_mimetypes=["application/json"], ) def open(self, index, params=None, headers=None): """ Opens an index. ``_ :arg index: A comma separated list of indices to open :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: closed :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Sets the number of active shards to wait for before the operation returns. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_open"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "master_timeout", "timeout", "wait_for_active_shards", response_mimetypes=["application/json"], ) def close(self, index, params=None, headers=None): """ Closes an index. ``_ :arg index: A comma separated list of indices to close :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Sets the number of active shards to wait for before the operation returns. Set to `index-setting` to wait according to the index setting `index.write.wait_for_active_shards`, or `all` to wait for all shards, or an integer. Defaults to `0`. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_close"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "master_timeout", "timeout", response_mimetypes=["application/json"], ) def delete(self, index, params=None, headers=None): """ Deletes an index. 
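Example (a sketch, not part of the original docstring; ``es`` and the index name are assumptions)::

    es.indices.delete(index="my-index", ignore_unavailable=True)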
``_ :arg index: A comma-separated list of indices to delete; use `_all` or `*` string to delete all indices :arg allow_no_indices: Ignore if a wildcard expression resolves to no concrete indices (default: false) :arg expand_wildcards: Whether wildcard expressions should get expanded to open, closed, or hidden indices Valid choices: open, closed, hidden, none, all Default: open,closed :arg ignore_unavailable: Ignore unavailable indexes (default: false) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "DELETE", _make_path(index), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "local", response_mimetypes=["application/json"], ) def exists(self, index, params=None, headers=None): """ Returns information about whether a particular index exists. ``_ :arg index: A comma-separated list of index names :arg allow_no_indices: Ignore if a wildcard expression resolves to no concrete indices (default: false) :arg expand_wildcards: Whether wildcard expressions should get expanded to open or closed indices (default: open) Valid choices: open, closed, hidden, none, all Default: open :arg flat_settings: Return settings in flat format (default: false) :arg ignore_unavailable: Ignore unavailable indexes (default: false) :arg include_defaults: Whether to return all default setting for each of the indices. :arg local: Return local information, do not retrieve the state from master node (default: false) """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "HEAD", _make_path(index), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "local", response_mimetypes=["application/json"], ) def exists_type(self, index, doc_type, params=None, headers=None): """ Returns information about whether a particular document type exists. (DEPRECATED) ``_ :arg index: A comma-separated list of index names; use `_all` to check the types across all indices :arg doc_type: A comma-separated list of document types to check :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) """ for param in (index, doc_type): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "HEAD", _make_path(index, "_mapping", doc_type), params=params, headers=headers, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "include_type_name", "master_timeout", "timeout", "write_index_only", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_mapping(self, body, index=None, doc_type=None, params=None, headers=None): """ Updates the index mappings. 
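Example (a sketch, not part of the original docstring; ``es``, the index name, and the field name are assumptions)::

    es.indices.put_mapping(
        index="my-index",
        body={"properties": {"created_at": {"type": "date"}}},
    )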
``_ :arg body: The mapping definition :arg index: A comma-separated list of index names the mapping should be added to (supports wildcards); use `_all` or omit to add the mapping on all indices. :arg doc_type: The name of the document type :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg include_type_name: Whether a type should be expected in the body of the mappings. :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg write_index_only: When true, applies mappings only to the write index of an alias or data stream """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") if doc_type not in SKIP_IN_PATH and index in SKIP_IN_PATH: index = "_all" return self.transport.perform_request( "PUT", _make_path(index, doc_type, "_mapping"), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "include_type_name", "local", "master_timeout", response_mimetypes=["application/json"], ) def get_mapping(self, index=None, doc_type=None, params=None, headers=None): """ Returns mappings for one or more indices. ``_ :arg index: A comma-separated list of index names :arg doc_type: A comma-separated list of document types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg include_type_name: Whether to add the type name to the response (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Specify timeout for connection to master """ return self.transport.perform_request( "GET", _make_path(index, "_mapping", doc_type), params=params, headers=headers, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "include_defaults", "include_type_name", "local", response_mimetypes=["application/json"], ) def get_field_mapping( self, fields, index=None, doc_type=None, params=None, headers=None ): """ Returns mapping for one or more fields. ``_ :arg fields: A comma-separated list of fields :arg index: A comma-separated list of index names :arg doc_type: A comma-separated list of document types :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg include_defaults: Whether the default mapping values should be returned as well :arg include_type_name: Whether a type should be returned in the body of the mappings. :arg local: Return local information, do not retrieve the state from master node (default: false) """ if fields in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'fields'.") return self.transport.perform_request( "GET", _make_path(index, "_mapping", doc_type, "field", fields), params=params, headers=headers, ) @query_params( "master_timeout", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_alias(self, index, name, body=None, params=None, headers=None): """ Creates or updates an alias. ``_ :arg index: A comma-separated list of index names the alias should point to (supports wildcards); use `_all` to perform the operation on all indices. :arg name: The name of the alias to be created or updated :arg body: The settings for the alias, such as `routing` or `filter` :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit timestamp for the document """ for param in (index, name): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path(index, "_alias", name), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "local", response_mimetypes=["application/json"], ) def exists_alias(self, name, index=None, params=None, headers=None): """ Returns information about whether a particular alias exists. ``_ :arg name: A comma-separated list of alias names to return :arg index: A comma-separated list of index names to filter aliases :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: all :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "HEAD", _make_path(index, "_alias", name), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "local", response_mimetypes=["application/json"], ) def get_alias(self, index=None, name=None, params=None, headers=None): """ Returns an alias. ``_ :arg index: A comma-separated list of index names to filter aliases :arg name: A comma-separated list of alias names to return :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, hidden, none, all Default: all :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) """ return self.transport.perform_request( "GET", _make_path(index, "_alias", name), params=params, headers=headers ) @query_params( "master_timeout", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def update_aliases(self, body, params=None, headers=None): """ Updates index aliases. ``_ :arg body: The definition of `actions` to perform :arg master_timeout: Specify timeout for connection to master :arg timeout: Request timeout """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_aliases", params=params, headers=headers, body=body ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) def delete_alias(self, index, name, params=None, headers=None): """ Deletes an alias. ``_ :arg index: A comma-separated list of index names (supports wildcards); use `_all` for all indices :arg name: A comma-separated list of aliases to delete (supports wildcards); use `_all` to delete all aliases for the specified indices. :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit timestamp for the document """ for param in (index, name): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "DELETE", _make_path(index, "_aliases", name), params=params, headers=headers, ) @query_params( "create", "include_type_name", "master_timeout", "order", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_template(self, name, body, params=None, headers=None): """ Creates or updates an index template. ``_ :arg name: The name of the template :arg body: The template definition :arg create: Whether the index template should only be added if new or can also replace an existing one :arg include_type_name: Whether a type should be returned in the body of the mappings. :arg master_timeout: Specify timeout for connection to master :arg order: The order for this template when merging multiple matching ones (higher numbers are merged later, overriding the lower numbers) """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_template", name), params=params, headers=headers, body=body, ) @query_params( "flat_settings", "local", "master_timeout", response_mimetypes=["application/json"], ) def exists_template(self, name, params=None, headers=None): """ Returns information about whether a particular index template exists. 
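Example (a sketch, not part of the original docstring; ``es`` and the template name are assumptions; HEAD-based APIs in this client return a boolean)::

    has_template = es.indices.exists_template(name="my-template")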
``_ :arg name: The comma separated names of the index templates :arg flat_settings: Return settings in flat format (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "HEAD", _make_path("_template", name), params=params, headers=headers ) @query_params( "flat_settings", "include_type_name", "local", "master_timeout", response_mimetypes=["application/json"], ) def get_template(self, name=None, params=None, headers=None): """ Returns an index template. ``_ :arg name: The comma separated names of the index templates :arg flat_settings: Return settings in flat format (default: false) :arg include_type_name: Whether a type should be returned in the body of the mappings. :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ return self.transport.perform_request( "GET", _make_path("_template", name), params=params, headers=headers ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) def delete_template(self, name, params=None, headers=None): """ Deletes an index template. ``_ :arg name: The name of the template :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "DELETE", _make_path("_template", name), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "local", "master_timeout", response_mimetypes=["application/json"], ) def get_settings(self, index=None, name=None, params=None, headers=None): """ Returns settings for one or more indices. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg name: The name of the settings that should be included :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: all :arg flat_settings: Return settings in flat format (default: false) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg include_defaults: Whether to return all default setting for each of the indices. :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Specify timeout for connection to master """ return self.transport.perform_request( "GET", _make_path(index, "_settings", name), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "flat_settings", "ignore_unavailable", "master_timeout", "preserve_existing", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_settings(self, body, index=None, params=None, headers=None): """ Updates the index settings. 
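Example (a sketch, not part of the original docstring; ``es`` and the index name are assumptions)::

    es.indices.put_settings(
        index="my-index",
        body={"index": {"number_of_replicas": 0}},
    )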
``_ :arg body: The index settings to be updated :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg flat_settings: Return settings in flat format (default: false) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg master_timeout: Specify timeout for connection to master :arg preserve_existing: Whether to update existing settings. If set to `true` existing settings on an index remain unchanged, the default is `false` :arg timeout: Explicit operation timeout """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PUT", _make_path(index, "_settings"), params=params, headers=headers, body=body, ) @query_params( "completion_fields", "expand_wildcards", "fielddata_fields", "fields", "forbid_closed_indices", "groups", "include_segment_file_sizes", "include_unloaded_segments", "level", "types", response_mimetypes=["application/json"], ) def stats(self, index=None, metric=None, params=None, headers=None): """ Provides statistics on operations happening in an index. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg metric: Limit the information returned to the specific metrics. :arg completion_fields: A comma-separated list of fields for `fielddata` and `suggest` index metric (supports wildcards) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg fielddata_fields: A comma-separated list of fields for `fielddata` index metric (supports wildcards) :arg fields: A comma-separated list of fields for `fielddata` and `completion` index metric (supports wildcards) :arg forbid_closed_indices: If set to false, stats will also be collected from closed indices if explicitly specified or if expand_wildcards expands to closed indices Default: True :arg groups: A comma-separated list of search groups for `search` index metric :arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only applies if segment stats are requested) :arg include_unloaded_segments: If set to true, segment stats will include stats for segments that are not currently loaded into memory :arg level: Return stats aggregated at cluster, index or shard level Valid choices: cluster, indices, shards Default: indices :arg types: A comma-separated list of document types for the `indexing` index metric """ return self.transport.perform_request( "GET", _make_path(index, "_stats", metric), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "verbose", response_mimetypes=["application/json"], ) def segments(self, index=None, params=None, headers=None): """ Provides low-level information about segments in a Lucene index.
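Example (a sketch, not part of the original docstring; ``es`` and the index name are assumptions)::

    es.indices.segments(index="my-index", verbose=True)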
``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg verbose: Includes detailed memory usage by Lucene. """ return self.transport.perform_request( "GET", _make_path(index, "_segments"), params=params, headers=headers ) @query_params( "all_shards", "allow_no_indices", "analyze_wildcard", "analyzer", "default_operator", "df", "expand_wildcards", "explain", "ignore_unavailable", "lenient", "q", "rewrite", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def validate_query( self, body=None, index=None, doc_type=None, params=None, headers=None ): """ Allows a user to validate a potentially expensive query without executing it. ``_ :arg body: The query definition specified with the Query DSL :arg index: A comma-separated list of index names to restrict the operation; use `_all` or empty string to perform the operation on all indices :arg doc_type: A comma-separated list of document types to restrict the operation; leave empty to perform the operation on all types :arg all_shards: Execute validation on all shards instead of one random shard per index :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg analyze_wildcard: Specify whether wildcard and prefix queries should be analyzed (default: false) :arg analyzer: The analyzer to use for the query string :arg default_operator: The default operator for query string query (AND or OR) Valid choices: AND, OR Default: OR :arg df: The field to use as default where no field prefix is given in the query string :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg explain: Return detailed information about the error :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg lenient: Specify whether format-based query failures (such as providing text to a numeric field) should be ignored :arg q: Query in the Lucene query string syntax :arg rewrite: Provide a more detailed explanation showing the actual Lucene query that will be executed. """ return self.transport.perform_request( "POST", _make_path(index, doc_type, "_validate", "query"), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "fielddata", "fields", "ignore_unavailable", "query", "request", response_mimetypes=["application/json"], ) def clear_cache(self, index=None, params=None, headers=None): """ Clears all or specific caches for one or more indices. ``_ :arg index: A comma-separated list of index name to limit the operation :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. 
(This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg fielddata: Clear field data :arg fields: A comma-separated list of fields to clear when using the `fielddata` parameter (default: all) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg query: Clear query caches :arg request: Clear request cache """ return self.transport.perform_request( "POST", _make_path(index, "_cache", "clear"), params=params, headers=headers ) @query_params( "active_only", "detailed", response_mimetypes=["application/json"], ) def recovery(self, index=None, params=None, headers=None): """ Returns information about ongoing index shard recoveries. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg active_only: Display only those recoveries that are currently ongoing :arg detailed: Whether to display detailed information about shard recovery """ return self.transport.perform_request( "GET", _make_path(index, "_recovery"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "only_ancient_segments", "wait_for_completion", response_mimetypes=["application/json"], ) def upgrade(self, index=None, params=None, headers=None): """ DEPRECATED Upgrades to the current version of Lucene. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg only_ancient_segments: If true, only ancient (an older Lucene major release) segments will be upgraded :arg wait_for_completion: Specify whether the request should block until all segments are upgraded (default: false) """ return self.transport.perform_request( "POST", _make_path(index, "_upgrade"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", response_mimetypes=["application/json"], ) def get_upgrade(self, index=None, params=None, headers=None): """ DEPRECATED Returns the progress status of the current upgrade. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both.
Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) """ return self.transport.perform_request( "GET", _make_path(index, "_upgrade"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", response_mimetypes=["application/json"], ) def flush_synced(self, index=None, params=None, headers=None): """ Performs a synced flush operation on one or more indices. Synced flush is deprecated and will be removed in 8.0. Use flush instead ``_ :arg index: A comma-separated list of index names; use `_all` or empty string for all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) """ return self.transport.perform_request( "POST", _make_path(index, "_flush", "synced"), params=params, headers=headers, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "status", response_mimetypes=["application/json"], ) def shard_stores(self, index=None, params=None, headers=None): """ Provides store information for shard copies of indices. ``_ :arg index: List of data streams, indices, and aliases used to limit the request. :arg allow_no_indices: If false, the request returns an error if any wildcard expression, index alias, or _all value targets only missing or closed indices. This behavior applies even if the request targets other open indices. :arg expand_wildcards: Type of index that wildcard patterns can match. If the request can target data streams, this argument determines whether wildcard expressions match hidden data streams. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: If true, missing or closed indices are not included in the response. :arg status: List of shard health statuses used to limit the request. Valid choices: green, yellow, red, all """ return self.transport.perform_request( "GET", _make_path(index, "_shard_stores"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "flush", "ignore_unavailable", "max_num_segments", "only_expunge_deletes", response_mimetypes=["application/json"], ) def forcemerge(self, index=None, params=None, headers=None): """ Performs the force merge operation on one or more indices. ``_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, hidden, none, all Default: open :arg flush: Specify whether the index should be flushed after performing the operation (default: true) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg max_num_segments: The number of segments the index should be merged into (default: dynamic) :arg only_expunge_deletes: Specify whether the operation should only expunge deleted documents """ return self.transport.perform_request( "POST", _make_path(index, "_forcemerge"), params=params, headers=headers ) @query_params( "copy_settings", "master_timeout", "timeout", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def shrink(self, index, target, body=None, params=None, headers=None): """ Allows you to shrink an existing index into a new index with fewer primary shards. ``_ :arg index: The name of the source index to shrink :arg target: The name of the target index to shrink into :arg body: The configuration for the target index (`settings` and `aliases`) :arg copy_settings: Whether or not to copy settings from the source index (defaults to false) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Set the number of active shards to wait for on the shrunken index before the operation returns. """ for param in (index, target): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path(index, "_shrink", target), params=params, headers=headers, body=body, ) @query_params( "copy_settings", "master_timeout", "timeout", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def split(self, index, target, body=None, params=None, headers=None): """ Allows you to split an existing index into a new index with more primary shards. ``_ :arg index: The name of the source index to split :arg target: The name of the target index to split into :arg body: The configuration for the target index (`settings` and `aliases`) :arg copy_settings: Whether or not to copy settings from the source index (defaults to false) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Set the number of active shards to wait for on the split index before the operation returns. """ for param in (index, target): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path(index, "_split", target), params=params, headers=headers, body=body, ) @query_params( "dry_run", "include_type_name", "master_timeout", "timeout", "wait_for_active_shards", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def rollover(self, alias, body=None, new_index=None, params=None, headers=None): """ Updates an alias to point to a new index when the existing index is considered to be too large or too old. ``_ :arg alias: The name of the alias to roll over :arg body: The conditions that need to be met for executing rollover :arg new_index: The name of the rollover index :arg dry_run: If set to true the rollover action will only be validated but not actually performed even if a condition matches. The default is false :arg include_type_name: Whether a type should be included in the body of the mappings.
:arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Set the number of active shards to wait for on the newly created rollover index before the operation returns. """ if alias in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'alias'.") return self.transport.perform_request( "POST", _make_path(alias, "_rollover", new_index), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "master_timeout", "timeout", "wait_for_active_shards", response_mimetypes=["application/json"], ) def freeze(self, index, params=None, headers=None): """ Freezes an index. A frozen index has almost no overhead on the cluster (except for maintaining its metadata in memory) and is read-only. ``_ :arg index: The name of the index to freeze :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: closed :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Sets the number of active shards to wait for before the operation returns. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_freeze"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "master_timeout", "timeout", "wait_for_active_shards", response_mimetypes=["application/json"], ) def unfreeze(self, index, params=None, headers=None): """ Unfreezes an index. When a frozen index is unfrozen, the index goes through the normal recovery process and becomes writeable again. ``_ :arg index: The name of the index to unfreeze :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: closed :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout :arg wait_for_active_shards: Sets the number of active shards to wait for before the operation returns. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_unfreeze"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", response_mimetypes=["application/json"], ) def reload_search_analyzers(self, index, params=None, headers=None): """ Reloads an index's search analyzers and their resources. ``_ :arg index: A comma-separated list of index names to reload analyzers for :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. 
(This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "GET", _make_path(index, "_reload_search_analyzers"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def create_data_stream(self, name, params=None, headers=None): """ Creates a data stream ``_ :arg name: The name of the data stream """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "PUT", _make_path("_data_stream", name), params=params, headers=headers ) @query_params( "expand_wildcards", response_mimetypes=["application/json"], ) def delete_data_stream(self, name, params=None, headers=None): """ Deletes a data stream. ``_ :arg name: A comma-separated list of data streams to delete; use `*` to delete all data streams :arg expand_wildcards: Whether wildcard expressions should get expanded to open or closed indices (default: open) Valid choices: open, closed, hidden, none, all Default: open """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "DELETE", _make_path("_data_stream", name), params=params, headers=headers ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) def delete_index_template(self, name, params=None, headers=None): """ Deletes an index template. ``_ :arg name: The name of the template :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "DELETE", _make_path("_index_template", name), params=params, headers=headers, ) @query_params( "flat_settings", "local", "master_timeout", response_mimetypes=["application/json"], ) def exists_index_template(self, name, params=None, headers=None): """ Returns information about whether a particular index template exists. ``_ :arg name: The name of the template :arg flat_settings: Return settings in flat format (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "HEAD", _make_path("_index_template", name), params=params, headers=headers ) @query_params( "flat_settings", "local", "master_timeout", response_mimetypes=["application/json"], ) def get_index_template(self, name=None, params=None, headers=None): """ Returns an index template. 
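Example (a sketch, not part of the original docstring; ``es`` and the template name are assumptions)::

    es.indices.get_index_template(name="my-template")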
``_ :arg name: A pattern that returned template names must match :arg flat_settings: Return settings in flat format (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ return self.transport.perform_request( "GET", _make_path("_index_template", name), params=params, headers=headers ) @query_params( "cause", "create", "master_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_index_template(self, name, body, params=None, headers=None): """ Creates or updates an index template. ``_ :arg name: The name of the template :arg body: The template definition :arg cause: User defined reason for creating/updating the index template :arg create: Whether the index template should only be added if new or can also replace an existing one :arg master_timeout: Specify timeout for connection to master """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_index_template", name), params=params, headers=headers, body=body, ) @query_params( "cause", "create", "master_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def simulate_index_template(self, name, body=None, params=None, headers=None): """ Simulate matching the given index name against the index templates in the system ``_ :arg name: The name of the index (it must be a concrete index name) :arg body: New index template definition, which will be included in the simulation, as if it already exists in the system :arg cause: User defined reason for dry-run creating the new template for simulation purposes :arg create: Whether the index template we optionally defined in the body should only be dry-run added if new or can also replace an existing one :arg master_timeout: Specify timeout for connection to master """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "POST", _make_path("_index_template", "_simulate_index", name), params=params, headers=headers, body=body, ) @query_params( "expand_wildcards", response_mimetypes=["application/json"], ) def get_data_stream(self, name=None, params=None, headers=None): """ Returns data streams. 
``_ :arg name: A comma-separated list of data streams to get; use `*` to get all data streams :arg expand_wildcards: Whether wildcard expressions should get expanded to open or closed indices (default: open) Valid choices: open, closed, hidden, none, all Default: open """ return self.transport.perform_request( "GET", _make_path("_data_stream", name), params=params, headers=headers ) @query_params( "cause", "create", "master_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def simulate_template(self, body=None, name=None, params=None, headers=None): """ Simulate resolving the given template name or body ``_ :arg body: New index template definition to be simulated, if no index template name is specified :arg name: The name of the index template :arg cause: User defined reason for dry-run creating the new template for simulation purposes :arg create: Whether the index template we optionally defined in the body should only be dry-run added if new or can also replace an existing one :arg master_timeout: Specify timeout for connection to master """ return self.transport.perform_request( "POST", _make_path("_index_template", "_simulate", name), params=params, headers=headers, body=body, ) @query_params( "expand_wildcards", response_mimetypes=["application/json"], ) def resolve_index(self, name, params=None, headers=None): """ Returns information about any matching indices, aliases, and data streams ``_ :arg name: A comma-separated list of names or wildcard expressions :arg expand_wildcards: Whether wildcard expressions should get expanded to open or closed indices (default: open) Valid choices: open, closed, hidden, none, all Default: open """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "GET", _make_path("_resolve", "index", name), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "master_timeout", "timeout", response_mimetypes=["application/json"], ) def add_block(self, index, block, params=None, headers=None): """ Adds a block to an index. ``_ :arg index: A comma separated list of indices to add a block to :arg block: The block to add (one of read, write, read_only or metadata) :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg master_timeout: Specify timeout for connection to master :arg timeout: Explicit operation timeout """ for param in (index, block): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path(index, "_block", block), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def data_streams_stats(self, name=None, params=None, headers=None): """ Provides statistics on operations happening in a data stream. 
``_ :arg name: A comma-separated list of data stream names; use `_all` or empty string to perform the operation on all data streams """ return self.transport.perform_request( "GET", _make_path("_data_stream", name, "_stats"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def promote_data_stream(self, name, params=None, headers=None): """ Promotes a data stream from a replicated data stream managed by CCR to a regular data stream ``_ :arg name: The name of the data stream """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "POST", _make_path("_data_stream", "_promote", name), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def migrate_to_data_stream(self, name, params=None, headers=None): """ Migrates an alias to a data stream ``_ :arg name: The name of the alias to migrate """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "POST", _make_path("_data_stream", "_migrate", name), params=params, headers=headers, ) @query_params( "allow_no_indices", "expand_wildcards", "flush", "ignore_unavailable", "run_expensive_tasks", response_mimetypes=["application/json"], ) def disk_usage(self, index, params=None, headers=None): """ Analyzes the disk usage of each field of an index or data stream ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: Comma-separated list of indices or data streams to analyze the disk usage :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, hidden, none, all Default: open :arg flush: Whether flush or not before analyzing the index disk usage. Defaults to true :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg run_expensive_tasks: Must be set to [true] in order for the task to be performed. Defaults to false. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "POST", _make_path(index, "_disk_usage"), params=params, headers=headers ) @query_params( "allow_no_indices", "expand_wildcards", "fields", "ignore_unavailable", response_mimetypes=["application/json"], ) def field_usage_stats(self, index, params=None, headers=None): """ Returns the field usage stats for each field of an index ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. 
Valid choices: open, closed, hidden, none, all Default: open :arg fields: A comma-separated list of fields to include in the stats if only a subset of fields should be returned (supports wildcards) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "GET", _make_path(index, "_field_usage_stats"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def modify_data_stream(self, body, params=None, headers=None): """ Modifies a data stream ``_ :arg body: The data stream modifications """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_data_stream/_modify", params=params, headers=headers, body=body ) elasticsearch-py-7.17.6/elasticsearch/client/indices.pyi000066400000000000000000001640211426163262700233220ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, List, Mapping, MutableMapping, Optional, Tuple, Union, ) from typing_extensions import Literal from .utils import NamespacedClient class IndicesClient(NamespacedClient): def analyze( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
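# --- Editor's usage sketch (not part of the upstream source). A minimal example
# of IndicesClient.analyze as typed above; the cluster URL and sample text are
# illustrative assumptions, not values taken from this repository.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # hypothetical local cluster

# Run the standard analyzer over a sample string and print the produced tokens.
resp = es.indices.analyze(body={"analyzer": "standard", "text": "Quick Brown Fox"})
print([token["token"] for token in resp["tokens"]])  # ['quick', 'brown', 'fox']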
def refresh( self, *, index: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def flush( self, *, index: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., force: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., wait_if_ongoing: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def create( self, *, index: str, body: Optional[Mapping[str, Any]] = ..., aliases: Optional[Mapping[str, Mapping[str, Any]]] = ..., include_type_name: Optional[bool] = ..., mappings: Optional[Mapping[str, Any]] = ..., master_timeout: Optional[Union[int, str]] = ..., settings: Optional[Mapping[str, Any]] = ..., timeout: Optional[Union[int, str]] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
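# --- Editor's usage sketch (illustrative, not upstream source): create an index,
# write one document, then refresh so it is immediately searchable. The index
# name, settings, and cluster URL are assumptions.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
es.indices.create(index="my-index", body={"settings": {"number_of_shards": 1}})
es.index(index="my-index", id="1", body={"title": "hello"})
es.indices.refresh(index="my-index")  # make the new document visible to search
es.indices.flush(index="my-index", wait_if_ongoing=True)  # persist the translog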
def clone( self, *, index: Any, target: Any, body: Optional[Mapping[str, Any]] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get( self, *, index: Union[List[str], str], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., flat_settings: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., include_defaults: Optional[bool] = ..., include_type_name: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def open( self, *, index: Union[List[str], str], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., timeout: Optional[Union[int, str]] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def close( self, *, index: Union[List[str], str], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., timeout: Optional[Union[int, str]] = ..., wait_for_active_shards: Optional[Union[Union[Literal["all"], str], int]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete( self, *, index: Union[List[str], str], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def exists( self, *, index: Union[List[str], str], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., flat_settings: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., include_defaults: Optional[bool] = ..., local: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... 
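# --- Editor's usage sketch (illustrative): guard destructive calls with
# IndicesClient.exists, which returns a bool rather than raising. The index
# name and cluster URL are assumptions.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
if es.indices.exists(index="my-index"):
    es.indices.close(index="my-index")   # block reads/writes but keep metadata
    es.indices.delete(index="my-index")  # remove the index entirely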
def exists_type( self, *, index: Any, doc_type: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., local: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... def put_mapping( self, *, body: Mapping[str, Any], index: Optional[Any] = ..., doc_type: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., include_type_name: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., write_index_only: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_mapping( self, *, index: Optional[Union[List[str], str]] = ..., doc_type: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., include_type_name: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def get_field_mapping( self, *, fields: Union[List[str], str], index: Optional[Union[List[str], str]] = ..., doc_type: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., include_defaults: Optional[bool] = ..., include_type_name: Optional[bool] = ..., local: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_alias( self, *, index: Any, name: Any, body: Optional[Mapping[str, Any]] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def exists_alias( self, *, name: Union[List[str], str], index: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., local: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... 
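# --- Editor's usage sketch (illustrative): point an alias at an index, check it,
# and inspect a single field's mapping. Alias, index, and field names are
# assumptions.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
es.indices.put_alias(index="my-index", name="my-alias")
assert es.indices.exists_alias(name="my-alias")  # exists_alias returns a bool
mapping = es.indices.get_field_mapping(fields="title", index="my-index")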
def get_alias( self, *, index: Optional[Union[List[str], str]] = ..., name: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., local: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def update_aliases( self, *, body: Mapping[str, Any], master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_alias( self, *, index: Union[List[str], str], name: Union[List[str], str], master_timeout: Optional[Union[int, str]] = ..., timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_template( self, *, name: Any, body: Mapping[str, Any], create: Optional[bool] = ..., include_type_name: Optional[bool] = ..., master_timeout: Optional[Any] = ..., order: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def exists_template( self, *, name: Union[List[str], str], flat_settings: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... def get_template( self, *, name: Optional[Union[List[str], str]] = ..., flat_settings: Optional[bool] = ..., include_type_name: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_template( self, *, name: str, master_timeout: Optional[Union[int, str]] = ..., timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_settings( self, *, index: Optional[Union[List[str], str]] = ..., name: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., flat_settings: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., include_defaults: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Union[int, str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
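# --- Editor's usage sketch (illustrative): a legacy (_template) template paired
# with the exists/get calls typed above, plus a flat settings lookup. All names
# and settings values are assumptions.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
es.indices.put_template(
    name="logs-legacy",
    body={"index_patterns": ["logs-*"], "settings": {"number_of_replicas": 0}},
)
if es.indices.exists_template(name="logs-legacy"):
    tmpl = es.indices.get_template(name="logs-legacy", flat_settings=True)
settings = es.indices.get_settings(index="my-index", flat_settings=True)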
def put_settings( self, *, body: Mapping[str, Any], index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., flat_settings: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Any] = ..., preserve_existing: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stats( self, *, index: Optional[Union[List[str], str]] = ..., metric: Optional[Union[List[str], str]] = ..., completion_fields: Optional[Union[List[str], str]] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., fielddata_fields: Optional[Union[List[str], str]] = ..., fields: Optional[Union[List[str], str]] = ..., forbid_closed_indices: Optional[bool] = ..., groups: Optional[Union[List[str], str]] = ..., include_segment_file_sizes: Optional[bool] = ..., include_unloaded_segments: Optional[bool] = ..., level: Optional[Union[Literal["cluster", "indices", "shards"], str]] = ..., types: Optional[Union[List[str], str]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def segments( self, *, index: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., verbose: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def validate_query( self, *, body: Optional[Mapping[str, Any]] = ..., index: Optional[Any] = ..., doc_type: Optional[Any] = ..., all_shards: Optional[bool] = ..., allow_no_indices: Optional[bool] = ..., analyze_wildcard: Optional[bool] = ..., analyzer: Optional[Any] = ..., default_operator: Optional[Any] = ..., df: Optional[Any] = ..., expand_wildcards: Optional[Any] = ..., explain: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., lenient: Optional[bool] = ..., q: Optional[Any] = ..., rewrite: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def clear_cache( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., fielddata: Optional[bool] = ..., fields: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., query: Optional[bool] = ..., request: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def recovery( self, *, index: Optional[Any] = ..., active_only: Optional[bool] = ..., detailed: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def upgrade( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., only_ancient_segments: Optional[bool] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
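# --- Editor's usage sketch (illustrative): dry-run a query with validate_query
# before executing it; explain=True adds a human-readable reason when the query
# is invalid. Index and field names are assumptions.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
resp = es.indices.validate_query(
    index="my-index",
    body={"query": {"match": {"title": "hello"}}},
    explain=True,
)
print(resp["valid"])  # True if the query parses against the index mappings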
def get_upgrade( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def flush_synced( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def shard_stores( self, *, index: Optional[Union[List[str], str]] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., ignore_unavailable: Optional[bool] = ..., status: Optional[ Union[ List[Union[Literal["all", "green", "red", "yellow"], str]], Union[Literal["all", "green", "red", "yellow"], str], ] ] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def forcemerge( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., flush: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., max_num_segments: Optional[Any] = ..., only_expunge_deletes: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def shrink( self, *, index: Any, target: Any, body: Optional[Mapping[str, Any]] = ..., copy_settings: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def split( self, *, index: Any, target: Any, body: Optional[Mapping[str, Any]] = ..., copy_settings: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def rollover( self, *, alias: Any, body: Optional[Mapping[str, Any]] = ..., new_index: Optional[Any] = ..., dry_run: Optional[bool] = ..., include_type_name: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def freeze( self, *, index: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
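# --- Editor's usage sketch (illustrative): roll a write alias over once the
# backing index grows old or large, then freeze the retired index as described
# in the freeze docstring above. Note that freeze/unfreeze are deprecated in
# late 7.x releases; the alias, index names, and conditions are assumptions.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
es.indices.rollover(
    alias="logs-write",
    body={"conditions": {"max_age": "7d", "max_docs": 1000000}},
)
es.indices.freeze(index="logs-000001")  # read-only, minimal heap overhead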
def unfreeze( self, *, index: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_active_shards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def reload_search_analyzers( self, *, index: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def create_data_stream( self, *, name: str, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_data_stream( self, *, name: Union[List[str], str], expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
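# --- Editor's usage sketch (illustrative): unfreeze a frozen index (it goes
# through normal recovery and becomes writeable again) and manage a data stream.
# create_data_stream requires a matching index template with a ``data_stream``
# section (see the template sketch below); all names here are assumptions.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
es.indices.unfreeze(index="logs-000001")
es.indices.create_data_stream(name="logs-app-default")
es.indices.delete_data_stream(name="logs-app-default")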
def delete_index_template( self, *, name: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def exists_index_template( self, *, name: Any, flat_settings: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> bool: ... def get_index_template( self, *, name: Optional[Any] = ..., flat_settings: Optional[bool] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_index_template( self, *, name: Any, body: Mapping[str, Any], cause: Optional[Any] = ..., create: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def simulate_index_template( self, *, name: Any, body: Optional[Mapping[str, Any]] = ..., cause: Optional[Any] = ..., create: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
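# --- Editor's usage sketch (illustrative): a composable index template that
# enables data streams for ``logs-*``, plus a dry-run simulation against a
# concrete index name. Template and index names are assumptions.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
es.indices.put_index_template(
    name="logs-template",
    body={"index_patterns": ["logs-*"], "data_stream": {}},
)
sim = es.indices.simulate_index_template(name="logs-app-default")  # nothing is applied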
def get_data_stream( self, *, name: Optional[Union[List[str], str]] = ..., expand_wildcards: Optional[ Union[ List[Union[Literal["all", "closed", "hidden", "none", "open"], str]], Union[Literal["all", "closed", "hidden", "none", "open"], str], ] ] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def simulate_template( self, *, body: Optional[Mapping[str, Any]] = ..., name: Optional[Any] = ..., cause: Optional[Any] = ..., create: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def resolve_index( self, *, name: Any, expand_wildcards: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def add_block( self, *, index: Any, block: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
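# --- Editor's usage sketch (illustrative): resolve a wildcard to the concrete
# indices, aliases, and data streams it matches, then write-block a single
# index. The pattern and index name are assumptions.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
resolved = es.indices.resolve_index(name="logs-*", expand_wildcards="open")
es.indices.add_block(index="my-index", block="write")  # reject writes, allow reads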
def data_streams_stats( self, *, name: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def promote_data_stream( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def migrate_to_data_stream( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def disk_usage( self, *, index: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., flush: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., run_expensive_tasks: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def field_usage_stats( self, *, index: Any, allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., fields: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
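# --- Editor's usage sketch (illustrative): per-field disk usage (an experimental
# API that must be opted into with run_expensive_tasks=True, per the docstring
# above) and data stream statistics. Index and stream names are assumptions.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
usage = es.indices.disk_usage(index="my-index", run_expensive_tasks=True)
stats = es.indices.data_streams_stats(name="logs-app-default")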
def modify_data_stream( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/ingest.py000066400000000000000000000124141426163262700230220ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IngestClient(NamespacedClient): @query_params( "master_timeout", "summary", response_mimetypes=["application/json"], ) def get_pipeline(self, id=None, params=None, headers=None): """ Returns a pipeline. ``_ :arg id: Comma separated list of pipeline ids. Wildcards supported :arg master_timeout: Explicit operation timeout for connection to master node :arg summary: Return pipelines without their definitions (default: false) """ return self.transport.perform_request( "GET", _make_path("_ingest", "pipeline", id), params=params, headers=headers ) @query_params( "if_version", "master_timeout", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_pipeline(self, id, body, params=None, headers=None): """ Creates or updates a pipeline. ``_ :arg id: Pipeline ID :arg body: The ingest definition :arg if_version: Required version for optimistic concurrency control for pipeline updates :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ for param in (id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_ingest", "pipeline", id), params=params, headers=headers, body=body, ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) def delete_pipeline(self, id, params=None, headers=None): """ Deletes a pipeline. 
``_ :arg id: Pipeline ID :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "DELETE", _make_path("_ingest", "pipeline", id), params=params, headers=headers, ) @query_params( "verbose", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def simulate(self, body, id=None, params=None, headers=None): """ Allows to simulate a pipeline with example documents. ``_ :arg body: The simulate definition :arg id: Pipeline ID :arg verbose: Verbose mode. Display data output for each processor in executed pipeline """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", _make_path("_ingest", "pipeline", id, "_simulate"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def processor_grok(self, params=None, headers=None): """ Returns a list of the built-in patterns. ``_ """ return self.transport.perform_request( "GET", "/_ingest/processor/grok", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def geo_ip_stats(self, params=None, headers=None): """ Returns statistical information about geoip databases ``_ """ return self.transport.perform_request( "GET", "/_ingest/geoip/stats", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/ingest.pyi000066400000000000000000000130761426163262700232000ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class IngestClient(NamespacedClient): def get_pipeline( self, *, id: Optional[Any] = ..., master_timeout: Optional[Any] = ..., summary: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
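# Illustrative usage sketch (editorial comment): a typical round-trip with the
# ingest APIs typed in this stub, assuming a hypothetical client `es` and a
# pipeline id "my-pipeline":
#
#     es.ingest.put_pipeline(
#         id="my-pipeline",
#         body={"processors": [{"set": {"field": "env", "value": "prod"}}]},
#     )
#     es.ingest.get_pipeline(id="my-pipeline")
#     es.ingest.simulate(
#         body={"docs": [{"_source": {"msg": "hello"}}]}, id="my-pipeline"
#     )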
def put_pipeline( self, *, id: Any, body: Mapping[str, Any], if_version: Optional[Any] = ..., master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_pipeline( self, *, id: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def simulate( self, *, body: Mapping[str, Any], id: Optional[Any] = ..., verbose: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def processor_grok( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def geo_ip_stats( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/license.py000066400000000000000000000113101426163262700231450ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, query_params class LicenseClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def delete(self, params=None, headers=None): """ Deletes licensing information for the cluster ``_ """ return self.transport.perform_request( "DELETE", "/_license", params=params, headers=headers ) @query_params( "accept_enterprise", "local", response_mimetypes=["application/json"], ) def get(self, params=None, headers=None): """ Retrieves licensing information for the cluster ``_ :arg accept_enterprise: If the active license is an enterprise license, return type as 'enterprise' (default: false) :arg local: Return local information, do not retrieve the state from master node (default: false) """ return self.transport.perform_request( "GET", "/_license", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_basic_status(self, params=None, headers=None): """ Retrieves information about the status of the basic license. ``_ """ return self.transport.perform_request( "GET", "/_license/basic_status", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_trial_status(self, params=None, headers=None): """ Retrieves information about the status of the trial license. ``_ """ return self.transport.perform_request( "GET", "/_license/trial_status", params=params, headers=headers ) @query_params( "acknowledge", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def post(self, body=None, params=None, headers=None): """ Updates the license for the cluster. ``_ :arg body: licenses to be installed :arg acknowledge: whether the user has acknowledged acknowledge messages (default: false) """ return self.transport.perform_request( "PUT", "/_license", params=params, headers=headers, body=body ) @query_params( "acknowledge", response_mimetypes=["application/json"], ) def post_start_basic(self, params=None, headers=None): """ Starts an indefinite basic license. ``_ :arg acknowledge: whether the user has acknowledged acknowledge messages (default: false) """ return self.transport.perform_request( "POST", "/_license/start_basic", params=params, headers=headers ) @query_params( "acknowledge", "type", response_mimetypes=["application/json"], ) def post_start_trial(self, params=None, headers=None): """ starts a limited time trial license. ``_ :arg acknowledge: whether the user has acknowledged acknowledge messages (default: false) :arg type: The type of trial license to generate (default: "trial") """ return self.transport.perform_request( "POST", "/_license/start_trial", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/license.pyi000066400000000000000000000140571426163262700233310ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. 
See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class LicenseClient(NamespacedClient): def delete( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get( self, *, accept_enterprise: Optional[bool] = ..., local: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_basic_status( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_trial_status( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
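# Illustrative usage sketch (editorial comment): read-only license checks via
# the methods above, assuming a hypothetical client `es`:
#
#     es.license.get(accept_enterprise=True)
#     es.license.get_basic_status()
#     es.license.get_trial_status()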
def post( self, *, body: Optional[Mapping[str, Any]] = ..., acknowledge: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def post_start_basic( self, *, acknowledge: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def post_start_trial( self, *, acknowledge: Optional[bool] = ..., type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/logstash.py000066400000000000000000000060361426163262700233600ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class LogstashClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def delete_pipeline(self, id, params=None, headers=None): """ Deletes Logstash Pipelines used by Central Management ``_ :arg id: The ID of the Pipeline """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "DELETE", _make_path("_logstash", "pipeline", id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def get_pipeline(self, id, params=None, headers=None): """ Retrieves Logstash Pipelines used by Central Management ``_ :arg id: A comma-separated list of Pipeline IDs """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "GET", _make_path("_logstash", "pipeline", id), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_pipeline(self, id, body, params=None, headers=None): """ Adds and updates Logstash Pipelines used for Central Management ``_ :arg id: The ID of the Pipeline :arg body: The Pipeline to add or update """ for param in (id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_logstash", "pipeline", id), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/client/logstash.pyi000066400000000000000000000061571426163262700235350ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class LogstashClient(NamespacedClient): def delete_pipeline( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
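# Illustrative usage sketch (editorial comment): centrally managed Logstash
# pipelines are stored and fetched by id. Assuming a hypothetical client `es`;
# the body fields shown are illustrative and partial, not the full document
# the API expects:
#
#     es.logstash.put_pipeline(
#         id="my-pipeline",
#         body={
#             "pipeline": "input { stdin {} } output { stdout {} }",
#             "username": "elastic",
#         },
#     )
#     es.logstash.get_pipeline(id="my-pipeline")
#     es.logstash.delete_pipeline(id="my-pipeline")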
def get_pipeline( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_pipeline( self, *, id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/migration.py000066400000000000000000000047301426163262700235240ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, _make_path, query_params class MigrationClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def deprecations(self, index=None, params=None, headers=None): """ Retrieves information about different cluster, node, and index level settings that use deprecated features that will be removed or changed in the next major version. ``_ :arg index: Index pattern """ return self.transport.perform_request( "GET", _make_path(index, "_migration", "deprecations"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def get_feature_upgrade_status(self, params=None, headers=None): """ Find out whether system features need to be upgraded or not ``_ """ return self.transport.perform_request( "GET", "/_migration/system_features", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def post_feature_upgrade(self, params=None, headers=None): """ Begin upgrades for system features ``_ """ return self.transport.perform_request( "POST", "/_migration/system_features", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/migration.pyi000066400000000000000000000060451426163262700236760ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. 
See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class MigrationClient(NamespacedClient): def deprecations( self, *, index: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_feature_upgrade_status( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def post_feature_upgrade( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/ml.py000066400000000000000000002217671426163262700221560ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
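# Illustrative usage sketch (editorial comment, not part of the original
# module): the MlClient defined below is reached through the `ml` namespace of
# an Elasticsearch client. Assuming a hypothetical client `es` and an existing
# anomaly detection job "my-job":
#
#     es.ml.open_job(job_id="my-job")
#     es.ml.get_job_stats(job_id="my-job")
#     es.ml.close_job(job_id="my-job")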
from .utils import SKIP_IN_PATH, NamespacedClient, _bulk_body, _make_path, query_params class MlClient(NamespacedClient): @query_params( "allow_no_jobs", "allow_no_match", "force", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def close_job(self, job_id, body=None, params=None, headers=None): """ Closes one or more anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle. ``_ :arg job_id: The name of the job to close :arg body: The URL params optionally sent in the body :arg allow_no_jobs: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg force: True if the job should be forcefully closed :arg timeout: Controls the time to wait until a job has closed. Default to 30 minutes """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_close"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def delete_calendar(self, calendar_id, params=None, headers=None): """ Deletes a calendar. ``_ :arg calendar_id: The ID of the calendar to delete """ if calendar_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'calendar_id'." ) return self.transport.perform_request( "DELETE", _make_path("_ml", "calendars", calendar_id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def delete_calendar_event(self, calendar_id, event_id, params=None, headers=None): """ Deletes scheduled events from a calendar. ``_ :arg calendar_id: The ID of the calendar to modify :arg event_id: The ID of the event to remove from the calendar """ for param in (calendar_id, event_id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "DELETE", _make_path("_ml", "calendars", calendar_id, "events", event_id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def delete_calendar_job(self, calendar_id, job_id, params=None, headers=None): """ Deletes anomaly detection jobs from a calendar. ``_ :arg calendar_id: The ID of the calendar to modify :arg job_id: The ID of the job to remove from the calendar """ for param in (calendar_id, job_id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "DELETE", _make_path("_ml", "calendars", calendar_id, "jobs", job_id), params=params, headers=headers, ) @query_params( "force", response_mimetypes=["application/json"], ) def delete_datafeed(self, datafeed_id, params=None, headers=None): """ Deletes an existing datafeed. ``_ :arg datafeed_id: The ID of the datafeed to delete :arg force: True if the datafeed should be forcefully deleted """ if datafeed_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'datafeed_id'." 
) return self.transport.perform_request( "DELETE", _make_path("_ml", "datafeeds", datafeed_id), params=params, headers=headers, ) @query_params( "requests_per_second", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def delete_expired_data(self, body=None, job_id=None, params=None, headers=None): """ Deletes expired and unused machine learning data. ``_ :arg body: deleting expired data parameters :arg job_id: The ID of the job(s) to perform expired data hygiene for :arg requests_per_second: The desired requests per second for the deletion processes. :arg timeout: How long can the underlying delete processes run until they are canceled """ return self.transport.perform_request( "DELETE", _make_path("_ml", "_delete_expired_data", job_id), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def delete_filter(self, filter_id, params=None, headers=None): """ Deletes a filter. ``_ :arg filter_id: The ID of the filter to delete """ if filter_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'filter_id'.") return self.transport.perform_request( "DELETE", _make_path("_ml", "filters", filter_id), params=params, headers=headers, ) @query_params( "allow_no_forecasts", "timeout", response_mimetypes=["application/json"], ) def delete_forecast(self, job_id, forecast_id=None, params=None, headers=None): """ Deletes forecasts from a machine learning job. ``_ :arg job_id: The ID of the job from which to delete forecasts :arg forecast_id: The ID of the forecast to delete, can be comma delimited list. Leaving blank implies `_all` :arg allow_no_forecasts: Whether to ignore if `_all` matches no forecasts :arg timeout: Controls the time to wait until the forecast(s) are deleted. Default to 30 seconds """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "DELETE", _make_path("_ml", "anomaly_detectors", job_id, "_forecast", forecast_id), params=params, headers=headers, ) @query_params( "force", "wait_for_completion", response_mimetypes=["application/json"], ) def delete_job(self, job_id, params=None, headers=None): """ Deletes an existing anomaly detection job. ``_ :arg job_id: The ID of the job to delete :arg force: True if the job should be forcefully deleted :arg wait_for_completion: Should this request wait until the operation has completed before returning Default: True """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "DELETE", _make_path("_ml", "anomaly_detectors", job_id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def delete_model_snapshot(self, job_id, snapshot_id, params=None, headers=None): """ Deletes an existing model snapshot. 
``_ :arg job_id: The ID of the job to fetch :arg snapshot_id: The ID of the snapshot to delete """ for param in (job_id, snapshot_id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "DELETE", _make_path( "_ml", "anomaly_detectors", job_id, "model_snapshots", snapshot_id ), params=params, headers=headers, ) @query_params( "advance_time", "calc_interim", "end", "skip_time", "start", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def flush_job(self, job_id, body=None, params=None, headers=None): """ Forces any buffered data to be processed by the job. ``_ :arg job_id: The name of the job to flush :arg body: Flush parameters :arg advance_time: Advances time to the given value generating results and updating the model for the advanced interval :arg calc_interim: Calculates interim results for the most recent bucket or all buckets within the latency period :arg end: When used in conjunction with calc_interim, specifies the range of buckets on which to calculate interim results :arg skip_time: Skips time to the given value without generating results or updating the model for the skipped interval :arg start: When used in conjunction with calc_interim, specifies the range of buckets on which to calculate interim results """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_flush"), params=params, headers=headers, body=body, ) @query_params( "duration", "expires_in", "max_model_memory", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def forecast(self, job_id, body=None, params=None, headers=None): """ Predicts the future behavior of a time series by using its historical behavior. ``_ :arg job_id: The ID of the job to forecast for :arg body: Query parameters can be specified in the body :arg duration: The duration of the forecast :arg expires_in: The time interval after which the forecast expires. Expired forecasts will be deleted at the first opportunity. :arg max_model_memory: The max memory able to be used by the forecast. Default is 20mb. """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_forecast"), params=params, headers=headers, body=body, ) @query_params( "anomaly_score", "desc", "end", "exclude_interim", "expand", "from_", "size", "sort", "start", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def get_buckets(self, job_id, body=None, timestamp=None, params=None, headers=None): """ Retrieves anomaly detection job results for one or more buckets. 
``_ :arg job_id: ID of the job to get bucket results from :arg body: Bucket selection details if not provided in URI :arg timestamp: The timestamp of the desired single bucket result :arg anomaly_score: Filter for the most anomalous buckets :arg desc: Set the sort direction :arg end: End time filter for buckets :arg exclude_interim: Exclude interim results :arg expand: Include anomaly records :arg from_: skips a number of buckets :arg size: specifies a max number of buckets to get :arg sort: Sort buckets by a particular field :arg start: Start time filter for buckets """ if "from_" in params: params["from"] = params.pop("from_") if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "POST", _make_path( "_ml", "anomaly_detectors", job_id, "results", "buckets", timestamp ), params=params, headers=headers, body=body, ) @query_params( "end", "from_", "job_id", "size", "start", response_mimetypes=["application/json"], ) def get_calendar_events(self, calendar_id, params=None, headers=None): """ Retrieves information about the scheduled events in calendars. ``_ :arg calendar_id: The ID of the calendar containing the events :arg end: Get events before this time :arg from_: Skips a number of events :arg job_id: Get events for the job. When this option is used calendar_id must be '_all' :arg size: Specifies a max number of events to get :arg start: Get events after this time """ if "from_" in params: params["from"] = params.pop("from_") if calendar_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'calendar_id'." ) return self.transport.perform_request( "GET", _make_path("_ml", "calendars", calendar_id, "events"), params=params, headers=headers, ) @query_params( "from_", "size", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def get_calendars(self, body=None, calendar_id=None, params=None, headers=None): """ Retrieves configuration information for calendars. ``_ :arg body: The from and size parameters optionally sent in the body :arg calendar_id: The ID of the calendar to fetch :arg from_: skips a number of calendars :arg size: specifies a max number of calendars to get """ if "from_" in params: params["from"] = params.pop("from_") return self.transport.perform_request( "POST", _make_path("_ml", "calendars", calendar_id), params=params, headers=headers, body=body, ) @query_params( "from_", "partition_field_value", "size", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def get_categories( self, job_id, body=None, category_id=None, params=None, headers=None ): """ Retrieves anomaly detection job results for one or more categories. ``_ :arg job_id: The name of the job :arg body: Category selection details if not provided in URI :arg category_id: The identifier of the category definition of interest :arg from_: skips a number of categories :arg partition_field_value: Specifies the partition to retrieve categories for. This is optional, and should never be used for jobs where per-partition categorization is disabled. 
:arg size: specifies a max number of categories to get """ if "from_" in params: params["from"] = params.pop("from_") if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "POST", _make_path( "_ml", "anomaly_detectors", job_id, "results", "categories", category_id ), params=params, headers=headers, body=body, ) @query_params( "allow_no_datafeeds", "allow_no_match", response_mimetypes=["application/json"], ) def get_datafeed_stats(self, datafeed_id=None, params=None, headers=None): """ Retrieves usage information for datafeeds. ``_ :arg datafeed_id: The ID of the datafeeds stats to fetch :arg allow_no_datafeeds: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) """ return self.transport.perform_request( "GET", _make_path("_ml", "datafeeds", datafeed_id, "_stats"), params=params, headers=headers, ) @query_params( "allow_no_datafeeds", "allow_no_match", "exclude_generated", response_mimetypes=["application/json"], ) def get_datafeeds(self, datafeed_id=None, params=None, headers=None): """ Retrieves configuration information for datafeeds. ``_ :arg datafeed_id: The ID of the datafeeds to fetch :arg allow_no_datafeeds: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified) :arg exclude_generated: Omits fields that are illegal to set on datafeed PUT """ return self.transport.perform_request( "GET", _make_path("_ml", "datafeeds", datafeed_id), params=params, headers=headers, ) @query_params( "from_", "size", response_mimetypes=["application/json"], ) def get_filters(self, filter_id=None, params=None, headers=None): """ Retrieves filters. ``_ :arg filter_id: The ID of the filter to fetch :arg from_: skips a number of filters :arg size: specifies a max number of filters to get """ if "from_" in params: params["from"] = params.pop("from_") return self.transport.perform_request( "GET", _make_path("_ml", "filters", filter_id), params=params, headers=headers, ) @query_params( "desc", "end", "exclude_interim", "from_", "influencer_score", "size", "sort", "start", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def get_influencers(self, job_id, body=None, params=None, headers=None): """ Retrieves anomaly detection job results for one or more influencers. 
``_ :arg job_id: Identifier for the anomaly detection job :arg body: Influencer selection criteria :arg desc: whether the results should be sorted in descending order :arg end: end timestamp for the requested influencers :arg exclude_interim: Exclude interim results :arg from_: skips a number of influencers :arg influencer_score: influencer score threshold for the requested influencers :arg size: specifies a max number of influencers to get :arg sort: sort field for the requested influencers :arg start: start timestamp for the requested influencers """ if "from_" in params: params["from"] = params.pop("from_") if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "results", "influencers"), params=params, headers=headers, body=body, ) @query_params( "allow_no_jobs", "allow_no_match", response_mimetypes=["application/json"], ) def get_job_stats(self, job_id=None, params=None, headers=None): """ Retrieves usage information for anomaly detection jobs. ``_ :arg job_id: The ID of the jobs stats to fetch :arg allow_no_jobs: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) """ return self.transport.perform_request( "GET", _make_path("_ml", "anomaly_detectors", job_id, "_stats"), params=params, headers=headers, ) @query_params( "allow_no_jobs", "allow_no_match", "exclude_generated", response_mimetypes=["application/json"], ) def get_jobs(self, job_id=None, params=None, headers=None): """ Retrieves configuration information for anomaly detection jobs. ``_ :arg job_id: The ID of the jobs to fetch :arg allow_no_jobs: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg exclude_generated: Omits fields that are illegal to set on job PUT """ return self.transport.perform_request( "GET", _make_path("_ml", "anomaly_detectors", job_id), params=params, headers=headers, ) @query_params( "desc", "end", "from_", "size", "sort", "start", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def get_model_snapshots( self, job_id, body=None, snapshot_id=None, params=None, headers=None ): """ Retrieves information about model snapshots. ``_ :arg job_id: The ID of the job to fetch :arg body: Model snapshot selection criteria :arg snapshot_id: The ID of the snapshot to fetch :arg desc: True if the results should be sorted in descending order :arg end: The filter 'end' query parameter :arg from_: Skips a number of documents :arg size: The default number of documents returned in queries as a string.
:arg sort: Name of the field to sort on :arg start: The filter 'start' query parameter """ if "from_" in params: params["from"] = params.pop("from_") if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "POST", _make_path( "_ml", "anomaly_detectors", job_id, "model_snapshots", snapshot_id ), params=params, headers=headers, body=body, ) @query_params( "allow_no_jobs", "allow_no_match", "bucket_span", "end", "exclude_interim", "overall_score", "start", "top_n", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def get_overall_buckets(self, job_id, body=None, params=None, headers=None): """ Retrieves overall bucket results that summarize the bucket results of multiple anomaly detection jobs. ``_ :arg job_id: The job IDs for which to calculate overall bucket results :arg body: Overall bucket selection details if not provided in URI :arg allow_no_jobs: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg allow_no_match: Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified) :arg bucket_span: The span of the overall buckets. Defaults to the longest job bucket_span :arg end: Returns overall buckets with timestamps earlier than this time :arg exclude_interim: If true overall buckets that include interim buckets will be excluded :arg overall_score: Returns overall buckets with overall scores higher than this value :arg start: Returns overall buckets with timestamps after this time :arg top_n: The number of top job bucket scores to be used in the overall_score calculation """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "POST", _make_path( "_ml", "anomaly_detectors", job_id, "results", "overall_buckets" ), params=params, headers=headers, body=body, ) @query_params( "desc", "end", "exclude_interim", "from_", "record_score", "size", "sort", "start", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def get_records(self, job_id, body=None, params=None, headers=None): """ Retrieves anomaly records for an anomaly detection job. ``_ :arg job_id: The ID of the job :arg body: Record selection criteria :arg desc: Set the sort direction :arg end: End time filter for records :arg exclude_interim: Exclude interim results :arg from_: skips a number of records :arg record_score: Returns records with anomaly scores greater or equal than this value :arg size: specifies a max number of records to get :arg sort: Sort records by a particular field :arg start: Start time filter for records """ if "from_" in params: params["from"] = params.pop("from_") if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "results", "records"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def info(self, params=None, headers=None): """ Returns defaults and limits used by machine learning. 
``_ """ return self.transport.perform_request( "GET", "/_ml/info", params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def open_job(self, job_id, body=None, params=None, headers=None): """ Opens one or more anomaly detection jobs. ``_ :arg job_id: The ID of the job to open :arg body: Query parameters can be specified in the body """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_open"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def post_calendar_events(self, calendar_id, body, params=None, headers=None): """ Posts scheduled events in a calendar. ``_ :arg calendar_id: The ID of the calendar to modify :arg body: A list of events """ for param in (calendar_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "POST", _make_path("_ml", "calendars", calendar_id, "events"), params=params, headers=headers, body=body, ) @query_params( "reset_end", "reset_start", request_mimetypes=["application/x-ndjson", "application/json"], response_mimetypes=["application/json"], ) def post_data(self, job_id, body, params=None, headers=None): """ Sends data to an anomaly detection job for analysis. ``_ :arg job_id: The name of the job receiving the data :arg body: The data to process :arg reset_end: Optional parameter to specify the end of the bucket resetting range :arg reset_start: Optional parameter to specify the start of the bucket resetting range """ for param in (job_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") body = _bulk_body(self.transport.serializer, body) return self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_data"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def preview_datafeed(self, body=None, datafeed_id=None, params=None, headers=None): """ Previews a datafeed. ``_ :arg body: The datafeed config and job config with which to execute the preview :arg datafeed_id: The ID of the datafeed to preview """ return self.transport.perform_request( "POST", _make_path("_ml", "datafeeds", datafeed_id, "_preview"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_calendar(self, calendar_id, body=None, params=None, headers=None): """ Instantiates a calendar. ``_ :arg calendar_id: The ID of the calendar to create :arg body: The calendar details """ if calendar_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'calendar_id'." ) return self.transport.perform_request( "PUT", _make_path("_ml", "calendars", calendar_id), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def put_calendar_job(self, calendar_id, job_id, params=None, headers=None): """ Adds an anomaly detection job to a calendar. 
``_ :arg calendar_id: The ID of the calendar to modify :arg job_id: The ID of the job to add to the calendar """ for param in (calendar_id, job_id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_ml", "calendars", calendar_id, "jobs", job_id), params=params, headers=headers, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_throttled", "ignore_unavailable", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_datafeed(self, datafeed_id, body, params=None, headers=None): """ Instantiates a datafeed. ``_ :arg datafeed_id: The ID of the datafeed to create :arg body: The datafeed config :arg allow_no_indices: Ignore if the source indices expressions resolves to no concrete indices (default: true) :arg expand_wildcards: Whether source index expressions should get expanded to open or closed indices (default: open) Valid choices: open, closed, hidden, none, all :arg ignore_throttled: Ignore indices that are marked as throttled (default: true) :arg ignore_unavailable: Ignore unavailable indexes (default: false) """ for param in (datafeed_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_ml", "datafeeds", datafeed_id), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_filter(self, filter_id, body, params=None, headers=None): """ Instantiates a filter. ``_ :arg filter_id: The ID of the filter to create :arg body: The filter details """ for param in (filter_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_ml", "filters", filter_id), params=params, headers=headers, body=body, ) @query_params( "allow_no_indices", "expand_wildcards", "ignore_throttled", "ignore_unavailable", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_job(self, job_id, body, params=None, headers=None): """ Instantiates an anomaly detection job. ``_ :arg job_id: The ID of the job to create :arg body: The job :arg allow_no_indices: Ignore if the source indices expressions resolves to no concrete indices (default: true). Only set if datafeed_config is provided. :arg expand_wildcards: Whether source index expressions should get expanded to open or closed indices (default: open). Only set if datafeed_config is provided. Valid choices: open, closed, hidden, none, all :arg ignore_throttled: Ignore indices that are marked as throttled (default: true). Only set if datafeed_config is provided. :arg ignore_unavailable: Ignore unavailable indexes (default: false). Only set if datafeed_config is provided. """ for param in (job_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_ml", "anomaly_detectors", job_id), params=params, headers=headers, body=body, ) @query_params( "delete_intervening_results", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def revert_model_snapshot( self, job_id, snapshot_id, body=None, params=None, headers=None ): """ Reverts to a specific snapshot. 
        ``_

        :arg job_id: The ID of the job whose snapshot is to be reverted
        :arg snapshot_id: The ID of the snapshot to revert to
        :arg body: Reversion options
        :arg delete_intervening_results: Whether to reset the results back to
            the time of the snapshot
        """
        for param in (job_id, snapshot_id):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        return self.transport.perform_request(
            "POST",
            _make_path(
                "_ml",
                "anomaly_detectors",
                job_id,
                "model_snapshots",
                snapshot_id,
                "_revert",
            ),
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        "enabled",
        "timeout",
        response_mimetypes=["application/json"],
    )
    def set_upgrade_mode(self, params=None, headers=None):
        """
        Sets a cluster-wide upgrade_mode setting that prepares machine learning
        indices for an upgrade.

        ``_

        :arg enabled: Whether to enable the upgrade_mode ML setting.
            Defaults to false.
        :arg timeout: Controls the time to wait before the action times out.
            Defaults to 30 seconds
        """
        return self.transport.perform_request(
            "POST", "/_ml/set_upgrade_mode", params=params, headers=headers
        )

    @query_params(
        "end",
        "start",
        "timeout",
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def start_datafeed(self, datafeed_id, body=None, params=None, headers=None):
        """
        Starts one or more datafeeds.

        ``_

        :arg datafeed_id: The ID of the datafeed to start
        :arg body: The start datafeed parameters
        :arg end: The end time when the datafeed should stop. When not
            set, the datafeed continues in real time
        :arg start: The start time from where the datafeed should begin
        :arg timeout: Controls the time to wait until a datafeed has started.
            Defaults to 20 seconds
        """
        if datafeed_id in SKIP_IN_PATH:
            raise ValueError(
                "Empty value passed for a required argument 'datafeed_id'."
            )

        return self.transport.perform_request(
            "POST",
            _make_path("_ml", "datafeeds", datafeed_id, "_start"),
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        "allow_no_datafeeds",
        "allow_no_match",
        "force",
        "timeout",
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def stop_datafeed(self, datafeed_id, body=None, params=None, headers=None):
        """
        Stops one or more datafeeds.

        ``_

        :arg datafeed_id: The ID of the datafeed to stop
        :arg body: The URL params optionally sent in the body
        :arg allow_no_datafeeds: Whether to ignore if a wildcard expression
            matches no datafeeds. (This includes `_all` string or when no
            datafeeds have been specified)
        :arg allow_no_match: Whether to ignore if a wildcard expression
            matches no datafeeds. (This includes `_all` string or when no
            datafeeds have been specified)
        :arg force: True if the datafeed should be forcefully stopped.
        :arg timeout: Controls the time to wait until a datafeed has stopped.
            Defaults to 20 seconds
        """
        if datafeed_id in SKIP_IN_PATH:
            raise ValueError(
                "Empty value passed for a required argument 'datafeed_id'."
            )

        return self.transport.perform_request(
            "POST",
            _make_path("_ml", "datafeeds", datafeed_id, "_stop"),
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        "allow_no_indices",
        "expand_wildcards",
        "ignore_throttled",
        "ignore_unavailable",
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def update_datafeed(self, datafeed_id, body, params=None, headers=None):
        """
        Updates certain properties of a datafeed.
        ``_

        :arg datafeed_id: The ID of the datafeed to update
        :arg body: The datafeed update settings
        :arg allow_no_indices: Ignore if the source index expressions resolve
            to no concrete indices (default: true)
        :arg expand_wildcards: Whether source index expressions should get
            expanded to open or closed indices (default: open)  Valid choices:
            open, closed, hidden, none, all
        :arg ignore_throttled: Ignore indices that are marked as throttled
            (default: true)
        :arg ignore_unavailable: Ignore unavailable indexes (default: false)
        """
        for param in (datafeed_id, body):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        return self.transport.perform_request(
            "POST",
            _make_path("_ml", "datafeeds", datafeed_id, "_update"),
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def update_filter(self, filter_id, body, params=None, headers=None):
        """
        Updates the description of a filter, adds items, or removes items.

        ``_

        :arg filter_id: The ID of the filter to update
        :arg body: The filter update
        """
        for param in (filter_id, body):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        return self.transport.perform_request(
            "POST",
            _make_path("_ml", "filters", filter_id, "_update"),
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def update_job(self, job_id, body, params=None, headers=None):
        """
        Updates certain properties of an anomaly detection job.

        ``_

        :arg job_id: The ID of the job to update
        :arg body: The job update settings
        """
        for param in (job_id, body):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        return self.transport.perform_request(
            "POST",
            _make_path("_ml", "anomaly_detectors", job_id, "_update"),
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def update_model_snapshot(
        self, job_id, snapshot_id, body, params=None, headers=None
    ):
        """
        Updates certain properties of a snapshot.

        ``_

        :arg job_id: The ID of the job whose snapshot is to be updated
        :arg snapshot_id: The ID of the snapshot to update
        :arg body: The model snapshot properties to update
        """
        for param in (job_id, snapshot_id, body):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        return self.transport.perform_request(
            "POST",
            _make_path(
                "_ml",
                "anomaly_detectors",
                job_id,
                "model_snapshots",
                snapshot_id,
                "_update",
            ),
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def validate(self, body, params=None, headers=None):
        """
        Validates an anomaly detection job.

        ``_

        :arg body: The job config
        """
        if body in SKIP_IN_PATH:
            raise ValueError("Empty value passed for a required argument 'body'.")

        return self.transport.perform_request(
            "POST",
            "/_ml/anomaly_detectors/_validate",
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def validate_detector(self, body, params=None, headers=None):
        """
        Validates an anomaly detection detector.
``_ :arg body: The detector """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_ml/anomaly_detectors/_validate/detector", params=params, headers=headers, body=body, ) @query_params( "force", "timeout", response_mimetypes=["application/json"], ) def delete_data_frame_analytics(self, id, params=None, headers=None): """ Deletes an existing data frame analytics job. ``_ :arg id: The ID of the data frame analytics to delete :arg force: True if the job should be forcefully deleted :arg timeout: Controls the time to wait until a job is deleted. Defaults to 1 minute """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "DELETE", _make_path("_ml", "data_frame", "analytics", id), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def evaluate_data_frame(self, body, params=None, headers=None): """ Evaluates the data frame analytics for an annotated index. ``_ :arg body: The evaluation definition """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_ml/data_frame/_evaluate", params=params, headers=headers, body=body, ) @query_params( "allow_no_match", "exclude_generated", "from_", "size", response_mimetypes=["application/json"], ) def get_data_frame_analytics(self, id=None, params=None, headers=None): """ Retrieves configuration information for data frame analytics jobs. ``_ :arg id: The ID of the data frame analytics to fetch :arg allow_no_match: Whether to ignore if a wildcard expression matches no data frame analytics. (This includes `_all` string or when no data frame analytics have been specified) Default: True :arg exclude_generated: Omits fields that are illegal to set on data frame analytics PUT :arg from_: skips a number of analytics :arg size: specifies a max number of analytics to get Default: 100 """ if "from_" in params: params["from"] = params.pop("from_") return self.transport.perform_request( "GET", _make_path("_ml", "data_frame", "analytics", id), params=params, headers=headers, ) @query_params( "allow_no_match", "from_", "size", "verbose", response_mimetypes=["application/json"], ) def get_data_frame_analytics_stats(self, id=None, params=None, headers=None): """ Retrieves usage information for data frame analytics jobs. ``_ :arg id: The ID of the data frame analytics stats to fetch :arg allow_no_match: Whether to ignore if a wildcard expression matches no data frame analytics. (This includes `_all` string or when no data frame analytics have been specified) Default: True :arg from_: skips a number of analytics :arg size: specifies a max number of analytics to get Default: 100 :arg verbose: whether the stats response should be verbose """ if "from_" in params: params["from"] = params.pop("from_") return self.transport.perform_request( "GET", _make_path("_ml", "data_frame", "analytics", id, "_stats"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_data_frame_analytics(self, id, body, params=None, headers=None): """ Instantiates a data frame analytics job. 
``_ :arg id: The ID of the data frame analytics to create :arg body: The data frame analytics configuration """ for param in (id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_ml", "data_frame", "analytics", id), params=params, headers=headers, body=body, ) @query_params( "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def start_data_frame_analytics(self, id, body=None, params=None, headers=None): """ Starts a data frame analytics job. ``_ :arg id: The ID of the data frame analytics to start :arg body: The start data frame analytics parameters :arg timeout: Controls the time to wait until the task has started. Defaults to 20 seconds """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "POST", _make_path("_ml", "data_frame", "analytics", id, "_start"), params=params, headers=headers, body=body, ) @query_params( "allow_no_match", "force", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def stop_data_frame_analytics(self, id, body=None, params=None, headers=None): """ Stops one or more data frame analytics jobs. ``_ :arg id: The ID of the data frame analytics to stop :arg body: The stop data frame analytics parameters :arg allow_no_match: Whether to ignore if a wildcard expression matches no data frame analytics. (This includes `_all` string or when no data frame analytics have been specified) :arg force: True if the data frame analytics should be forcefully stopped :arg timeout: Controls the time to wait until the task has stopped. Defaults to 20 seconds """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "POST", _make_path("_ml", "data_frame", "analytics", id, "_stop"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def delete_trained_model(self, model_id, params=None, headers=None): """ Deletes an existing trained inference model that is currently not referenced by an ingest pipeline. ``_ :arg model_id: The ID of the trained model to delete """ if model_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'model_id'.") return self.transport.perform_request( "DELETE", _make_path("_ml", "trained_models", model_id), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def explain_data_frame_analytics( self, body=None, id=None, params=None, headers=None ): """ Explains a data frame analytics config. ``_ :arg body: The data frame analytics config to explain :arg id: The ID of the data frame analytics to explain """ return self.transport.perform_request( "POST", _make_path("_ml", "data_frame", "analytics", id, "_explain"), params=params, headers=headers, body=body, ) @query_params( "allow_no_match", "decompress_definition", "exclude_generated", "from_", "include", "include_model_definition", "size", "tags", response_mimetypes=["application/json"], ) def get_trained_models(self, model_id=None, params=None, headers=None): """ Retrieves configuration information for a trained inference model. ``_ :arg model_id: The ID of the trained models to fetch :arg allow_no_match: Whether to ignore if a wildcard expression matches no trained models. 
            (This includes `_all` string or when no trained models have been
            specified) Default: True
        :arg decompress_definition: Should the model definition be decompressed
            into valid JSON or returned in a custom compressed format. Defaults
            to true. Default: True
        :arg exclude_generated: Omits fields that are illegal to set on model
            PUT
        :arg from_: skips a number of trained models
        :arg include: A comma-separated list of fields to optionally include.
            Valid options are 'definition' and 'total_feature_importance'.
            Default is none.
        :arg include_model_definition: Should the full model definition be
            included in the results. These definitions can be large, so be
            cautious when including them. Defaults to false.
        :arg size: specifies a max number of trained models to get Default:
            100
        :arg tags: A comma-separated list of tags that the model must have.
        """
        if "from_" in params:
            params["from"] = params.pop("from_")

        return self.transport.perform_request(
            "GET",
            _make_path("_ml", "trained_models", model_id),
            params=params,
            headers=headers,
        )

    @query_params(
        "allow_no_match",
        "from_",
        "size",
        response_mimetypes=["application/json"],
    )
    def get_trained_models_stats(self, model_id=None, params=None, headers=None):
        """
        Retrieves usage information for trained inference models.

        ``_

        :arg model_id: The ID of the trained models stats to fetch
        :arg allow_no_match: Whether to ignore if a wildcard expression
            matches no trained models. (This includes `_all` string or when no
            trained models have been specified) Default: True
        :arg from_: skips a number of trained models
        :arg size: specifies a max number of trained models to get Default:
            100
        """
        if "from_" in params:
            params["from"] = params.pop("from_")

        return self.transport.perform_request(
            "GET",
            _make_path("_ml", "trained_models", model_id, "_stats"),
            params=params,
            headers=headers,
        )

    @query_params(
        "defer_definition_decompression",
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def put_trained_model(self, model_id, body, params=None, headers=None):
        """
        Creates an inference trained model.

        ``_

        :arg model_id: The ID of the trained models to store
        :arg body: The trained model configuration
        :arg defer_definition_decompression: If set to `true` and a
            `compressed_definition` is provided, the request defers definition
            decompression and skips relevant validations.
        """
        for param in (model_id, body):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        return self.transport.perform_request(
            "PUT",
            _make_path("_ml", "trained_models", model_id),
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def estimate_model_memory(self, body, params=None, headers=None):
        """
        Estimates the model memory.

        ``_

        :arg body: The analysis config, plus cardinality estimates for fields
            it references
        """
        if body in SKIP_IN_PATH:
            raise ValueError("Empty value passed for a required argument 'body'.")

        return self.transport.perform_request(
            "POST",
            "/_ml/anomaly_detectors/_estimate_model_memory",
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def update_data_frame_analytics(self, id, body, params=None, headers=None):
        """
        Updates certain properties of a data frame analytics job.
        ``_

        :arg id: The ID of the data frame analytics to update
        :arg body: The data frame analytics settings to update
        """
        for param in (id, body):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        return self.transport.perform_request(
            "POST",
            _make_path("_ml", "data_frame", "analytics", id, "_update"),
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        "timeout",
        "wait_for_completion",
        response_mimetypes=["application/json"],
    )
    def upgrade_job_snapshot(self, job_id, snapshot_id, params=None, headers=None):
        """
        Upgrades a given job snapshot to the current major version.

        ``_

        :arg job_id: The ID of the job
        :arg snapshot_id: The ID of the snapshot
        :arg timeout: How long should the API wait for the job to be opened
            and the old snapshot to be loaded.
        :arg wait_for_completion: Should the request wait until the task is
            complete before responding to the caller. Default is false.
        """
        for param in (job_id, snapshot_id):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        return self.transport.perform_request(
            "POST",
            _make_path(
                "_ml",
                "anomaly_detectors",
                job_id,
                "model_snapshots",
                snapshot_id,
                "_upgrade",
            ),
            params=params,
            headers=headers,
        )

    @query_params(
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def delete_trained_model_alias(
        self, model_id, model_alias, params=None, headers=None
    ):
        """
        Deletes a model alias that refers to the trained model.

        ``_

        :arg model_id: The trained model where the model alias is assigned
        :arg model_alias: The trained model alias to delete
        """
        for param in (model_id, model_alias):
            if param in SKIP_IN_PATH:
                raise ValueError("Empty value passed for a required argument.")

        return self.transport.perform_request(
            "DELETE",
            _make_path("_ml", "trained_models", model_id, "model_aliases", model_alias),
            params=params,
            headers=headers,
        )

    @query_params(
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def preview_data_frame_analytics(
        self, body=None, id=None, params=None, headers=None
    ):
        """
        Previews the data that will be analyzed given a data frame analytics
        config.

        ``_

        :arg body: The data frame analytics config to preview
        :arg id: The ID of the data frame analytics to preview
        """
        return self.transport.perform_request(
            "POST",
            _make_path("_ml", "data_frame", "analytics", id, "_preview"),
            params=params,
            headers=headers,
            body=body,
        )

    @query_params(
        "reassign",
        request_mimetypes=["application/json"],
        response_mimetypes=["application/json"],
    )
    def put_trained_model_alias(self, model_id, model_alias, params=None, headers=None):
        """
        Creates a new model alias (or reassigns an existing one) to refer to the
        trained model.

        ``_

        :arg model_id: The trained model where the model alias should be
            assigned
        :arg model_alias: The trained model alias to update
        :arg reassign: If the model_alias already exists and points to a
            separate model_id, this parameter must be true. Defaults to false.
""" for param in (model_id, model_alias): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_ml", "trained_models", model_id, "model_aliases", model_alias), params=params, headers=headers, ) @query_params( "charset", "column_names", "delimiter", "explain", "format", "grok_pattern", "has_header_row", "line_merge_size_limit", "lines_to_sample", "quote", "should_trim_fields", "timeout", "timestamp_field", "timestamp_format", request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) def find_file_structure(self, body, params=None, headers=None): """ Finds the structure of a text file. The text file must contain data that is suitable to be ingested into Elasticsearch. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg body: The contents of the file to be analyzed :arg charset: Optional parameter to specify the character set of the file :arg column_names: Optional parameter containing a comma separated list of the column names for a delimited file :arg delimiter: Optional parameter to specify the delimiter character for a delimited file - must be a single character :arg explain: Whether to include a commentary on how the structure was derived :arg format: Optional parameter to specify the high level file format Valid choices: ndjson, xml, delimited, semi_structured_text :arg grok_pattern: Optional parameter to specify the Grok pattern that should be used to extract fields from messages in a semi- structured text file :arg has_header_row: Optional parameter to specify whether a delimited file includes the column names in its first row :arg line_merge_size_limit: Maximum number of characters permitted in a single message when lines are merged to create messages. Default: 10000 :arg lines_to_sample: How many lines of the file should be included in the analysis Default: 1000 :arg quote: Optional parameter to specify the quote character for a delimited file - must be a single character :arg should_trim_fields: Optional parameter to specify whether the values between delimiters in a delimited file should have whitespace trimmed from them :arg timeout: Timeout after which the analysis will be aborted Default: 25s :arg timestamp_field: Optional parameter to specify the timestamp field in the file :arg timestamp_format: Optional parameter to specify the timestamp format in the file - may be either a Joda or Java time format """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return self.transport.perform_request( "POST", "/_ml/find_file_structure", params=params, headers=headers, body=body, ) @query_params( "wait_for_completion", response_mimetypes=["application/json"], ) def reset_job(self, job_id, params=None, headers=None): """ Resets an existing anomaly detection job. 
``_ :arg job_id: The ID of the job to reset :arg wait_for_completion: Should this request wait until the operation has completed before returning Default: True """ if job_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'job_id'.") return self.transport.perform_request( "POST", _make_path("_ml", "anomaly_detectors", job_id, "_reset"), params=params, headers=headers, ) @query_params( "allow_no_match", response_mimetypes=["application/json"], ) def get_model_snapshot_upgrade_stats( self, job_id, snapshot_id, params=None, headers=None ): """ Gets stats for anomaly detection job model snapshot upgrades that are in progress. ``_ :arg job_id: The ID of the job. May be a wildcard, comma separated list or `_all`. :arg snapshot_id: The ID of the snapshot. May be a wildcard, comma separated list or `_all`. :arg allow_no_match: Whether to ignore if a wildcard expression matches no jobs or no snapshots. (This includes the `_all` string.) """ for param in (job_id, snapshot_id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "GET", _make_path( "_ml", "anomaly_detectors", job_id, "model_snapshots", snapshot_id, "_upgrade", "_stats", ), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/client/ml.pyi000066400000000000000000001577721426163262700223330ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Sequence, Tuple, Union, ) from .utils import NamespacedClient class MlClient(NamespacedClient): def close_job( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., allow_no_jobs: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., force: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
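# Usage sketch for the close_job API typed above: a minimal, illustrative
# example. The cluster URL and the job ID "my-job" are assumptions for
# illustration only, not part of the generated client.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # hypothetical local cluster
# Force-close the job even if its datafeed is still running, waiting up to
# 30 seconds for the close to complete.
es.ml.close_job(job_id="my-job", force=True, timeout="30s")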
def delete_calendar( self, *, calendar_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_calendar_event( self, *, calendar_id: Any, event_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_calendar_job( self, *, calendar_id: Any, job_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_datafeed( self, *, datafeed_id: Any, force: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_expired_data( self, *, body: Optional[Mapping[str, Any]] = ..., job_id: Optional[Any] = ..., requests_per_second: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
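# Usage sketch for the deletion APIs typed above; the IDs "holidays",
# "my-job" and "datafeed-my-job" are hypothetical placeholders.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
# Detach the job from the calendar, then remove the calendar itself.
es.ml.delete_calendar_job(calendar_id="holidays", job_id="my-job")
es.ml.delete_calendar(calendar_id="holidays")
# force=True allows deleting a datafeed that has not been stopped first.
es.ml.delete_datafeed(datafeed_id="datafeed-my-job", force=True)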
def delete_filter( self, *, filter_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_forecast( self, *, job_id: Any, forecast_id: Optional[Any] = ..., allow_no_forecasts: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_job( self, *, job_id: Any, force: Optional[bool] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_model_snapshot( self, *, job_id: Any, snapshot_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def flush_job( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., advance_time: Optional[Any] = ..., calc_interim: Optional[bool] = ..., end: Optional[Any] = ..., skip_time: Optional[Any] = ..., start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
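# Usage sketch for flush_job: push any buffered data through the analysis
# and calculate interim results for the most recent bucket. "my-job" is a
# placeholder job ID and the job is assumed to be open.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
es.ml.flush_job(job_id="my-job", calc_interim=True)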
def forecast( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., duration: Optional[Any] = ..., expires_in: Optional[Any] = ..., max_model_memory: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_buckets( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., timestamp: Optional[Any] = ..., anomaly_score: Optional[Any] = ..., desc: Optional[bool] = ..., end: Optional[Any] = ..., exclude_interim: Optional[bool] = ..., expand: Optional[bool] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., sort: Optional[Any] = ..., start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_calendar_events( self, *, calendar_id: Any, end: Optional[Any] = ..., from_: Optional[Any] = ..., job_id: Optional[Any] = ..., size: Optional[Any] = ..., start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_calendars( self, *, body: Optional[Mapping[str, Any]] = ..., calendar_id: Optional[Any] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
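# Usage sketch combining forecast and get_buckets, both typed in this class.
# The job ID and score threshold are illustrative; the "buckets" key follows
# the 7.x ML get-buckets response.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
# Forecast one day ahead (the job must be open); keep the forecast for two days.
es.ml.forecast(job_id="my-job", duration="1d", expires_in="2d")
# Page through the highest-scoring result buckets.
resp = es.ml.get_buckets(
    job_id="my-job",
    anomaly_score=75.0,
    sort="anomaly_score",
    desc=True,
    from_=0,
    size=5,
)
for bucket in resp["buckets"]:
    print(bucket["timestamp"], bucket["anomaly_score"])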
def get_categories( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., category_id: Optional[Any] = ..., from_: Optional[Any] = ..., partition_field_value: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_datafeed_stats( self, *, datafeed_id: Optional[Any] = ..., allow_no_datafeeds: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_datafeeds( self, *, datafeed_id: Optional[Any] = ..., allow_no_datafeeds: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., exclude_generated: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_filters( self, *, filter_id: Optional[Any] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
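# Usage sketch for the datafeed read APIs: list configured datafeeds and
# inspect one datafeed's runtime state. IDs are hypothetical.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
# allow_no_match=True keeps a wildcard that matches nothing from raising.
for feed in es.ml.get_datafeeds(datafeed_id="datafeed-*", allow_no_match=True)["datafeeds"]:
    print(feed["datafeed_id"])
print(es.ml.get_datafeed_stats(datafeed_id="datafeed-my-job"))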
def get_influencers( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., desc: Optional[bool] = ..., end: Optional[Any] = ..., exclude_interim: Optional[bool] = ..., from_: Optional[Any] = ..., influencer_score: Optional[Any] = ..., size: Optional[Any] = ..., sort: Optional[Any] = ..., start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_job_stats( self, *, job_id: Optional[Any] = ..., allow_no_jobs: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_jobs( self, *, job_id: Optional[Any] = ..., allow_no_jobs: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., exclude_generated: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_model_snapshots( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., snapshot_id: Optional[Any] = ..., desc: Optional[bool] = ..., end: Optional[Any] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., sort: Optional[Any] = ..., start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
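# Usage sketch for the job and snapshot read APIs. "my-job" is a placeholder;
# the "jobs" and "model_snapshots" response keys follow the 7.x ML APIs.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
stats = es.ml.get_job_stats(job_id="my-job")
print(stats["jobs"][0]["state"])
# Fetch the newest model snapshot first.
snaps = es.ml.get_model_snapshots(
    job_id="my-job", sort="timestamp", desc=True, size=1
)
print(snaps["model_snapshots"][0]["snapshot_id"])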
def get_overall_buckets( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., allow_no_jobs: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., bucket_span: Optional[Any] = ..., end: Optional[Any] = ..., exclude_interim: Optional[bool] = ..., overall_score: Optional[Any] = ..., start: Optional[Any] = ..., top_n: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_records( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., desc: Optional[bool] = ..., end: Optional[Any] = ..., exclude_interim: Optional[bool] = ..., from_: Optional[Any] = ..., record_score: Optional[Any] = ..., size: Optional[Any] = ..., sort: Optional[Any] = ..., start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def info( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def open_job( self, *, job_id: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
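# Usage sketch: open an anomaly detection job, then read its highest-scoring
# anomaly records. All values are illustrative.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
es.ml.open_job(job_id="my-job")
records = es.ml.get_records(
    job_id="my-job", record_score=80.0, sort="record_score", desc=True, size=5
)
for record in records["records"]:
    print(record["timestamp"], record["record_score"])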
def post_calendar_events( self, *, calendar_id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def post_data( self, *, job_id: Any, body: Union[Mapping[str, Any], Sequence[Mapping[str, Any]], bytes, str], reset_end: Optional[Any] = ..., reset_start: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def preview_datafeed( self, *, body: Optional[Mapping[str, Any]] = ..., datafeed_id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_calendar( self, *, calendar_id: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_calendar_job( self, *, calendar_id: Any, job_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
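# Usage sketch for post_data: the body may be a sequence of documents, which
# the client serializes to newline-delimited JSON before sending. The job is
# assumed to be open; timestamps (epoch ms) and field names are hypothetical.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
docs = [
    {"timestamp": 1641081600000, "responsetime": 132.5},
    {"timestamp": 1641081660000, "responsetime": 128.1},
]
es.ml.post_data(job_id="my-job", body=docs)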
def put_datafeed( self, *, datafeed_id: Any, body: Mapping[str, Any], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_filter( self, *, filter_id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_job( self, *, job_id: Any, body: Mapping[str, Any], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def revert_model_snapshot( self, *, job_id: Any, snapshot_id: Any, body: Optional[Mapping[str, Any]] = ..., delete_intervening_results: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def set_upgrade_mode( self, *, enabled: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
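# Usage sketch: create a minimal anomaly detection job and a datafeed that
# feeds it. The index name "server-metrics" and the field names are
# placeholders for illustration.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
es.ml.put_job(
    job_id="my-job",
    body={
        "analysis_config": {
            "bucket_span": "15m",
            "detectors": [{"function": "mean", "field_name": "responsetime"}],
        },
        "data_description": {"time_field": "timestamp"},
    },
)
es.ml.put_datafeed(
    datafeed_id="datafeed-my-job",
    body={
        "job_id": "my-job",
        "indices": ["server-metrics"],
        "query": {"match_all": {}},
    },
)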
def start_datafeed( self, *, datafeed_id: Any, body: Optional[Mapping[str, Any]] = ..., end: Optional[Any] = ..., start: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stop_datafeed( self, *, datafeed_id: Any, body: Optional[Mapping[str, Any]] = ..., allow_no_datafeeds: Optional[bool] = ..., allow_no_match: Optional[bool] = ..., force: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def update_datafeed( self, *, datafeed_id: Any, body: Mapping[str, Any], allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_throttled: Optional[bool] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def update_filter( self, *, filter_id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def update_job( self, *, job_id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
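# Usage sketch: run a datafeed over a fixed time window, then stop it. The
# datafeed must belong to an opened job; IDs and timestamps are illustrative.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
# With both start and end set, the datafeed processes the window and stops;
# leaving end unset would keep it running in real time.
es.ml.start_datafeed(
    datafeed_id="datafeed-my-job",
    start="2022-01-01T00:00:00Z",
    end="2022-01-02T00:00:00Z",
)
es.ml.stop_datafeed(datafeed_id="datafeed-my-job", timeout="30s")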
def update_model_snapshot( self, *, job_id: Any, snapshot_id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def validate( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def validate_detector( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_data_frame_analytics( self, *, id: Any, force: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def evaluate_data_frame( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
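# Usage sketch for the validation APIs: check a job config and a single
# detector without creating anything. Field names are placeholders.
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
es.ml.validate(
    body={
        "analysis_config": {
            "bucket_span": "15m",
            "detectors": [{"function": "mean", "field_name": "responsetime"}],
        },
        "data_description": {"time_field": "timestamp"},
    }
)
es.ml.validate_detector(body={"function": "mean", "field_name": "responsetime"})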
def get_data_frame_analytics( self, *, id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., exclude_generated: Optional[bool] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_data_frame_analytics_stats( self, *, id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., verbose: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_data_frame_analytics( self, *, id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def start_data_frame_analytics( self, *, id: Any, body: Optional[Mapping[str, Any]] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stop_data_frame_analytics( self, *, id: Any, body: Optional[Mapping[str, Any]] = ..., allow_no_match: Optional[bool] = ..., force: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
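The five stubs above cover the data frame analytics lifecycle end to end. A minimal create/start/poll/stop sketch, with hypothetical index and job names:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # assumed local cluster

es.ml.put_data_frame_analytics(id="weblog-outliers", body={
    "source": {"index": "weblogs"},         # hypothetical source index
    "dest": {"index": "weblog-outliers"},   # results land here
    "analysis": {"outlier_detection": {}},
})
es.ml.start_data_frame_analytics(id="weblog-outliers", timeout="1m")

# Poll progress; `state` is e.g. "started", "analyzing", or "stopped".
stats = es.ml.get_data_frame_analytics_stats(id="weblog-outliers")
print(stats["data_frame_analytics"][0]["state"])

es.ml.stop_data_frame_analytics(id="weblog-outliers", force=False)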
def delete_trained_model( self, *, model_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def explain_data_frame_analytics( self, *, body: Optional[Mapping[str, Any]] = ..., id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_trained_models( self, *, model_id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., decompress_definition: Optional[bool] = ..., exclude_generated: Optional[bool] = ..., from_: Optional[Any] = ..., include: Optional[Any] = ..., include_model_definition: Optional[bool] = ..., size: Optional[Any] = ..., tags: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_trained_models_stats( self, *, model_id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
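explain_data_frame_analytics above reports field selection and a memory estimate for a configuration before you create the job, while get_trained_models/get_trained_models_stats page through the inference models on the cluster. A sketch in which the index name is an assumption:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # assumed local cluster

explanation = es.ml.explain_data_frame_analytics(body={
    "source": {"index": "weblogs"},
    "analysis": {"outlier_detection": {}},
})
print(explanation["memory_estimation"]["expected_memory_without_disk"])

# Page through models; skip the large generated/compressed parts.
models = es.ml.get_trained_models(exclude_generated=True, from_=0, size=10)
for cfg in models["trained_model_configs"]:
    print(cfg["model_id"])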
def put_trained_model( self, *, model_id: Any, body: Mapping[str, Any], defer_definition_decompression: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def estimate_model_memory( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def update_data_frame_analytics( self, *, id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def upgrade_job_snapshot( self, *, job_id: Any, snapshot_id: Any, timeout: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_trained_model_alias( self, *, model_id: Any, model_alias: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
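estimate_model_memory above sizes an anomaly detection model before the job exists; cardinality hints are required for any by/over/partition fields referenced by the detectors. A sketch with assumed field names and cardinalities:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # assumed local cluster

resp = es.ml.estimate_model_memory(body={
    "analysis_config": {
        "bucket_span": "15m",
        "detectors": [
            {"function": "sum", "field_name": "bytes", "by_field_name": "status"}
        ],
    },
    # Required when detectors split on a field: approximate distinct values.
    "overall_cardinality": {"status": 10},
})
print(resp["model_memory_estimate"])  # e.g. "21mb"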
def preview_data_frame_analytics( self, *, body: Optional[Mapping[str, Any]] = ..., id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_trained_model_alias( self, *, model_id: Any, model_alias: Any, reassign: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def find_file_structure( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], charset: Optional[Any] = ..., column_names: Optional[Any] = ..., delimiter: Optional[Any] = ..., explain: Optional[bool] = ..., format: Optional[Any] = ..., grok_pattern: Optional[Any] = ..., has_header_row: Optional[bool] = ..., line_merge_size_limit: Optional[Any] = ..., lines_to_sample: Optional[Any] = ..., quote: Optional[Any] = ..., should_trim_fields: Optional[bool] = ..., timeout: Optional[Any] = ..., timestamp_field: Optional[Any] = ..., timestamp_format: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def reset_job( self, *, job_id: Any, wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
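find_file_structure above is unusual in that its body is the raw sample text itself (bytes or str), not JSON; query parameters steer the analysis. put_trained_model_alias atomically repoints an alias at a new model. A sketch with made-up sample data and model IDs:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # assumed local cluster

sample = b"2020-01-01T00:00:00Z,alpha,1\n2020-01-01T00:00:01Z,beta,2\n"
structure = es.ml.find_file_structure(
    body=sample,
    format="delimited",
    delimiter=",",
    has_header_row=False,
    lines_to_sample=2,
)
print(structure["mappings"])  # suggested index mappings for the sample

# Repoint a serving alias from model-v1 to model-v2 (hypothetical IDs).
es.ml.put_trained_model_alias(model_id="model-v2", model_alias="model-current", reassign=True)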
def get_model_snapshot_upgrade_stats( self, *, job_id: Any, snapshot_id: Any, allow_no_match: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/monitoring.py000066400000000000000000000042261426163262700237200ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _bulk_body, _make_path, query_params class MonitoringClient(NamespacedClient): @query_params( "interval", "system_api_version", "system_id", request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) def bulk(self, body, doc_type=None, params=None, headers=None): """ Used by the monitoring features to send monitoring data. ``_ :arg body: The operation definition and data (action-data pairs), separated by newlines :arg doc_type: Default document type for items which don't provide one :arg interval: Collection interval (e.g., '10s' or '10000ms') of the payload :arg system_api_version: API Version of the monitored system :arg system_id: Identifier of the monitored system """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return self.transport.perform_request( "POST", _make_path("_monitoring", doc_type, "bulk"), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/client/monitoring.pyi000066400000000000000000000036131426163262700240700ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
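MonitoringClient.bulk, shown above in monitoring.py, is the ingestion hook used by monitored systems (Logstash, Kibana, Beats) rather than by typical applications; its body is action/metadata-and-document pairs, as in the regular bulk API. A hedged sketch only; the payload fields and parameter values are illustrative, not a documented contract:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # assumed local cluster

es.monitoring.bulk(
    system_id="logstash",       # identifier of the monitored system
    system_api_version="7",     # API version that system speaks
    interval="10s",             # collection interval of this payload
    body=[
        {"index": {"_type": "logstash_stats"}},
        {"timestamp": "2022-01-01T00:00:00Z", "logstash_stats": {"events": {"out": 1024}}},
    ],
)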
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Sequence, Tuple, Union, ) from .utils import NamespacedClient class MonitoringClient(NamespacedClient): def bulk( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], doc_type: Optional[Any] = ..., interval: Optional[Any] = ..., system_api_version: Optional[Any] = ..., system_id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/nodes.py000066400000000000000000000250161426163262700226430ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class NodesClient(NamespacedClient): @query_params( "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def reload_secure_settings( self, body=None, node_id=None, params=None, headers=None ): """ Reloads secure settings. ``_ :arg body: An object containing the password for the elasticsearch keystore :arg node_id: A comma-separated list of node IDs to span the reload/reinit call. Should stay empty because reloading usually involves all cluster nodes. :arg timeout: Explicit operation timeout """ return self.transport.perform_request( "POST", _make_path("_nodes", node_id, "reload_secure_settings"), params=params, headers=headers, body=body, ) @query_params( "flat_settings", "timeout", response_mimetypes=["application/json"], ) def info(self, node_id=None, metric=None, params=None, headers=None): """ Returns information about nodes in the cluster. ``_ :arg node_id: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg metric: A comma-separated list of metrics you wish returned. Use `_all` to retrieve all metrics and `_none` to retrieve the node identity without any additional metrics. 
Valid choices: settings, os, process, jvm, thread_pool, transport, http, plugins, ingest, indices, aggregations, _all, _none :arg flat_settings: Return settings in flat format (default: false) :arg timeout: Explicit operation timeout """ return self.transport.perform_request( "GET", _make_path("_nodes", node_id, metric), params=params, headers=headers ) @query_params( "completion_fields", "fielddata_fields", "fields", "groups", "include_segment_file_sizes", "include_unloaded_segments", "level", "timeout", "types", response_mimetypes=["application/json"], ) def stats( self, node_id=None, metric=None, index_metric=None, params=None, headers=None ): """ Returns statistical information about nodes in the cluster. ``_ :arg node_id: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg metric: Limit the information returned to the specified metrics Valid choices: _all, breaker, fs, http, indices, jvm, os, process, thread_pool, transport, discovery, indexing_pressure :arg index_metric: Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) metric isn't specified. Valid choices: _all, completion, docs, fielddata, query_cache, flush, get, indexing, merge, request_cache, refresh, search, segments, store, warmer, suggest, shard_stats :arg completion_fields: A comma-separated list of fields for `fielddata` and `suggest` index metric (supports wildcards) :arg fielddata_fields: A comma-separated list of fields for `fielddata` index metric (supports wildcards) :arg fields: A comma-separated list of fields for `fielddata` and `completion` index metric (supports wildcards) :arg groups: A comma-separated list of search groups for `search` index metric :arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only applies if segment stats are requested) :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into memory :arg level: Return indices stats aggregated at index, node or shard level Valid choices: indices, node, shards Default: node :arg timeout: Explicit operation timeout :arg types: A comma-separated list of document types for the `indexing` index metric """ return self.transport.perform_request( "GET", _make_path("_nodes", node_id, "stats", metric, index_metric), params=params, headers=headers, ) @query_params( "ignore_idle_threads", "interval", "snapshots", "sort", "threads", "timeout", "type", response_mimetypes=["text/plain"], ) def hot_threads(self, node_id=None, params=None, headers=None): """ Returns information about hot threads on each node in the cluster. 
``_ :arg node_id: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg ignore_idle_threads: Don't show threads that are in known- idle places, such as waiting on a socket select or pulling from an empty task queue (default: true) :arg interval: The interval for the second sampling of threads :arg snapshots: Number of samples of thread stacktrace (default: 10) :arg sort: The sort order for 'cpu' type (default: total) Valid choices: cpu, total :arg threads: Specify the number of threads to provide information for (default: 3) :arg timeout: Explicit operation timeout :arg type: The type to sample (default: cpu) Valid choices: cpu, wait, block, mem """ return self.transport.perform_request( "GET", _make_path("_nodes", node_id, "hot_threads"), params=params, headers=headers, ) @query_params( "timeout", response_mimetypes=["application/json"], ) def usage(self, node_id=None, metric=None, params=None, headers=None): """ Returns low-level information about REST actions usage on nodes. ``_ :arg node_id: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg metric: Limit the information returned to the specified metrics Valid choices: _all, rest_actions :arg timeout: Explicit operation timeout """ return self.transport.perform_request( "GET", _make_path("_nodes", node_id, "usage", metric), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def clear_repositories_metering_archive( self, node_id, max_archive_version, params=None, headers=None ): """ Removes the archived repositories metering information present in the cluster. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg node_id: Comma-separated list of node IDs or names used to limit returned information. :arg max_archive_version: Specifies the maximum archive_version to be cleared from the archive. """ for param in (node_id, max_archive_version): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "DELETE", _make_path( "_nodes", node_id, "_repositories_metering", max_archive_version ), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def get_repositories_metering_info(self, node_id, params=None, headers=None): """ Returns cluster repositories metering information. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg node_id: A comma-separated list of node IDs or names to limit the returned information. """ if node_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'node_id'.") return self.transport.perform_request( "GET", _make_path("_nodes", node_id, "_repositories_metering"), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/client/nodes.pyi000066400000000000000000000160571426163262700230210ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class NodesClient(NamespacedClient): def reload_secure_settings( self, *, body: Optional[Mapping[str, Any]] = ..., node_id: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def info( self, *, node_id: Optional[Any] = ..., metric: Optional[Any] = ..., flat_settings: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stats( self, *, node_id: Optional[Any] = ..., metric: Optional[Any] = ..., index_metric: Optional[Any] = ..., completion_fields: Optional[Any] = ..., fielddata_fields: Optional[Any] = ..., fields: Optional[Any] = ..., groups: Optional[bool] = ..., include_segment_file_sizes: Optional[bool] = ..., include_unloaded_segments: Optional[bool] = ..., level: Optional[Any] = ..., timeout: Optional[Any] = ..., types: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
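The NodesClient stubs map the node_id/metric path segments onto keyword arguments, and the hot_threads stub just below is the one API in this namespace typed as returning plain text (-> str) rather than a JSON dict. A usage sketch against an assumed local cluster:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # assumed local cluster

# JSON APIs return dicts.
info = es.nodes.info(metric="os,jvm", flat_settings=True)
stats = es.nodes.stats(metric="indices", index_metric="docs,store", level="shards")

# hot_threads is served as text/plain, so the client hands back a str.
text = es.nodes.hot_threads(threads=5, type="cpu", interval="500ms")
print(text)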
def hot_threads( self, *, node_id: Optional[Any] = ..., ignore_idle_threads: Optional[bool] = ..., interval: Optional[Any] = ..., snapshots: Optional[Any] = ..., sort: Optional[Any] = ..., threads: Optional[Any] = ..., timeout: Optional[Any] = ..., type: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> str: ... def usage( self, *, node_id: Optional[Any] = ..., metric: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def clear_repositories_metering_archive( self, *, node_id: Any, max_archive_version: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_repositories_metering_info( self, *, node_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/remote.py000066400000000000000000000022461426163262700230260ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, query_params class RemoteClient(NamespacedClient): @query_params() def info(self, params=None, headers=None): """ ``_ """ return self.transport.perform_request( "GET", "/_remote/info", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/remote.pyi000066400000000000000000000027421426163262700232000ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class RemoteClient(NamespacedClient): def info( self, *, timeout: Optional[Any] = None, pretty: Optional[bool] = None, human: Optional[bool] = None, error_trace: Optional[bool] = None, format: Optional[str] = None, filter_path: Optional[Union[str, Collection[str]]] = None, http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = None, headers: Optional[MutableMapping[str, str]] = None, ) -> Any: ... elasticsearch-py-7.17.6/elasticsearch/client/rollup.py000066400000000000000000000223021426163262700230430ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class RollupClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def delete_job(self, id, params=None, headers=None): """ Deletes an existing rollup job. ``_ .. 
warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg id: The ID of the job to delete """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "DELETE", _make_path("_rollup", "job", id), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_jobs(self, id=None, params=None, headers=None): """ Retrieves the configuration, stats, and status of rollup jobs. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg id: The ID of the job(s) to fetch. Accepts glob patterns, or left blank for all jobs """ return self.transport.perform_request( "GET", _make_path("_rollup", "job", id), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_rollup_caps(self, id=None, params=None, headers=None): """ Returns the capabilities of any rollup jobs that have been configured for a specific index or index pattern. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg id: The ID of the index to check rollup capabilities on, or left blank for all jobs """ return self.transport.perform_request( "GET", _make_path("_rollup", "data", id), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_rollup_index_caps(self, index, params=None, headers=None): """ Returns the rollup capabilities of all jobs inside of a rollup index (e.g. the index where rollup data is stored). ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: The rollup index or index pattern to obtain rollup capabilities from. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") return self.transport.perform_request( "GET", _make_path(index, "_rollup", "data"), params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_job(self, id, body, params=None, headers=None): """ Creates a rollup job. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg id: The ID of the job to create :arg body: The job configuration """ for param in (id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_rollup", "job", id), params=params, headers=headers, body=body, ) @query_params( "rest_total_hits_as_int", "typed_keys", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def rollup_search(self, index, body, doc_type=None, params=None, headers=None): """ Enables searching rolled-up data using the standard query DSL. ``_ .. 
warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: The indices or index-pattern(s) (containing rollup or regular data) that should be searched :arg body: The search request body :arg doc_type: The doc type inside the index :arg rest_total_hits_as_int: Indicates whether hits.total should be rendered as an integer or an object in the rest search response :arg typed_keys: Specify whether aggregation and suggester names should be prefixed by their respective types in the response """ for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "POST", _make_path(index, doc_type, "_rollup_search"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def start_job(self, id, params=None, headers=None): """ Starts an existing, stopped rollup job. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg id: The ID of the job to start """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "POST", _make_path("_rollup", "job", id, "_start"), params=params, headers=headers, ) @query_params( "timeout", "wait_for_completion", response_mimetypes=["application/json"], ) def stop_job(self, id, params=None, headers=None): """ Stops an existing, started rollup job. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg id: The ID of the job to stop :arg timeout: Block for (at maximum) the specified duration while waiting for the job to stop. Defaults to 30s. :arg wait_for_completion: True if the API should block until the job has fully stopped, false if should be executed async. Defaults to false. """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "POST", _make_path("_rollup", "job", id, "_stop"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def rollup(self, index, rollup_index, body, params=None, headers=None): """ Rollup an index ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: The index to roll up :arg rollup_index: The name of the rollup index to create :arg body: The rollup configuration """ for param in (index, rollup_index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "POST", _make_path(index, "_rollup", rollup_index), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/client/rollup.pyi000066400000000000000000000172311426163262700232210ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class RollupClient(NamespacedClient): def delete_job( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_jobs( self, *, id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_rollup_caps( self, *, id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_rollup_index_caps( self, *, index: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
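These RollupClient stubs pair with the experimental rollup APIs: put_job/start_job define and run a continuous job, and rollup_search (stubbed just below) queries the condensed index with a restricted subset of the usual aggregations. A sketch with hypothetical index and job names:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # assumed local cluster

es.rollup.put_job(id="sensor-rollup", body={
    "index_pattern": "sensor-*",
    "rollup_index": "sensor_rollup",
    "cron": "*/30 * * * * ?",   # how often the job checks for new data
    "page_size": 1000,
    "groups": {"date_histogram": {"field": "timestamp", "fixed_interval": "1h"}},
    "metrics": [{"field": "temperature", "metrics": ["min", "max", "avg"]}],
})
es.rollup.start_job(id="sensor-rollup")

# Aggregations only; plain hits are not supported against rolled-up data.
resp = es.rollup.rollup_search(index="sensor_rollup", body={
    "size": 0,
    "aggregations": {"max_temp": {"max": {"field": "temperature"}}},
})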
def put_job( self, *, id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def rollup_search( self, *, index: Any, body: Mapping[str, Any], doc_type: Optional[Any] = ..., rest_total_hits_as_int: Optional[bool] = ..., typed_keys: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def start_job( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stop_job( self, *, id: Any, timeout: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def rollup( self, *, index: Any, rollup_index: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/searchable_snapshots.py000066400000000000000000000143331426163262700257260ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. 
See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SearchableSnapshotsClient(NamespacedClient): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", response_mimetypes=["application/json"], ) def clear_cache(self, index=None, params=None, headers=None): """ Clear the cache of searchable snapshots. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg index: A comma-separated list of index names :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both. Valid choices: open, closed, none, all Default: open :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) """ return self.transport.perform_request( "POST", _make_path(index, "_searchable_snapshots", "cache", "clear"), params=params, headers=headers, ) @query_params( "master_timeout", "storage", "wait_for_completion", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def mount(self, repository, snapshot, body, params=None, headers=None): """ Mount a snapshot as a searchable index. ``_ :arg repository: The name of the repository containing the snapshot of the index to mount :arg snapshot: The name of the snapshot of the index to mount :arg body: The restore configuration for mounting the snapshot as searchable :arg master_timeout: Explicit operation timeout for connection to master node :arg storage: Selects the kind of local storage used to accelerate searches. Experimental, and defaults to `full_copy` :arg wait_for_completion: Should this request wait until the operation has completed before returning """ for param in (repository, snapshot, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "POST", _make_path("_snapshot", repository, snapshot, "_mount"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def repository_stats(self, repository, params=None, headers=None): """ DEPRECATED: This API is replaced by the Repositories Metering API. ``_ .. 
warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg repository: The repository for which to get the stats for """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") return self.transport.perform_request( "GET", _make_path("_snapshot", repository, "_stats"), params=params, headers=headers, ) @query_params( "level", response_mimetypes=["application/json"], ) def stats(self, index=None, params=None, headers=None): """ Retrieve shard-level statistics about searchable snapshots. ``_ :arg index: A comma-separated list of index names :arg level: Return stats aggregated at cluster, index or shard level Valid choices: cluster, indices, shards Default: indices """ return self.transport.perform_request( "GET", _make_path(index, "_searchable_snapshots", "stats"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def cache_stats(self, node_id=None, params=None, headers=None): """ Retrieve node-level cache statistics about searchable snapshots. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg node_id: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes """ return self.transport.perform_request( "GET", _make_path("_searchable_snapshots", node_id, "cache", "stats"), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/client/searchable_snapshots.pyi000066400000000000000000000116441426163262700261010ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class SearchableSnapshotsClient(NamespacedClient): def clear_cache( self, *, index: Optional[Any] = ..., allow_no_indices: Optional[bool] = ..., expand_wildcards: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
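The mount stub just below corresponds to the most commonly used call in this namespace: it turns a snapshotted index into a searchable index backed by the repository. A sketch with hypothetical repository, snapshot, and index names:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # assumed local cluster

es.searchable_snapshots.mount(
    repository="my-backups",
    snapshot="snap-2022-01-01",
    body={
        "index": "logs-2021.12",                  # index inside the snapshot
        "renamed_index": "logs-2021.12-mounted",  # name to mount it under
    },
    wait_for_completion=True,
    storage="shared_cache",  # or "full_copy" (the default)
)

# Shard-level statistics for mounted indices.
stats = es.searchable_snapshots.stats(index="logs-2021.12-mounted", level="shards")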
def mount( self, *, repository: Any, snapshot: Any, body: Mapping[str, Any], master_timeout: Optional[Any] = ..., storage: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def repository_stats( self, *, repository: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stats( self, *, index: Optional[Any] = ..., level: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def cache_stats( self, *, node_id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/security.py000066400000000000000000001070251426163262700234030ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
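The SecurityClient that follows covers authentication, API keys, and native-realm user and role management. A minimal sketch of two of the calls defined below, with assumed credentials, a hypothetical username, and an illustrative role descriptor:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200", http_auth=("elastic", "changeme"))  # assumed credentials

# Create a scoped API key that expires after a week.
key = es.security.create_api_key(body={
    "name": "ingest-key",
    "expiration": "7d",
    "role_descriptors": {
        "writer": {"index": [{"names": ["logs-*"], "privileges": ["write"]}]}
    },
})
print(key["id"], key["api_key"])

# Change a native-realm user's password; wait for the change to be searchable.
es.security.change_password(
    username="jdoe",
    body={"password": "a-new-long-password"},  # placeholder value
    refresh="wait_for",
)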
from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SecurityClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def authenticate(self, params=None, headers=None): """ Enables authentication as a user and retrieves information about the authenticated user. ``_ """ return self.transport.perform_request( "GET", "/_security/_authenticate", params=params, headers=headers ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def change_password(self, body, username=None, params=None, headers=None): """ Changes the passwords of users in the native realm and built-in users. ``_ :arg body: the new password for the user :arg username: The username of the user to change the password for :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PUT", _make_path("_security", "user", username, "_password"), params=params, headers=headers, body=body, ) @query_params( "usernames", response_mimetypes=["application/json"], ) def clear_cached_realms(self, realms, params=None, headers=None): """ Evicts users from the user cache. Can completely clear the cache or evict specific users. ``_ :arg realms: Comma-separated list of realms to clear :arg usernames: Comma-separated list of usernames to clear from the cache """ if realms in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'realms'.") return self.transport.perform_request( "POST", _make_path("_security", "realm", realms, "_clear_cache"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def clear_cached_roles(self, name, params=None, headers=None): """ Evicts roles from the native role cache. ``_ :arg name: Role name """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "POST", _make_path("_security", "role", name, "_clear_cache"), params=params, headers=headers, ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def create_api_key(self, body, params=None, headers=None): """ Creates an API key for access without requiring basic authentication. ``_ :arg body: The api key request to create an API key :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PUT", "/_security/api_key", params=params, headers=headers, body=body ) @query_params( "refresh", response_mimetypes=["application/json"], ) def delete_privileges(self, application, name, params=None, headers=None): """ Removes application privileges.
``_ :arg application: Application name :arg name: Privilege name :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ for param in (application, name): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "DELETE", _make_path("_security", "privilege", application, name), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) def delete_role(self, name, params=None, headers=None): """ Removes roles in the native realm. ``_ :arg name: Role name :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "DELETE", _make_path("_security", "role", name), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) def delete_role_mapping(self, name, params=None, headers=None): """ Removes role mappings. ``_ :arg name: Role-mapping name :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( "DELETE", _make_path("_security", "role_mapping", name), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) def delete_user(self, username, params=None, headers=None): """ Deletes users from the native realm. ``_ :arg username: username :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") return self.transport.perform_request( "DELETE", _make_path("_security", "user", username), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) def disable_user(self, username, params=None, headers=None): """ Disables users in the native realm. ``_ :arg username: The username of the user to disable :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. 
Valid choices: true, false, wait_for """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") return self.transport.perform_request( "PUT", _make_path("_security", "user", username, "_disable"), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) def enable_user(self, username, params=None, headers=None): """ Enables users in the native realm. ``_ :arg username: The username of the user to enable :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") return self.transport.perform_request( "PUT", _make_path("_security", "user", username, "_enable"), params=params, headers=headers, ) @query_params( "id", "name", "owner", "realm_name", "username", response_mimetypes=["application/json"], ) def get_api_key(self, params=None, headers=None): """ Retrieves information for one or more API keys. ``_ :arg id: API key id of the API key to be retrieved :arg name: API key name of the API key to be retrieved :arg owner: flag to query API keys owned by the currently authenticated user :arg realm_name: realm name of the user who created this API key to be retrieved :arg username: user name of the user who created this API key to be retrieved """ return self.transport.perform_request( "GET", "/_security/api_key", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_privileges(self, application=None, name=None, params=None, headers=None): """ Retrieves application privileges. ``_ :arg application: Application name :arg name: Privilege name """ return self.transport.perform_request( "GET", _make_path("_security", "privilege", application, name), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def get_role(self, name=None, params=None, headers=None): """ Retrieves roles in the native realm. ``_ :arg name: A comma-separated list of role names """ return self.transport.perform_request( "GET", _make_path("_security", "role", name), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_role_mapping(self, name=None, params=None, headers=None): """ Retrieves role mappings. ``_ :arg name: A comma-separated list of role-mapping names """ return self.transport.perform_request( "GET", _make_path("_security", "role_mapping", name), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def get_token(self, body, params=None, headers=None): """ Creates a bearer token for access without requiring basic authentication. ``_ :arg body: The token request to get """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_security/oauth2/token", params=params, headers=headers, body=body ) @query_params( response_mimetypes=["application/json"], ) def get_user(self, username=None, params=None, headers=None): """ Retrieves information about users in the native realm and built-in users. 
``_ :arg username: A comma-separated list of usernames """ return self.transport.perform_request( "GET", _make_path("_security", "user", username), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def get_user_privileges(self, params=None, headers=None): """ Retrieves security privileges for the logged in user. ``_ """ return self.transport.perform_request( "GET", "/_security/user/_privileges", params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def has_privileges(self, body, user=None, params=None, headers=None): """ Determines whether the specified user has a specified list of privileges. ``_ :arg body: The privileges to test :arg user: Username """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", _make_path("_security", "user", user, "_has_privileges"), params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def invalidate_api_key(self, body, params=None, headers=None): """ Invalidates one or more API keys. ``_ :arg body: The api key request to invalidate API key(s) """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "DELETE", "/_security/api_key", params=params, headers=headers, body=body ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def invalidate_token(self, body, params=None, headers=None): """ Invalidates one or more access tokens or refresh tokens. ``_ :arg body: The token to invalidate """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "DELETE", "/_security/oauth2/token", params=params, headers=headers, body=body, ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_privileges(self, body, params=None, headers=None): """ Adds or updates application privileges. ``_ :arg body: The privilege(s) to add :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PUT", "/_security/privilege/", params=params, headers=headers, body=body ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_role(self, name, body, params=None, headers=None): """ Adds and updates roles in the native realm. ``_ :arg name: Role name :arg body: The role to add :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. 
Valid choices: true, false, wait_for """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_security", "role", name), params=params, headers=headers, body=body, ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_role_mapping(self, name, body, params=None, headers=None): """ Creates and updates role mappings. ``_ :arg name: Role-mapping name :arg body: The role mapping to add :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ for param in (name, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_security", "role_mapping", name), params=params, headers=headers, body=body, ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_user(self, username, body, params=None, headers=None): """ Adds and updates users in the native realm. These users are commonly referred to as native users. ``_ :arg username: The username of the User :arg body: The user to add :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ for param in (username, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_security", "user", username), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def get_builtin_privileges(self, params=None, headers=None): """ Retrieves the list of cluster privileges and index privileges that are available in this version of Elasticsearch. ``_ """ return self.transport.perform_request( "GET", "/_security/privilege/_builtin", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def clear_cached_privileges(self, application, params=None, headers=None): """ Evicts application privileges from the native application privileges cache. ``_ :arg application: A comma-separated list of application names """ if application in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'application'." ) return self.transport.perform_request( "POST", _make_path("_security", "privilege", application, "_clear_cache"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def clear_api_key_cache(self, ids, params=None, headers=None): """ Clear a subset or all entries from the API key cache. 
``_ :arg ids: A comma-separated list of IDs of API keys to clear from the cache """ if ids in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'ids'.") return self.transport.perform_request( "POST", _make_path("_security", "api_key", ids, "_clear_cache"), params=params, headers=headers, ) @query_params( "refresh", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def grant_api_key(self, body, params=None, headers=None): """ Creates an API key on behalf of another user. ``_ :arg body: The api key request to create an API key :arg refresh: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_security/api_key/grant", params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def clear_cached_service_tokens( self, namespace, service, name, params=None, headers=None ): """ Evicts tokens from the service account token caches. ``_ :arg namespace: An identifier for the namespace :arg service: An identifier for the service name :arg name: A comma-separated list of service token names """ for param in (namespace, service, name): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "POST", _make_path( "_security", "service", namespace, service, "credential", "token", name, "_clear_cache", ), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) def create_service_token( self, namespace, service, name=None, params=None, headers=None ): """ Creates a service account token for access without requiring basic authentication. ``_ :arg namespace: An identifier for the namespace :arg service: An identifier for the service name :arg name: An identifier for the token name :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` (the default) then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. Valid choices: true, false, wait_for """ for param in (namespace, service): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path( "_security", "service", namespace, service, "credential", "token", name ), params=params, headers=headers, ) @query_params( "refresh", response_mimetypes=["application/json"], ) def delete_service_token(self, namespace, service, name, params=None, headers=None): """ Deletes a service account token. ``_ :arg namespace: An identifier for the namespace :arg service: An identifier for the service name :arg name: An identifier for the token name :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` (the default) then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. 
Valid choices: true, false, wait_for """ for param in (namespace, service, name): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "DELETE", _make_path( "_security", "service", namespace, service, "credential", "token", name ), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def get_service_accounts( self, namespace=None, service=None, params=None, headers=None ): """ Retrieves information about service accounts. ``_ :arg namespace: An identifier for the namespace :arg service: An identifier for the service name """ return self.transport.perform_request( "GET", _make_path("_security", "service", namespace, service), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def get_service_credentials(self, namespace, service, params=None, headers=None): """ Retrieves information of all service credentials for a service account. ``_ :arg namespace: An identifier for the namespace :arg service: An identifier for the service name """ for param in (namespace, service): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "GET", _make_path("_security", "service", namespace, service, "credential"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def saml_complete_logout(self, body, params=None, headers=None): """ Verifies the logout response sent from the SAML IdP ``_ :arg body: The logout response to verify """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_security/saml/complete_logout", params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def saml_authenticate(self, body, params=None, headers=None): """ Exchanges a SAML Response message for an Elasticsearch access token and refresh token pair ``_ :arg body: The SAML response to authenticate """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_security/saml/authenticate", params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def saml_invalidate(self, body, params=None, headers=None): """ Consumes a SAML LogoutRequest ``_ :arg body: The LogoutRequest message """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_security/saml/invalidate", params=params, headers=headers, body=body, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def saml_logout(self, body, params=None, headers=None): """ Invalidates an access token and a refresh token that were generated via the SAML Authenticate API ``_ :arg body: The tokens to invalidate """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_security/saml/logout", params=params, headers=headers, body=body ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def saml_prepare_authentication(self, body, params=None, headers=None): """ Creates a SAML 
authentication request ``_ :arg body: The realm for which to create the authentication request, identified by either its name or the ACS URL """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_security/saml/prepare", params=params, headers=headers, body=body ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def saml_service_provider_metadata(self, realm_name, params=None, headers=None): """ Generates SAML metadata for the Elastic stack SAML 2.0 Service Provider ``_ :arg realm_name: The name of the SAML realm to get the metadata for """ if realm_name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'realm_name'.") return self.transport.perform_request( "GET", _make_path("_security", "saml", "metadata", realm_name), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def query_api_keys(self, body=None, params=None, headers=None): """ Retrieves information for API keys using a subset of query DSL ``_ :arg body: From, size, query, sort and search_after """ return self.transport.perform_request( "POST", "/_security/_query/api_key", params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/client/security.pyi000066400000000000000000000766761426163262700235750ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class SecurityClient(NamespacedClient): def authenticate( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
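    # A minimal sketch of the corresponding runtime call, assuming basic auth
    # for a hypothetical "elastic" superuser whose password is a placeholder:
    #
    #   from elasticsearch import Elasticsearch
    #   es = Elasticsearch(
    #       ["http://localhost:9200"], http_auth=("elastic", "<password>")
    #   )
    #   who = es.security.authenticate()
    #   print(who["username"], who["roles"])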
def change_password( self, *, body: Mapping[str, Any], username: Optional[Any] = ..., refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def clear_cached_realms( self, *, realms: Any, usernames: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def clear_cached_roles( self, *, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def create_api_key( self, *, body: Mapping[str, Any], refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_privileges( self, *, application: Any, name: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
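    # Hedged sketch of the API-key workflow typed above, reusing the ``es``
    # client from the sketch before it; the key name and expiration are
    # illustrative assumptions:
    #
    #   resp = es.security.create_api_key(
    #       body={"name": "my-app-key", "expiration": "1d"},
    #       refresh="wait_for",
    #   )
    #   key_id, key_secret = resp["id"], resp["api_key"]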
def delete_role( self, *, name: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_role_mapping( self, *, name: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_user( self, *, username: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def disable_user( self, *, username: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def enable_user( self, *, username: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
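    # Sketch of the user disable/enable round trip (the username is a
    # hypothetical example), reusing the ``es`` client from the sketches above:
    #
    #   es.security.disable_user("jdoe", refresh="wait_for")
    #   es.security.enable_user("jdoe")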
def get_api_key( self, *, id: Optional[Any] = ..., name: Optional[Any] = ..., owner: Optional[bool] = ..., realm_name: Optional[Any] = ..., username: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_privileges( self, *, application: Optional[Any] = ..., name: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_role( self, *, name: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_role_mapping( self, *, name: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_token( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
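    # Sketch of exchanging credentials for a bearer token via get_token; the
    # username and password are placeholders:
    #
    #   tok = es.security.get_token(
    #       body={
    #           "grant_type": "password",
    #           "username": "jdoe",
    #           "password": "<password>",
    #       }
    #   )
    #   access_token = tok["access_token"]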
def get_user( self, *, username: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_user_privileges( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def has_privileges( self, *, body: Mapping[str, Any], user: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def invalidate_api_key( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def invalidate_token( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
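    # Sketch of a privilege check followed by invalidating the token obtained
    # in the previous sketch (the index name is hypothetical):
    #
    #   check = es.security.has_privileges(
    #       body={
    #           "cluster": ["monitor"],
    #           "index": [{"names": ["my-index"], "privileges": ["read"]}],
    #       }
    #   )
    #   es.security.invalidate_token(body={"token": access_token})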
def put_privileges( self, *, body: Mapping[str, Any], refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_role( self, *, name: Any, body: Mapping[str, Any], refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_role_mapping( self, *, name: Any, body: Mapping[str, Any], refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_user( self, *, username: Any, body: Mapping[str, Any], refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_builtin_privileges( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
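    # Sketch of provisioning a role and then a user holding it; all names and
    # the password below are assumptions:
    #
    #   es.security.put_role(
    #       "my_role",
    #       body={
    #           "cluster": ["monitor"],
    #           "indices": [{"names": ["my-index*"], "privileges": ["read"]}],
    #       },
    #   )
    #   es.security.put_user(
    #       "jdoe", body={"password": "<password>", "roles": ["my_role"]}
    #   )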
def clear_cached_privileges( self, *, application: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def clear_api_key_cache( self, *, ids: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def grant_api_key( self, *, body: Mapping[str, Any], refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def clear_cached_service_tokens( self, *, namespace: Any, service: Any, name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def create_service_token( self, *, namespace: Any, service: Any, name: Optional[Any] = ..., refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
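    # Sketch of minting a service account token; "elastic/fleet-server" is a
    # built-in service account, while the token name is a hypothetical choice:
    #
    #   created = es.security.create_service_token(
    #       namespace="elastic", service="fleet-server", name="token-1"
    #   )
    #   bearer = created["token"]["value"]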
def delete_service_token( self, *, namespace: Any, service: Any, name: Any, refresh: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_service_accounts( self, *, namespace: Optional[Any] = ..., service: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_service_credentials( self, *, namespace: Any, service: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def saml_complete_logout( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def saml_authenticate( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
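    # Sketch of inspecting the service-account APIs stubbed above, continuing
    # the "elastic/fleet-server" example:
    #
    #   es.security.get_service_accounts(namespace="elastic", service="fleet-server")
    #   es.security.get_service_credentials(namespace="elastic", service="fleet-server")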
def saml_invalidate( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def saml_logout( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def saml_prepare_authentication( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def saml_service_provider_metadata( self, *, realm_name: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def query_api_keys( self, *, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/shutdown.py000066400000000000000000000064411426163262700234070ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class ShutdownClient(NamespacedClient): @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def delete_node(self, node_id, params=None, headers=None): """ Removes a node from the shutdown list. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ :arg node_id: The node id of node to be removed from the shutdown state """ if node_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'node_id'.") return self.transport.perform_request( "DELETE", _make_path("_nodes", node_id, "shutdown"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def get_node(self, node_id=None, params=None, headers=None): """ Retrieve status of a node or nodes that are currently marked as shutting down. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ :arg node_id: Which node for which to retrieve the shutdown status """ return self.transport.perform_request( "GET", _make_path("_nodes", node_id, "shutdown"), params=params, headers=headers, ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_node(self, node_id, body, params=None, headers=None): """ Adds a node to be shut down. Designed for indirect use by ECE/ESS and ECK. Direct use is not supported. ``_ :arg node_id: The node id of node to be shut down :arg body: The shutdown type definition to register """ for param in (node_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_nodes", node_id, "shutdown"), params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/client/shutdown.pyi000066400000000000000000000062021426163262700235530ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
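# A hedged sketch of the node-shutdown lifecycle these stubs describe; the
# node id and reason are placeholders, and per the docstrings above the API is
# intended for indirect use by ECE/ESS and ECK:
#
#   from elasticsearch import Elasticsearch
#   es = Elasticsearch(["http://localhost:9200"])
#   es.shutdown.put_node(
#       "node-1", body={"type": "restart", "reason": "planned maintenance"}
#   )
#   es.shutdown.get_node()             # status of every registered shutdown
#   es.shutdown.delete_node("node-1")  # cancel the pending shutdown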
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class ShutdownClient(NamespacedClient): def delete_node( self, *, node_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_node( self, *, node_id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_node( self, *, node_id: Any, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/slm.py000066400000000000000000000141361426163262700223270ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SlmClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def delete_lifecycle(self, policy_id, params=None, headers=None): """ Deletes an existing snapshot lifecycle policy. 
``_ :arg policy_id: The id of the snapshot lifecycle policy to remove """ if policy_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'policy_id'.") return self.transport.perform_request( "DELETE", _make_path("_slm", "policy", policy_id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def execute_lifecycle(self, policy_id, params=None, headers=None): """ Immediately creates a snapshot according to the lifecycle policy, without waiting for the scheduled time. ``_ :arg policy_id: The id of the snapshot lifecycle policy to be executed """ if policy_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'policy_id'.") return self.transport.perform_request( "PUT", _make_path("_slm", "policy", policy_id, "_execute"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def execute_retention(self, params=None, headers=None): """ Deletes any snapshots that are expired according to the policy's retention rules. ``_ """ return self.transport.perform_request( "POST", "/_slm/_execute_retention", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_lifecycle(self, policy_id=None, params=None, headers=None): """ Retrieves one or more snapshot lifecycle policy definitions and information about the latest snapshot attempts. ``_ :arg policy_id: Comma-separated list of snapshot lifecycle policies to retrieve """ return self.transport.perform_request( "GET", _make_path("_slm", "policy", policy_id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def get_stats(self, params=None, headers=None): """ Returns global and policy-level statistics about actions taken by snapshot lifecycle management. ``_ """ return self.transport.perform_request( "GET", "/_slm/stats", params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_lifecycle(self, policy_id, body=None, params=None, headers=None): """ Creates or updates a snapshot lifecycle policy. ``_ :arg policy_id: The id of the snapshot lifecycle policy :arg body: The snapshot lifecycle policy definition to register """ if policy_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'policy_id'.") return self.transport.perform_request( "PUT", _make_path("_slm", "policy", policy_id), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def get_status(self, params=None, headers=None): """ Retrieves the status of snapshot lifecycle management (SLM). ``_ """ return self.transport.perform_request( "GET", "/_slm/status", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def start(self, params=None, headers=None): """ Turns on snapshot lifecycle management (SLM). ``_ """ return self.transport.perform_request( "POST", "/_slm/start", params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def stop(self, params=None, headers=None): """ Turns off snapshot lifecycle management (SLM). ``_ """ return self.transport.perform_request( "POST", "/_slm/stop", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/slm.pyi000066400000000000000000000164361426163262700225050ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. 
See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class SlmClient(NamespacedClient): def delete_lifecycle( self, *, policy_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def execute_lifecycle( self, *, policy_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def execute_retention( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_lifecycle( self, *, policy_id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
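# Illustrative usage sketch (not part of the library): the SlmClient methods
# stubbed here mirror slm.py above. The repository name, policy id, and policy
# body below are assumptions for the example; the repository must already be
# registered before the policy can run.
#
#     es.slm.put_lifecycle(
#         policy_id="nightly-snapshots",
#         body={
#             "schedule": "0 30 1 * * ?",           # SLM cron schedule
#             "name": "<nightly-snap-{now/d}>",
#             "repository": "my_repository",
#             "config": {"indices": ["*"]},
#             "retention": {"expire_after": "30d"},
#         },
#     )
#     es.slm.execute_lifecycle(policy_id="nightly-snapshots")  # snapshot immediately
#     es.slm.get_stats()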
def get_stats( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_lifecycle( self, *, policy_id: Any, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_status( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def start( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stop( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/snapshot.py000066400000000000000000000347611426163262700234010ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SnapshotClient(NamespacedClient): @query_params( "master_timeout", "wait_for_completion", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def create(self, repository, snapshot, body=None, params=None, headers=None): """ Creates a snapshot in a repository. ``_ :arg repository: A repository name :arg snapshot: A snapshot name :arg body: The snapshot definition :arg master_timeout: Explicit operation timeout for connection to master node :arg wait_for_completion: Should this request wait until the operation has completed before returning """ for param in (repository, snapshot): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_snapshot", repository, snapshot), params=params, headers=headers, body=body, ) @query_params( "master_timeout", response_mimetypes=["application/json"], ) def delete(self, repository, snapshot, params=None, headers=None): """ Deletes a snapshot. ``_ :arg repository: A repository name :arg snapshot: A snapshot name :arg master_timeout: Explicit operation timeout for connection to master node """ for param in (repository, snapshot): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "DELETE", _make_path("_snapshot", repository, snapshot), params=params, headers=headers, ) @query_params( "ignore_unavailable", "include_repository", "index_details", "master_timeout", "verbose", response_mimetypes=["application/json"], ) def get(self, repository, snapshot, params=None, headers=None): """ Returns information about a snapshot. ``_ :arg repository: A repository name :arg snapshot: A comma-separated list of snapshot names :arg ignore_unavailable: Whether to ignore unavailable snapshots, defaults to false which means a SnapshotMissingException is thrown :arg include_repository: Whether to include the repository name in the snapshot info. Defaults to true. :arg index_details: Whether to include details of each index in the snapshot, if those details are available. Defaults to false. :arg master_timeout: Explicit operation timeout for connection to master node :arg verbose: Whether to show verbose snapshot info or only show the basic info found in the repository index blob """ for param in (repository, snapshot): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "GET", _make_path("_snapshot", repository, snapshot), params=params, headers=headers, ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) def delete_repository(self, repository, params=None, headers=None): """ Deletes a repository. ``_ :arg repository: Name of the snapshot repository to unregister. Wildcard (`*`) patterns are supported. 
:arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") return self.transport.perform_request( "DELETE", _make_path("_snapshot", repository), params=params, headers=headers, ) @query_params( "local", "master_timeout", response_mimetypes=["application/json"], ) def get_repository(self, repository=None, params=None, headers=None): """ Returns information about a repository. ``_ :arg repository: A comma-separated list of repository names :arg local: Return local information, do not retrieve the state from master node (default: false) :arg master_timeout: Explicit operation timeout for connection to master node """ return self.transport.perform_request( "GET", _make_path("_snapshot", repository), params=params, headers=headers ) @query_params( "master_timeout", "timeout", "verify", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def create_repository(self, repository, body, params=None, headers=None): """ Creates a repository. ``_ :arg repository: A repository name :arg body: The repository definition :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout :arg verify: Whether to verify the repository after creation """ for param in (repository, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_snapshot", repository), params=params, headers=headers, body=body, ) @query_params( "master_timeout", "wait_for_completion", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def restore(self, repository, snapshot, body=None, params=None, headers=None): """ Restores a snapshot. ``_ :arg repository: A repository name :arg snapshot: A snapshot name :arg body: Details of what to restore :arg master_timeout: Explicit operation timeout for connection to master node :arg wait_for_completion: Should this request wait until the operation has completed before returning """ for param in (repository, snapshot): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "POST", _make_path("_snapshot", repository, snapshot, "_restore"), params=params, headers=headers, body=body, ) @query_params( "ignore_unavailable", "master_timeout", response_mimetypes=["application/json"], ) def status(self, repository=None, snapshot=None, params=None, headers=None): """ Returns information about the status of a snapshot. ``_ :arg repository: A repository name :arg snapshot: A comma-separated list of snapshot names :arg ignore_unavailable: Whether to ignore unavailable snapshots, defaults to false which means a SnapshotMissingException is thrown :arg master_timeout: Explicit operation timeout for connection to master node """ return self.transport.perform_request( "GET", _make_path("_snapshot", repository, snapshot, "_status"), params=params, headers=headers, ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) def verify_repository(self, repository, params=None, headers=None): """ Verifies a repository. 
``_ :arg repository: A repository name :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") return self.transport.perform_request( "POST", _make_path("_snapshot", repository, "_verify"), params=params, headers=headers, ) @query_params( "master_timeout", "timeout", response_mimetypes=["application/json"], ) def cleanup_repository(self, repository, params=None, headers=None): """ Removes stale data from repository. ``_ :arg repository: A repository name :arg master_timeout: Explicit operation timeout for connection to master node :arg timeout: Explicit operation timeout """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") return self.transport.perform_request( "POST", _make_path("_snapshot", repository, "_cleanup"), params=params, headers=headers, ) @query_params( "master_timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def clone( self, repository, snapshot, target_snapshot, body, params=None, headers=None ): """ Clones indices from one snapshot into another snapshot in the same repository. ``_ :arg repository: A repository name :arg snapshot: The name of the snapshot to clone from :arg target_snapshot: The name of the cloned snapshot to create :arg body: The snapshot clone definition :arg master_timeout: Explicit operation timeout for connection to master node """ for param in (repository, snapshot, target_snapshot, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_snapshot", repository, snapshot, "_clone", target_snapshot), params=params, headers=headers, body=body, ) @query_params( "blob_count", "concurrency", "detailed", "early_read_node_count", "max_blob_size", "max_total_data_size", "rare_action_probability", "rarely_abort_writes", "read_node_count", "seed", "timeout", response_mimetypes=["application/json"], ) def repository_analyze(self, repository, params=None, headers=None): """ Analyzes a repository for correctness and performance ``_ :arg repository: A repository name :arg blob_count: Number of blobs to create during the test. Defaults to 100. :arg concurrency: Number of operations to run concurrently during the test. Defaults to 10. :arg detailed: Whether to return detailed results or a summary. Defaults to 'false' so that only the summary is returned. :arg early_read_node_count: Number of nodes on which to perform an early read on a blob, i.e. before writing has completed. Early reads are rare actions so the 'rare_action_probability' parameter is also relevant. Defaults to 2. :arg max_blob_size: Maximum size of a blob to create during the test, e.g '1gb' or '100mb'. Defaults to '10mb'. :arg max_total_data_size: Maximum total size of all blobs to create during the test, e.g '1tb' or '100gb'. Defaults to '1gb'. :arg rare_action_probability: Probability of taking a rare action such as an early read or an overwrite. Defaults to 0.02. :arg rarely_abort_writes: Whether to rarely abort writes before they complete. Defaults to 'true'. :arg read_node_count: Number of nodes on which to read a blob after writing. Defaults to 10. :arg seed: Seed for the random number generator used to create the test workload. Defaults to a random value. :arg timeout: Explicit operation timeout. Defaults to '30s'. 
""" if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") return self.transport.perform_request( "POST", _make_path("_snapshot", repository, "_analyze"), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/client/snapshot.pyi000066400000000000000000000265521426163262700235510ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class SnapshotClient(NamespacedClient): def create( self, *, repository: Any, snapshot: Any, body: Optional[Mapping[str, Any]] = ..., master_timeout: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete( self, *, repository: Any, snapshot: Any, master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get( self, *, repository: Any, snapshot: Any, ignore_unavailable: Optional[bool] = ..., include_repository: Optional[bool] = ..., index_details: Optional[bool] = ..., master_timeout: Optional[Any] = ..., verbose: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
def delete_repository( self, *, repository: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_repository( self, *, repository: Optional[Any] = ..., local: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def create_repository( self, *, repository: Any, body: Mapping[str, Any], master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., verify: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def restore( self, *, repository: Any, snapshot: Any, body: Optional[Mapping[str, Any]] = ..., master_timeout: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def status( self, *, repository: Optional[Any] = ..., snapshot: Optional[Any] = ..., ignore_unavailable: Optional[bool] = ..., master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
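# Illustrative maintenance sketch (not part of the library), continuing the
# example above: verifying, inspecting, and cleaning up the same assumed
# repository and snapshot names.
#
#     es.snapshot.verify_repository(repository="my_repository")
#     es.snapshot.status(repository="my_repository", snapshot="snap-1")
#     es.snapshot.cleanup_repository(repository="my_repository")  # drop stale data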
def verify_repository( self, *, repository: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def cleanup_repository( self, *, repository: Any, master_timeout: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def clone( self, *, repository: Any, snapshot: Any, target_snapshot: Any, body: Mapping[str, Any], master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def repository_analyze( self, *, repository: Any, blob_count: Optional[Any] = ..., concurrency: Optional[Any] = ..., detailed: Optional[bool] = ..., early_read_node_count: Optional[Any] = ..., max_blob_size: Optional[Any] = ..., max_total_data_size: Optional[Any] = ..., rare_action_probability: Optional[Any] = ..., rarely_abort_writes: Optional[bool] = ..., read_node_count: Optional[Any] = ..., seed: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/sql.py000066400000000000000000000127501426163262700223330ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SqlClient(NamespacedClient): @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def clear_cursor(self, body, params=None, headers=None): """ Clears the SQL cursor ``_ :arg body: Specify the cursor value in the `cursor` element to clean the cursor. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_sql/close", params=params, headers=headers, body=body ) @query_params( "format", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def query(self, body, params=None, headers=None): """ Executes a SQL request ``_ :arg body: Use the `query` element to start a query. Use the `cursor` element to continue a query. :arg format: a short version of the Accept header, e.g. json, yaml """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_sql", params=params, headers=headers, body=body ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def translate(self, body, params=None, headers=None): """ Translates SQL into Elasticsearch queries ``_ :arg body: Specify the query in the `query` element. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "POST", "/_sql/translate", params=params, headers=headers, body=body ) @query_params( response_mimetypes=["application/json"], ) def delete_async(self, id, params=None, headers=None): """ Deletes an async SQL search or a stored synchronous SQL search. If the search is still running, the API cancels it. ``_ :arg id: The async search ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "DELETE", _make_path("_sql", "async", "delete", id), params=params, headers=headers, ) @query_params( "delimiter", "format", "keep_alive", "wait_for_completion_timeout", response_mimetypes=["application/json"], ) def get_async(self, id, params=None, headers=None): """ Returns the current status and available results for an async SQL search or stored synchronous SQL search ``_ :arg id: The async search ID :arg delimiter: Separator for CSV results Default: , :arg format: Short version of the Accept header, e.g. 
json, yaml :arg keep_alive: Retention period for the search and its results Default: 5d :arg wait_for_completion_timeout: Duration to wait for complete results """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "GET", _make_path("_sql", "async", id), params=params, headers=headers ) @query_params( response_mimetypes=["application/json"], ) def get_async_status(self, id, params=None, headers=None): """ Returns the current status of an async SQL search or a stored synchronous SQL search ``_ :arg id: The async search ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "GET", _make_path("_sql", "async", "status", id), params=params, headers=headers, ) elasticsearch-py-7.17.6/elasticsearch/client/sql.pyi000066400000000000000000000125421426163262700225030ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class SqlClient(NamespacedClient): def clear_cursor( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def query( self, *, body: Mapping[str, Any], format: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
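# Illustrative usage sketch (not part of the library): running a SQL query with
# the SqlClient implemented in sql.py above. The index name and query text are
# assumptions for the example; paging state is freed via the cursor the server
# returns.
#
#     resp = es.sql.query(body={"query": "SELECT * FROM my_index LIMIT 10"})
#     es.sql.translate(body={"query": "SELECT * FROM my_index LIMIT 10"})
#     if resp.get("cursor"):
#         es.sql.clear_cursor(body={"cursor": resp["cursor"]})  # free server-side state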
def translate( self, *, body: Mapping[str, Any], pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def delete_async( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_async( self, *, id: Any, delimiter: Optional[Any] = ..., format: Optional[Any] = ..., keep_alive: Optional[Any] = ..., wait_for_completion_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_async_status( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/ssl.py000066400000000000000000000025301426163262700223300ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
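# Illustrative usage sketch (not part of the library): the SslClient defined
# just below exposes a single read-only call. The client instance `es` is
# assumed to exist, as in the other examples in this package.
#
#     for cert in es.ssl.certificates():
#         print(cert["path"], cert["expiry"])  # fields as returned by the SSL certificates API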
from .utils import NamespacedClient, query_params class SslClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def certificates(self, params=None, headers=None): """ Retrieves information about the X.509 certificates used to encrypt communications in the cluster. ``_ """ return self.transport.perform_request( "GET", "/_ssl/certificates", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/ssl.pyi000066400000000000000000000031531426163262700225030ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class SslClient(NamespacedClient): def certificates( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/tasks.py000066400000000000000000000120021426163262700226470ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import warnings from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class TasksClient(NamespacedClient): @query_params( "actions", "detailed", "group_by", "nodes", "parent_task_id", "timeout", "wait_for_completion", response_mimetypes=["application/json"], ) def list(self, params=None, headers=None): """ Returns a list of tasks. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg actions: A comma-separated list of actions that should be returned. Leave empty to return all. 
:arg detailed: Return detailed task information (default: false) :arg group_by: Group tasks by nodes or parent/child relationships Valid choices: nodes, parents, none Default: nodes :arg nodes: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg parent_task_id: Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all. :arg timeout: Explicit operation timeout :arg wait_for_completion: Wait for the matching tasks to complete (default: false) """ return self.transport.perform_request( "GET", "/_tasks", params=params, headers=headers ) @query_params( "actions", "nodes", "parent_task_id", "wait_for_completion", response_mimetypes=["application/json"], ) def cancel(self, task_id=None, params=None, headers=None): """ Cancels a task, if it can be cancelled through an API. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg task_id: Cancel the task with specified task id (node_id:task_number) :arg actions: A comma-separated list of actions that should be cancelled. Leave empty to cancel all. :arg nodes: A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes :arg parent_task_id: Cancel tasks with specified parent task id (node_id:task_number). Set to -1 to cancel all. :arg wait_for_completion: Should the request block until the cancellation of the task and its descendant tasks is completed. Defaults to false """ return self.transport.perform_request( "POST", _make_path("_tasks", task_id, "_cancel"), params=params, headers=headers, ) @query_params( "timeout", "wait_for_completion", response_mimetypes=["application/json"], ) def get(self, task_id=None, params=None, headers=None): """ Returns information about a task. ``_ .. warning:: This API is **experimental** so may include breaking changes or be removed in a future version :arg task_id: Return the task with specified id (node_id:task_number) :arg timeout: Explicit operation timeout :arg wait_for_completion: Wait for the matching tasks to complete (default: false) """ if task_id in SKIP_IN_PATH: warnings.warn( "Calling client.tasks.get() without a task_id is deprecated " "and will be removed in v8.0. Use client.tasks.list() instead.", category=DeprecationWarning, stacklevel=3, ) return self.transport.perform_request( "GET", _make_path("_tasks", task_id), params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/tasks.pyi000066400000000000000000000070761426163262700230370ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class TasksClient(NamespacedClient): def list( self, *, actions: Optional[Any] = ..., detailed: Optional[bool] = ..., group_by: Optional[Any] = ..., nodes: Optional[Any] = ..., parent_task_id: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def cancel( self, *, task_id: Optional[Any] = ..., actions: Optional[Any] = ..., nodes: Optional[Any] = ..., parent_task_id: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get( self, *, task_id: Optional[Any] = ..., timeout: Optional[Any] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/text_structure.py000066400000000000000000000076721426163262700246470ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
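# Illustrative usage sketch (not part of the library): the TasksClient defined
# in tasks.py above. The task id (in "node_id:task_number" form) and the action
# filter below are assumptions for the example.
#
#     es.tasks.list(detailed=True, actions="*reindex")  # find running reindex tasks
#     es.tasks.get(task_id="oTUltX4IQMOUUVeiohTt8A:12345")
#     es.tasks.cancel(task_id="oTUltX4IQMOUUVeiohTt8A:12345", wait_for_completion=True)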
from .utils import SKIP_IN_PATH, NamespacedClient, _bulk_body, query_params class TextStructureClient(NamespacedClient): @query_params( "charset", "column_names", "delimiter", "explain", "format", "grok_pattern", "has_header_row", "line_merge_size_limit", "lines_to_sample", "quote", "should_trim_fields", "timeout", "timestamp_field", "timestamp_format", request_mimetypes=["application/x-ndjson"], response_mimetypes=["application/json"], ) def find_structure(self, body, params=None, headers=None): """ Finds the structure of a text file. The text file must contain data that is suitable to be ingested into Elasticsearch. ``_ :arg body: The contents of the file to be analyzed :arg charset: Optional parameter to specify the character set of the file :arg column_names: Optional parameter containing a comma separated list of the column names for a delimited file :arg delimiter: Optional parameter to specify the delimiter character for a delimited file - must be a single character :arg explain: Whether to include a commentary on how the structure was derived :arg format: Optional parameter to specify the high level file format Valid choices: ndjson, xml, delimited, semi_structured_text :arg grok_pattern: Optional parameter to specify the Grok pattern that should be used to extract fields from messages in a semi- structured text file :arg has_header_row: Optional parameter to specify whether a delimited file includes the column names in its first row :arg line_merge_size_limit: Maximum number of characters permitted in a single message when lines are merged to create messages. Default: 10000 :arg lines_to_sample: How many lines of the file should be included in the analysis Default: 1000 :arg quote: Optional parameter to specify the quote character for a delimited file - must be a single character :arg should_trim_fields: Optional parameter to specify whether the values between delimiters in a delimited file should have whitespace trimmed from them :arg timeout: Timeout after which the analysis will be aborted Default: 25s :arg timestamp_field: Optional parameter to specify the timestamp field in the file :arg timestamp_format: Optional parameter to specify the timestamp format in the file - may be either a Joda or Java time format """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") body = _bulk_body(self.transport.serializer, body) return self.transport.perform_request( "POST", "/_text_structure/find_structure", params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/client/text_structure.pyi000066400000000000000000000044451426163262700250130ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
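# Illustrative usage sketch (not part of the library): find_structure, stubbed
# below and implemented in text_structure.py above, accepts raw text, bytes, or
# a sequence of mappings (serialized to NDJSON via _bulk_body). The sample rows
# are assumptions for the example.
#
#     es.text_structure.find_structure(
#         body=[{"name": "alice", "value": 1}, {"name": "bob", "value": 2}],
#         lines_to_sample=1000,
#     )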
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Sequence, Tuple, Union, ) from .utils import NamespacedClient class TextStructureClient(NamespacedClient): def find_structure( self, *, body: Union[Sequence[Mapping[str, Any]], bytes, str], charset: Optional[Any] = ..., column_names: Optional[Any] = ..., delimiter: Optional[Any] = ..., explain: Optional[bool] = ..., format: Optional[Any] = ..., grok_pattern: Optional[Any] = ..., has_header_row: Optional[bool] = ..., line_merge_size_limit: Optional[Any] = ..., lines_to_sample: Optional[Any] = ..., quote: Optional[Any] = ..., should_trim_fields: Optional[bool] = ..., timeout: Optional[Any] = ..., timestamp_field: Optional[Any] = ..., timestamp_format: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/transform.py000066400000000000000000000244731426163262700235540ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class TransformClient(NamespacedClient): @query_params( "force", "timeout", response_mimetypes=["application/json"], ) def delete_transform(self, transform_id, params=None, headers=None): """ Deletes an existing transform. ``_ :arg transform_id: The id of the transform to delete :arg force: When `true`, the transform is deleted regardless of its current state. The default value is `false`, meaning that the transform must be `stopped` before it can be deleted. :arg timeout: Controls the time to wait for the transform deletion """ if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return self.transport.perform_request( "DELETE", _make_path("_transform", transform_id), params=params, headers=headers, ) @query_params( "allow_no_match", "exclude_generated", "from_", "size", response_mimetypes=["application/json"], ) def get_transform(self, transform_id=None, params=None, headers=None): """ Retrieves configuration information for transforms. ``_ :arg transform_id: The id or comma delimited list of id expressions of the transforms to get, '_all' or '*' implies get all transforms :arg allow_no_match: Whether to ignore if a wildcard expression matches no transforms. 
(This includes `_all` string or when no transforms have been specified) :arg exclude_generated: Omits fields that are illegal to set on transform PUT :arg from_: skips a number of transform configs, defaults to 0 :arg size: specifies a max number of transforms to get, defaults to 100 """ if "from_" in params: params["from"] = params.pop("from_") return self.transport.perform_request( "GET", _make_path("_transform", transform_id), params=params, headers=headers, ) @query_params( "allow_no_match", "from_", "size", response_mimetypes=["application/json"], ) def get_transform_stats(self, transform_id, params=None, headers=None): """ Retrieves usage information for transforms. ``_ :arg transform_id: The id of the transform for which to get stats. '_all' or '*' implies all transforms :arg allow_no_match: Whether to ignore if a wildcard expression matches no transforms. (This includes `_all` string or when no transforms have been specified) :arg from_: skips a number of transform stats, defaults to 0 :arg size: specifies a max number of transform stats to get, defaults to 100 """ if "from_" in params: params["from"] = params.pop("from_") if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return self.transport.perform_request( "GET", _make_path("_transform", transform_id, "_stats"), params=params, headers=headers, ) @query_params( "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def preview_transform( self, body=None, transform_id=None, params=None, headers=None ): """ Previews a transform. ``_ :arg body: The definition for the transform to preview :arg transform_id: The id of the transform to preview. :arg timeout: Controls the time to wait for the preview """ return self.transport.perform_request( "POST", _make_path("_transform", transform_id, "_preview"), params=params, headers=headers, body=body, ) @query_params( "defer_validation", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_transform(self, transform_id, body, params=None, headers=None): """ Instantiates a transform. ``_ :arg transform_id: The id of the new transform. :arg body: The transform definition :arg defer_validation: If validations should be deferred until transform starts, defaults to false. :arg timeout: Controls the time to wait for the transform to start """ for param in (transform_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "PUT", _make_path("_transform", transform_id), params=params, headers=headers, body=body, ) @query_params( "timeout", response_mimetypes=["application/json"], ) def start_transform(self, transform_id, params=None, headers=None): """ Starts one or more transforms. ``_ :arg transform_id: The id of the transform to start :arg timeout: Controls the time to wait for the transform to start """ if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return self.transport.perform_request( "POST", _make_path("_transform", transform_id, "_start"), params=params, headers=headers, ) @query_params( "allow_no_match", "force", "timeout", "wait_for_checkpoint", "wait_for_completion", response_mimetypes=["application/json"], ) def stop_transform(self, transform_id, params=None, headers=None): """ Stops one or more transforms. 
``_ :arg transform_id: The id of the transform to stop :arg allow_no_match: Whether to ignore if a wildcard expression matches no transforms. (This includes `_all` string or when no transforms have been specified) :arg force: Whether to force stop a failed transform or not. Default to false :arg timeout: Controls the time to wait until the transform has stopped. Default to 30 seconds :arg wait_for_checkpoint: Whether to wait for the transform to reach a checkpoint before stopping. Default to false :arg wait_for_completion: Whether to wait for the transform to fully stop before returning or not. Default to false """ if transform_id in SKIP_IN_PATH: raise ValueError( "Empty value passed for a required argument 'transform_id'." ) return self.transport.perform_request( "POST", _make_path("_transform", transform_id, "_stop"), params=params, headers=headers, ) @query_params( "defer_validation", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def update_transform(self, transform_id, body, params=None, headers=None): """ Updates certain properties of a transform. ``_ :arg transform_id: The id of the transform. :arg body: The update transform definition :arg defer_validation: If validations should be deferred until transform starts, defaults to false. :arg timeout: Controls the time to wait for the update """ for param in (transform_id, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") return self.transport.perform_request( "POST", _make_path("_transform", transform_id, "_update"), params=params, headers=headers, body=body, ) @query_params( "dry_run", "timeout", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def upgrade_transforms(self, params=None, headers=None): """ Upgrades all transforms. ``_ :arg dry_run: Whether to only check for updates but don't execute :arg timeout: Controls the time to wait for the upgrade """ return self.transport.perform_request( "POST", "/_transform/_upgrade", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/transform.pyi000066400000000000000000000206351426163262700237210ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
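# A hedged lifecycle sketch for the transform APIs implemented in transform.py
# above. The index names and the pivot definition are illustrative
# assumptions, not defaults shipped with the client:
#
#     es.transform.put_transform(
#         transform_id="my-transform",
#         body={
#             "source": {"index": "my-source-index"},
#             "dest": {"index": "my-dest-index"},
#             "pivot": {
#                 "group_by": {"user": {"terms": {"field": "user.id"}}},
#                 "aggregations": {"avg_price": {"avg": {"field": "price"}}},
#             },
#         },
#     )
#     es.transform.start_transform(transform_id="my-transform", timeout="30s")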
from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class TransformClient(NamespacedClient): def delete_transform( self, *, transform_id: Any, force: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_transform( self, *, transform_id: Optional[Any] = ..., allow_no_match: Optional[bool] = ..., exclude_generated: Optional[bool] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_transform_stats( self, *, transform_id: Any, allow_no_match: Optional[bool] = ..., from_: Optional[Any] = ..., size: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def preview_transform( self, *, body: Optional[Mapping[str, Any]] = ..., transform_id: Optional[Any] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
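    # Usage note for the stubs above: 'from' is a reserved word in Python, so
    # the pagination parameter is exposed as 'from_' and rewritten to 'from'
    # on the wire (see get_transform()/get_transform_stats() in transform.py).
    # An illustrative call, assuming 'es' is an Elasticsearch client:
    #
    #     es.transform.get_transform(transform_id="_all", from_=0, size=10)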
def put_transform( self, *, transform_id: Any, body: Mapping[str, Any], defer_validation: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def start_transform( self, *, transform_id: Any, timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stop_transform( self, *, transform_id: Any, allow_no_match: Optional[bool] = ..., force: Optional[bool] = ..., timeout: Optional[Any] = ..., wait_for_checkpoint: Optional[bool] = ..., wait_for_completion: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def update_transform( self, *, transform_id: Any, body: Mapping[str, Any], defer_validation: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def upgrade_transforms( self, *, dry_run: Optional[bool] = ..., timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
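# Illustrative shutdown and maintenance calls for the APIs typed above; the
# transform id is an assumption:
#
#     es.transform.stop_transform(
#         transform_id="my-transform",
#         wait_for_completion=True,  # block until fully stopped (default: false)
#         timeout="30s",
#     )
#     es.transform.upgrade_transforms(dry_run=True)  # check only, don't execute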
elasticsearch-py-7.17.6/elasticsearch/client/utils.py000066400000000000000000000360011426163262700226670ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import unicode_literals import base64 import os import warnings import weakref from datetime import date, datetime from functools import wraps from .._version import __versionstr__ from ..compat import PY2, quote, string_types, to_bytes, to_str, unquote, urlparse # parts of URL to be omitted SKIP_IN_PATH = (None, "", b"", [], ()) # Switch to this mimetype if 'ELASTIC_CLIENT_APIVERSIONING=1/true' _COMPATIBILITY_MIMETYPE = "application/vnd.elasticsearch+json;compatible-with=%s" % ( __versionstr__.partition(".")[0] ) def _normalize_hosts(hosts): """ Helper function to transform hosts argument to :class:`~elasticsearch.Elasticsearch` to a list of dicts. """ # if hosts are empty, just defer to defaults down the line if hosts is None: return [{}] # passed in just one string if isinstance(hosts, string_types): hosts = [hosts] out = [] # normalize hosts to dicts for host in hosts: if isinstance(host, string_types): if "://" not in host: host = "//%s" % host parsed_url = urlparse(host) h = {"host": parsed_url.hostname} if parsed_url.port: h["port"] = parsed_url.port if parsed_url.scheme == "https": h["port"] = parsed_url.port or 443 h["use_ssl"] = True if parsed_url.username or parsed_url.password: h["http_auth"] = "%s:%s" % ( unquote(parsed_url.username), unquote(parsed_url.password), ) if parsed_url.path and parsed_url.path != "/": h["url_prefix"] = parsed_url.path out.append(h) else: out.append(host) return out def _escape(value): """ Escape a single value of a URL string or a query parameter. If it is a list or tuple, turn it into a comma-separated string first. """ # make sequences into comma-separated stings if isinstance(value, (list, tuple)): value = ",".join(value) # dates and datetimes into isoformat elif isinstance(value, (date, datetime)): value = value.isoformat() # make bools into true/false strings elif isinstance(value, bool): value = str(value).lower() # don't decode bytestrings elif isinstance(value, bytes): return value # encode strings to utf-8 if isinstance(value, string_types): if PY2 and isinstance(value, unicode): # noqa: F821 return value.encode("utf-8") if not PY2 and isinstance(value, str): return value.encode("utf-8") return str(value) def _make_path(*parts): """ Create a URL string from parts, omit all `None` values and empty strings. Convert lists and tuples to comma separated values. """ # TODO: maybe only allow some parts to be lists/tuples ? 
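    # Behaviour sketch (illustrative, derived from the code below):
    #   _make_path("_transform", "my-id", "_stats")  -> "/_transform/my-id/_stats"
    #   _make_path("idx", None, "_search")           -> "/idx/_search"  (None skipped)
    #   _make_path("a", ["b", "c"])                  -> "/a/b,c"  (sequence joined)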
return "/" + "/".join( # preserve ',' and '*' in url for nicer URLs in logs quote(_escape(p), b",*") for p in parts if p not in SKIP_IN_PATH ) # parameters that apply to all methods GLOBAL_PARAMS = ("pretty", "human", "error_trace", "format", "filter_path") def query_params(*es_query_params, **kwargs): """ Decorator that pops all accepted parameters from method's kwargs and puts them in the params argument. """ request_mimetypes = kwargs.pop("request_mimetypes", []) response_mimetypes = kwargs.pop("response_mimetypes", []) default_content_type = "".join(request_mimetypes[:1]) default_accept = ",".join(response_mimetypes) def compat_mimetype(mimetypes): return [ _COMPATIBILITY_MIMETYPE if mimetype in ( "application/json", "application/x-ndjson", "application/vnd.mapbox-vector-tile", ) else mimetype for mimetype in mimetypes ] compat_content_type = "".join(compat_mimetype(request_mimetypes)[:1]) compat_accept = ",".join(compat_mimetype(response_mimetypes)) body_params = kwargs.pop("body_params", None) body_only_params = set(body_params or ()) - set(es_query_params) body_name = kwargs.pop("body_name", None) body_required = kwargs.pop("body_required", False) type_possible_in_params = "type" in es_query_params # There should be no APIs defined with both 'body_params' and a named body. assert not (body_name and body_params) # 'body_required' implies there's no named body and that body_params are defined. assert not (body_name and body_required) assert not body_required or body_params def _wrapper(func): @wraps(func) def _wrapped(*args, **kwargs): params = (kwargs.pop("params", None) or {}).copy() headers = { k.lower(): v for k, v in (kwargs.pop("headers", None) or {}).copy().items() } if "opaque_id" in kwargs: headers["x-opaque-id"] = kwargs.pop("opaque_id") # Detect compatibility mode and set the 'Accept' and 'Content-Type' # headers to the compatibility mimetype if detected. try: if os.environ["ELASTIC_CLIENT_APIVERSIONING"] not in ("true", "1"): raise KeyError # Unset is the same as env var not being 'true' or '1' accept = compat_accept content_type = compat_content_type except KeyError: accept = default_accept content_type = default_content_type # Set the mimetype headers for the request if accept: headers.setdefault("accept", accept) if content_type: headers.setdefault("content-type", content_type) http_auth = kwargs.pop("http_auth", None) api_key = kwargs.pop("api_key", None) # Detect when we should absorb body parameters into 'body' # We only do this when there's no 'body' parameter, no # positional arguments, and at least one parameter we can # serialize in the body. using_body_kwarg = kwargs.get("body", None) is not None using_positional_args = args and len(args) > 1 # The 'doc_type' parameter is deprecated in the query # string. This was generated and missed in 7.x so to # push users to use 'type' instead of 'doc_type' in 8.x # we deprecate it here. if type_possible_in_params: doc_type_in_params = params and "doc_type" in params doc_type_in_kwargs = "doc_type" in kwargs if doc_type_in_params or doc_type_in_kwargs: warnings.warn( "The 'doc_type' parameter is deprecated, use 'type' for this " "API instead. 
See https://github.com/elastic/elasticsearch-py/" "issues/1698 for more information", category=DeprecationWarning, stacklevel=2, ) if doc_type_in_params: params["type"] = params.pop("doc_type") if doc_type_in_kwargs: kwargs["type"] = kwargs.pop("doc_type") if using_body_kwarg or using_positional_args: # If there are any body-only parameters then we raise a 'TypeError' # to alert the user they have to either not use a 'body' parameter # or to put the parameter into the body. body_only_params_in_use = body_only_params.intersection(kwargs) if body_only_params_in_use: # Make sure the error message prose makes sense! params_prose = "', '".join(sorted(body_only_params_in_use)) plural_params = len(body_only_params_in_use) > 1 raise TypeError( "The '%s' parameter%s %s only serialized in the request body " "and can't be combined with the 'body' parameter. Either stop using the " "'body' parameter and use keyword-arguments only or move the specified " "parameters into the 'body'. See https://github.com/elastic/elasticsearch-py/" "issues/1698 for more information" % ( params_prose, "s" if plural_params else "", "are" if plural_params else "is", ) ) # If there's no parameter overlap we still warn the user # that the 'body' parameter is deprecated for this API. if using_body_kwarg and body_params: warnings.warn( "The 'body' parameter is deprecated for the '%s' API and " "will be removed in a future version. Instead use API parameters directly. " "See https://github.com/elastic/elasticsearch-py/issues/1698 for " "more information" % str(func.__name__), DeprecationWarning, stacklevel=2, ) # If positional arguments are being used we also warn about that being deprecated. if using_positional_args: warnings.warn( "Using positional arguments for APIs is deprecated and will be " "disabled in 8.0.0. Instead use only keyword arguments for all APIs. " "See https://github.com/elastic/elasticsearch-py/issues/1698 for " "more information", DeprecationWarning, stacklevel=2, ) # We need to serialize all these parameters into a JSON body. elif set(body_params or ()).intersection(kwargs): body = {} for param in body_params: value = kwargs.pop(param, None) if value is not None: body[param.rstrip("_")] = value kwargs["body"] = body # Since we've deprecated 'body' we set body={} if there # should be a body on JSON-field APIs but none of those fields # are filled. elif body_required: kwargs["body"] = {} # If there's a named body parameter then we transform it to 'body' # for backwards compatibility with libraries like APM. # Otherwise we warn the user about 'body' being deprecated. if body_name: if body_name in kwargs: # If passed both 'body' and the named body param we raise an error. if using_body_kwarg: raise TypeError( "Can't use '%s' and 'body' parameters together because '%s' " "is an alias for 'body'. Instead you should only use the " "'%s' parameter. See https://github.com/elastic/elasticsearch-py/" "issues/1698 for more information" % ( body_name, body_name, body_name, ) ) kwargs["body"] = kwargs.pop(body_name) # Warn if user passes 'body' but should be using the named body parameter. elif using_body_kwarg: warnings.warn( "The 'body' parameter is deprecated for the '%s' API and " "will be removed in a future version. Instead use the '%s' parameter. 
" "See https://github.com/elastic/elasticsearch-py/issues/1698 " "for more information" % (str(func.__name__), body_name), category=DeprecationWarning, stacklevel=2, ) if http_auth is not None and api_key is not None: raise ValueError( "Only one of 'http_auth' and 'api_key' may be passed at a time" ) elif http_auth is not None: headers["authorization"] = "Basic %s" % ( _base64_auth_header(http_auth), ) elif api_key is not None: headers["authorization"] = "ApiKey %s" % (_base64_auth_header(api_key),) for p in es_query_params + GLOBAL_PARAMS: if p in kwargs: v = kwargs.pop(p) if v is not None: params[p] = _escape(v) # don't treat ignore, request_timeout, and opaque_id as other params to avoid escaping for p in ("ignore", "request_timeout"): if p in kwargs: params[p] = kwargs.pop(p) return func(*args, params=params, headers=headers, **kwargs) return _wrapped return _wrapper def _bulk_body(serializer, body): # if not passed in a string, serialize items and join by newline if not isinstance(body, string_types): body = "\n".join(map(serializer.dumps, body)) # bulk body must end with a newline if isinstance(body, bytes): if not body.endswith(b"\n"): body += b"\n" elif isinstance(body, string_types) and not body.endswith("\n"): body += "\n" return body def _base64_auth_header(auth_value): """Takes either a 2-tuple or a base64-encoded string and returns a base64-encoded string to be used as an HTTP authorization header. """ if isinstance(auth_value, (list, tuple)): auth_value = base64.b64encode(to_bytes(":".join(auth_value))) return to_str(auth_value) class NamespacedClient(object): def __init__(self, client): self.client = client @property def transport(self): return self.client.transport class AddonClient(NamespacedClient): @classmethod def infect_client(cls, client): addon = cls(weakref.proxy(client)) setattr(client, cls.namespace, addon) return client elasticsearch-py-7.17.6/elasticsearch/client/utils.pyi000066400000000000000000000037271426163262700230510ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import unicode_literals from typing import ( Any, Callable, Collection, Dict, Iterable, List, Mapping, Optional, Tuple, TypeVar, Union, ) from ..client import Elasticsearch from ..serializer import Serializer from ..transport import Transport T = TypeVar("T") SKIP_IN_PATH: Collection[Any] def _normalize_hosts( hosts: Optional[Union[str, Collection[Union[str, Dict[str, Any]]]]] ) -> List[Dict[str, Any]]: ... def _escape(value: Any) -> str: ... def _make_path(*parts: Any) -> str: ... GLOBAL_PARAMS: Tuple[str, ...] 
def query_params( *es_query_params: str, request_mimetypes: Optional[List[str]] = ..., response_mimetypes: Optional[List[str]] = ..., body_params: Optional[List[str]] = ..., body_name: Optional[str] = ..., body_required: Optional[bool] = ... ) -> Callable[[Callable[..., T]], Callable[..., T]]: ... def _bulk_body( serializer: Serializer, body: Union[str, bytes, Mapping[str, Any], Iterable[Any]] ) -> Union[str, Mapping[str, Any]]: ... class NamespacedClient: client: Elasticsearch def __init__(self, client: Elasticsearch) -> None: ... @property def transport(self) -> Transport: ... elasticsearch-py-7.17.6/elasticsearch/client/watcher.py000066400000000000000000000205501426163262700231660ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class WatcherClient(NamespacedClient): @query_params( response_mimetypes=["application/json"], ) def ack_watch(self, watch_id, action_id=None, params=None, headers=None): """ Acknowledges a watch, manually throttling the execution of the watch's actions. ``_ :arg watch_id: Watch ID :arg action_id: A comma-separated list of the action ids to be acked """ if watch_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'watch_id'.") return self.transport.perform_request( "PUT", _make_path("_watcher", "watch", watch_id, "_ack", action_id), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def activate_watch(self, watch_id, params=None, headers=None): """ Activates a currently inactive watch. ``_ :arg watch_id: Watch ID """ if watch_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'watch_id'.") return self.transport.perform_request( "PUT", _make_path("_watcher", "watch", watch_id, "_activate"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def deactivate_watch(self, watch_id, params=None, headers=None): """ Deactivates a currently active watch. ``_ :arg watch_id: Watch ID """ if watch_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'watch_id'.") return self.transport.perform_request( "PUT", _make_path("_watcher", "watch", watch_id, "_deactivate"), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def delete_watch(self, id, params=None, headers=None): """ Removes a watch from Watcher. 
``_ :arg id: Watch ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "DELETE", _make_path("_watcher", "watch", id), params=params, headers=headers, ) @query_params( "debug", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def execute_watch(self, body=None, id=None, params=None, headers=None): """ Forces the execution of a stored watch. ``_ :arg body: Execution control :arg id: Watch ID :arg debug: indicates whether the watch should execute in debug mode """ return self.transport.perform_request( "PUT", _make_path("_watcher", "watch", id, "_execute"), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def get_watch(self, id, params=None, headers=None): """ Retrieves a watch by its ID. ``_ :arg id: Watch ID """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "GET", _make_path("_watcher", "watch", id), params=params, headers=headers ) @query_params( "active", "if_primary_term", "if_seq_no", "version", request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def put_watch(self, id, body=None, params=None, headers=None): """ Creates a new watch, or updates an existing one. ``_ :arg id: Watch ID :arg body: The watch :arg active: Specify whether the watch is in/active by default :arg if_primary_term: only update the watch if the last operation that has changed the watch has the specified primary term :arg if_seq_no: only update the watch if the last operation that has changed the watch has the specified sequence number :arg version: Explicit version number for concurrency control """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") return self.transport.perform_request( "PUT", _make_path("_watcher", "watch", id), params=params, headers=headers, body=body, ) @query_params( response_mimetypes=["application/json"], ) def start(self, params=None, headers=None): """ Starts Watcher if it is not already running. ``_ """ return self.transport.perform_request( "POST", "/_watcher/_start", params=params, headers=headers ) @query_params( "emit_stacktraces", response_mimetypes=["application/json"], ) def stats(self, metric=None, params=None, headers=None): """ Retrieves the current Watcher metrics. ``_ :arg metric: Controls what additional stat metrics should be include in the response Valid choices: _all, queued_watches, current_watches, pending_watches :arg emit_stacktraces: Emits stack traces of currently running watches """ return self.transport.perform_request( "GET", _make_path("_watcher", "stats", metric), params=params, headers=headers, ) @query_params( response_mimetypes=["application/json"], ) def stop(self, params=None, headers=None): """ Stops Watcher if it is running. ``_ """ return self.transport.perform_request( "POST", "/_watcher/_stop", params=params, headers=headers ) @query_params( request_mimetypes=["application/json"], response_mimetypes=["application/json"], ) def query_watches(self, body=None, params=None, headers=None): """ Retrieves stored watches. 
``_ :arg body: From, size, query, sort and search_after """ return self.transport.perform_request( "POST", "/_watcher/_query/watches", params=params, headers=headers, body=body, ) elasticsearch-py-7.17.6/elasticsearch/client/watcher.pyi000066400000000000000000000221301426163262700233330ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import ( Any, Collection, Dict, Mapping, MutableMapping, Optional, Tuple, Union, ) from .utils import NamespacedClient class WatcherClient(NamespacedClient): def ack_watch( self, *, watch_id: Any, action_id: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def activate_watch( self, *, watch_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def deactivate_watch( self, *, watch_id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
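    # Usage sketch for the activation APIs above; the watch and action ids
    # are illustrative:
    #
    #     es.watcher.deactivate_watch(watch_id="my-watch")  # pause execution
    #     es.watcher.activate_watch(watch_id="my-watch")    # resume execution
    #     es.watcher.ack_watch(watch_id="my-watch", action_id="email_admin")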
def delete_watch( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def execute_watch( self, *, body: Optional[Mapping[str, Any]] = ..., id: Optional[Any] = ..., debug: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def get_watch( self, *, id: Any, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def put_watch( self, *, id: Any, body: Optional[Mapping[str, Any]] = ..., active: Optional[bool] = ..., if_primary_term: Optional[Any] = ..., if_seq_no: Optional[Any] = ..., version: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def start( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... 
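    # A minimal, hedged put_watch sketch; the trigger, input, and condition
    # shown are assumptions for illustration, not client defaults:
    #
    #     es.watcher.put_watch(
    #         id="my-watch",
    #         body={
    #             "trigger": {"schedule": {"interval": "10m"}},
    #             "input": {"search": {"request": {"indices": ["logs-*"]}}},
    #             "condition": {"always": {}},
    #             "actions": {},
    #         },
    #         active=True,
    #     )
    #     es.watcher.execute_watch(id="my-watch", debug=True)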
def stats( self, *, metric: Optional[Any] = ..., emit_stacktraces: Optional[bool] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def stop( self, *, pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def query_watches( self, *, body: Optional[Mapping[str, Any]] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/client/xpack.py000066400000000000000000000043301426163262700226350ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .utils import NamespacedClient, query_params class XPackClient(NamespacedClient): def __getattr__(self, attr_name): return getattr(self.client, attr_name) # AUTO-GENERATED-API-DEFINITIONS # @query_params( "accept_enterprise", "categories", response_mimetypes=["application/json"], ) def info(self, params=None, headers=None): """ Retrieves information about the installed X-Pack features. ``_ :arg accept_enterprise: If an enterprise license is installed, return the type and mode as 'enterprise' (default: false) :arg categories: Comma-separated list of info categories. 
Can be any of: build, license, features """ return self.transport.perform_request( "GET", "/_xpack", params=params, headers=headers ) @query_params( "master_timeout", response_mimetypes=["application/json"], ) def usage(self, params=None, headers=None): """ Retrieves usage information about the installed X-Pack features. ``_ :arg master_timeout: Specify timeout for watch write operation """ return self.transport.perform_request( "GET", "/_xpack/usage", params=params, headers=headers ) elasticsearch-py-7.17.6/elasticsearch/client/xpack.pyi000066400000000000000000000050521426163262700230100ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Collection, Dict, MutableMapping, Optional, Tuple, Union from .utils import NamespacedClient class XPackClient(NamespacedClient): def __getattr__(self, attr_name: str) -> Any: return getattr(self.client, attr_name) # AUTO-GENERATED-API-DEFINITIONS # def info( self, *, accept_enterprise: Optional[bool] = ..., categories: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... def usage( self, *, master_timeout: Optional[Any] = ..., pretty: Optional[bool] = ..., human: Optional[bool] = ..., error_trace: Optional[bool] = ..., format: Optional[str] = ..., filter_path: Optional[Union[str, Collection[str]]] = ..., request_timeout: Optional[Union[int, float]] = ..., ignore: Optional[Union[int, Collection[int]]] = ..., opaque_id: Optional[str] = ..., http_auth: Optional[Union[str, Tuple[str, str]]] = ..., api_key: Optional[Union[str, Tuple[str, str]]] = ..., params: Optional[MutableMapping[str, Any]] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Dict[str, Any]: ... elasticsearch-py-7.17.6/elasticsearch/compat.py000066400000000000000000000044561426163262700215450ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import sys PY2 = sys.version_info[0] == 2 if PY2: string_types = (basestring,) # noqa: F821 from itertools import imap as map from urllib import quote, quote_plus, unquote, urlencode from Queue import Queue from urlparse import urlparse def to_str(x, encoding="ascii"): if not isinstance(x, str): return x.encode(encoding) return x to_bytes = to_str else: string_types = str, bytes from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse map = map from queue import Queue def to_str(x, encoding="ascii"): if not isinstance(x, str): return x.decode(encoding) return x def to_bytes(x, encoding="ascii"): if not isinstance(x, bytes): return x.encode(encoding) return x try: from collections.abc import Mapping except ImportError: from collections import Mapping try: reraise_exceptions = (RecursionError,) except NameError: reraise_exceptions = () try: import asyncio reraise_exceptions += (asyncio.CancelledError,) except (ImportError, AttributeError): pass try: from threading import Lock except ImportError: # Python <3.7 isn't guaranteed to have threading support. class Lock: def __enter__(self): pass def __exit__(self, *_): pass __all__ = [ "string_types", "reraise_exceptions", "quote_plus", "quote", "urlencode", "unquote", "urlparse", "map", "Queue", "Mapping", ] elasticsearch-py-7.17.6/elasticsearch/compat.pyi000066400000000000000000000031741426163262700217120ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import sys from typing import Callable, Tuple, Type, Union PY2: bool string_types: Tuple[type, ...] to_str: Callable[[Union[str, bytes]], str] to_bytes: Callable[[Union[str, bytes]], bytes] reraise_exceptions: Tuple[Type[Exception], ...] 
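# Behaviour sketch for the helpers annotated above (Python 3 semantics):
#
#     to_str(b"abc")    # -> "abc"   bytes are decoded with the given encoding
#     to_str("abc")     # -> "abc"   text passes through unchanged
#     to_bytes("abc")   # -> b"abc"  text is encoded with the given encoding
#     to_bytes(b"abc")  # -> b"abc"  bytes pass through unchanged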
if sys.version_info[0] == 2: from itertools import imap as map from urllib import quote as quote from urllib import quote_plus as quote_plus from urllib import unquote as unquote from urllib import urlencode as urlencode from Queue import Queue as Queue from urlparse import urlparse as urlparse else: from urllib.parse import quote as quote from urllib.parse import quote_plus as quote_plus from urllib.parse import unquote as unquote from urllib.parse import urlencode as urlencode from urllib.parse import urlparse as urlparse map = map from queue import Queue as Queue elasticsearch-py-7.17.6/elasticsearch/connection/000077500000000000000000000000001426163262700220365ustar00rootroot00000000000000elasticsearch-py-7.17.6/elasticsearch/connection/__init__.py000066400000000000000000000020351426163262700241470ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .base import Connection from .http_requests import RequestsHttpConnection from .http_urllib3 import Urllib3HttpConnection, create_ssl_context __all__ = [ "Connection", "RequestsHttpConnection", "Urllib3HttpConnection", "create_ssl_context", ] elasticsearch-py-7.17.6/elasticsearch/connection/__init__.pyi000066400000000000000000000020271426163262700243210ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .base import Connection as Connection from .http_requests import RequestsHttpConnection as RequestsHttpConnection from .http_urllib3 import Urllib3HttpConnection as Urllib3HttpConnection from .http_urllib3 import create_ssl_context as create_ssl_context elasticsearch-py-7.17.6/elasticsearch/connection/base.py000066400000000000000000000305101426163262700233210ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import binascii import gzip import io import logging import re import warnings from platform import python_version try: import simplejson as json except ImportError: import json from .. import __versionstr__ from ..compat import PY2 from ..exceptions import ( HTTP_EXCEPTIONS, ElasticsearchWarning, ImproperlyConfigured, TransportError, ) logger = logging.getLogger("elasticsearch") # create the elasticsearch.trace logger, but only set propagate to False if the # logger hasn't already been configured _tracer_already_configured = "elasticsearch.trace" in logging.Logger.manager.loggerDict tracer = logging.getLogger("elasticsearch.trace") if not _tracer_already_configured: tracer.propagate = False _WARNING_RE = re.compile(r"\"([^\"]*)\"") class Connection(object): """ Class responsible for maintaining a connection to an Elasticsearch node. It holds persistent connection pool to it and it's main interface (`perform_request`) is thread-safe. Also responsible for logging. :arg host: hostname of the node (default: localhost) :arg port: port to use (integer, default: 9200) :arg use_ssl: use ssl for the connection if `True` :arg url_prefix: optional url prefix for elasticsearch :arg timeout: default timeout in seconds (float, default: 10) :arg http_compress: Use gzip compression :arg cloud_id: The Cloud ID from ElasticCloud. Convenient way to connect to cloud instances. :arg opaque_id: Send this value in the 'X-Opaque-Id' HTTP header For tracing all requests made by this transport. """ HTTP_CLIENT_META = None def __init__( self, host="localhost", port=None, use_ssl=False, url_prefix="", timeout=10, headers=None, http_compress=None, cloud_id=None, api_key=None, opaque_id=None, meta_header=True, **kwargs ): if cloud_id: try: _, cloud_id = cloud_id.split(":") parent_dn, es_uuid = ( binascii.a2b_base64(cloud_id.encode("utf-8")) .decode("utf-8") .split("$")[:2] ) if ":" in parent_dn: parent_dn, _, parent_port = parent_dn.rpartition(":") if port is None and parent_port != "443": port = int(parent_port) except (ValueError, IndexError): raise ImproperlyConfigured("'cloud_id' is not properly formatted") host = "%s.%s" % (es_uuid, parent_dn) use_ssl = True if http_compress is None: http_compress = True # If cloud_id isn't set and port is default then use 9200. # Cloud should use '443' by default via the 'https' scheme. 
elif port is None: port = 9200 # Work-around if the implementing class doesn't # define the headers property before calling super().__init__() if not hasattr(self, "headers"): self.headers = {} headers = headers or {} for key in headers: self.headers[key.lower()] = headers[key] if opaque_id: self.headers["x-opaque-id"] = opaque_id self.headers.setdefault("user-agent", self._get_default_user_agent()) if api_key is not None: self.headers["authorization"] = self._get_api_key_header_val(api_key) if http_compress: self.headers["accept-encoding"] = "gzip,deflate" scheme = kwargs.get("scheme", "http") if use_ssl or scheme == "https": scheme = "https" use_ssl = True self.use_ssl = use_ssl self.http_compress = http_compress or False self.scheme = scheme self.hostname = host self.port = port if ":" in host: # IPv6 self.host = "%s://[%s]" % (scheme, host) else: self.host = "%s://%s" % (scheme, host) if self.port is not None: self.host += ":%s" % self.port if url_prefix: url_prefix = "/" + url_prefix.strip("/") self.url_prefix = url_prefix self.timeout = timeout if not isinstance(meta_header, bool): raise TypeError("meta_header must be of type bool") self.meta_header = meta_header def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, self.host) def __eq__(self, other): if not isinstance(other, Connection): raise TypeError("Unsupported equality check for %s and %s" % (self, other)) return self.__hash__() == other.__hash__() def __hash__(self): return id(self) def _gzip_compress(self, body): buf = io.BytesIO() with gzip.GzipFile(fileobj=buf, mode="wb") as f: f.write(body) return buf.getvalue() def _raise_warnings(self, warning_headers): """If 'headers' contains a 'Warning' header raise the warnings to be seen by the user. Takes an iterable of string values from any number of 'Warning' headers. """ if not warning_headers: return # Grab only the message from each header, the rest is discarded. # Format is: '(number) Elasticsearch-(version)-(instance) "(message)"' warning_messages = [] for header in warning_headers: # Because 'Requests' does it's own folding of multiple HTTP headers # into one header delimited by commas (totally standard compliant, just # annoying for cases like this) we need to expect there may be # more than one message per 'Warning' header. matches = _WARNING_RE.findall(header) if matches: warning_messages.extend(matches) else: # Don't want to throw away any warnings, even if they # don't follow the format we have now. Use the whole header. warning_messages.append(header) for message in warning_messages: warnings.warn(message, category=ElasticsearchWarning) def _pretty_json(self, data): # pretty JSON in tracer curl logs try: return json.dumps( json.loads(data), sort_keys=True, indent=2, separators=(",", ": ") ).replace("'", r"\u0027") except (ValueError, TypeError): # non-json data or a bulk request return data def _log_trace(self, method, path, body, status_code, response, duration): if not tracer.isEnabledFor(logging.INFO) or not tracer.handlers: return # include pretty in trace curls path = path.replace("?", "?pretty&", 1) if "?" 
in path else path + "?pretty" if self.url_prefix: path = path.replace(self.url_prefix, "", 1) tracer.info( "curl %s-X%s 'http://localhost:9200%s' -d '%s'", "-H 'Content-Type: application/json' " if body else "", method, path, self._pretty_json(body) if body else "", ) if tracer.isEnabledFor(logging.DEBUG): tracer.debug( "#[%s] (%.3fs)\n#%s", status_code, duration, self._pretty_json(response).replace("\n", "\n#") if response else "", ) def perform_request( self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None, ): raise NotImplementedError() def log_request_success( self, method, full_url, path, body, status_code, response, duration ): """Log a successful API call.""" # TODO: optionally pass in params instead of full_url and do urlencode only when needed # body has already been serialized to utf-8, deserialize it for logging # TODO: find a better way to avoid (de)encoding the body back and forth if body is not None: try: body = body.decode("utf-8", "ignore") except AttributeError: pass if response is not None: response = loggable_response_body(response) logger.info( "%s %s [status:%s request:%.3fs]", method, full_url, status_code, duration ) logger.debug("> %s", body) logger.debug("< %s", response) self._log_trace(method, path, body, status_code, response, duration) def log_request_fail( self, method, full_url, path, body, duration, status_code=None, response=None, exception=None, ): """Log an unsuccessful API call.""" # do not log 404s on HEAD requests if method == "HEAD" and status_code == 404: return logger.warning( "%s %s [status:%s request:%.3fs]", method, full_url, status_code or "N/A", duration, exc_info=exception is not None, ) # body has already been serialized to utf-8, deserialize it for logging # TODO: find a better way to avoid (de)encoding the body back and forth if body: try: body = body.decode("utf-8", "ignore") except AttributeError: pass if response is not None: response = loggable_response_body(response) logger.debug("> %s", body) self._log_trace(method, path, body, status_code, response, duration) if response is not None: logger.debug("< %s", response) def _raise_error(self, status_code, raw_data): """Locate appropriate exception and raise it.""" error_message = raw_data additional_info = None try: if raw_data: additional_info = json.loads(raw_data) error_message = additional_info.get("error", error_message) if isinstance(error_message, dict) and "type" in error_message: error_message = error_message["type"] except (ValueError, TypeError) as err: logger.warning("Undecodable raw error response from server: %s", err) raise HTTP_EXCEPTIONS.get(status_code, TransportError)( status_code, error_message, additional_info ) def _get_default_user_agent(self): return "elasticsearch-py/%s (Python %s)" % (__versionstr__, python_version()) def _get_api_key_header_val(self, api_key): """ Check the type of the passed api_key and return the correct header value for the `API Key authentication ` :arg api_key, either a tuple or a base64 encoded string """ if isinstance(api_key, (tuple, list)): s = "{0}:{1}".format(api_key[0], api_key[1]).encode("utf-8") return "ApiKey " + binascii.b2a_base64(s).rstrip(b"\r\n").decode("utf-8") return "ApiKey " + api_key def loggable_response_body(response): # If 'response' isn't unicode we need to try converting it to # unicode otherwise it's likely binary so should be encoded # properly. On Python 3.x this works out fine. 
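    # (Aside on `_get_api_key_header_val` above: the tuple form is joined as
    # "id:secret" and base64-encoded, so both forms yield the same header.
    # A sketch with made-up credentials:
    #
    #   >>> import binascii
    #   >>> s = "my-id:my-secret".encode("utf-8")
    #   >>> "ApiKey " + binascii.b2a_base64(s).rstrip(b"\r\n").decode("utf-8")
    #   'ApiKey bXktaWQ6bXktc2VjcmV0'
    # )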
if PY2 and not isinstance(response, unicode): # noqa try: response = response.decode("utf-8") except (AttributeError, UnicodeError): # Encodes unprintable characters to '\xXX' hex # like how is done in Python 3.x in bytes.__repr__ response = u"b" + repr(response).decode("utf-8") return response elasticsearch-py-7.17.6/elasticsearch/connection/base.pyi000066400000000000000000000064601426163262700235010ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import logging from typing import ( Any, Collection, Dict, List, Mapping, MutableMapping, NoReturn, Optional, Sequence, Tuple, Union, ) logger: logging.Logger tracer: logging.Logger class Connection(object): headers: Dict[str, str] use_ssl: bool http_compress: bool scheme: str hostname: str port: Optional[int] host: str url_prefix: str timeout: Optional[Union[float, int]] meta_header: bool def __init__( self, host: str = ..., port: Optional[int] = ..., use_ssl: bool = ..., url_prefix: str = ..., timeout: Optional[Union[float, int]] = ..., headers: Optional[Mapping[str, str]] = ..., http_compress: Optional[bool] = ..., cloud_id: Optional[str] = ..., api_key: Optional[Union[Tuple[str, str], List[str], str]] = ..., opaque_id: Optional[str] = ..., meta_header: bool = ..., **kwargs: Any ) -> None: ... def __repr__(self) -> str: ... def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... def _gzip_compress(self, body: bytes) -> bytes: ... def _raise_warnings(self, warning_headers: Sequence[str]) -> None: ... def _pretty_json(self, data: Any) -> str: ... def _log_trace( self, method: Any, path: Any, body: Any, status_code: Any, response: Any, duration: Any, ) -> None: ... def perform_request( self, method: str, url: str, params: Optional[MutableMapping[str, Any]] = ..., body: Optional[bytes] = ..., timeout: Optional[Union[int, float]] = ..., ignore: Collection[int] = ..., headers: Optional[MutableMapping[str, str]] = ..., ) -> Tuple[int, Mapping[str, str], str]: ... def log_request_success( self, method: str, full_url: str, path: str, body: Optional[bytes], status_code: int, response: str, duration: float, ) -> None: ... def log_request_fail( self, method: str, full_url: str, path: str, body: Optional[bytes], duration: float, status_code: Optional[int] = ..., response: Optional[str] = ..., exception: Optional[Exception] = ..., ) -> None: ... def _raise_error(self, status_code: int, raw_data: str) -> NoReturn: ... def _get_default_user_agent(self) -> str: ... def _get_api_key_header_val(self, api_key: Any) -> str: ... elasticsearch-py-7.17.6/elasticsearch/connection/http_requests.py000066400000000000000000000202711426163262700253240ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. 
See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import time import warnings from ..compat import reraise_exceptions, string_types, urlencode from ..exceptions import ( ConnectionError, ConnectionTimeout, ImproperlyConfigured, SSLError, ) from ..utils import _client_meta_version from .base import Connection try: import requests REQUESTS_AVAILABLE = True _REQUESTS_META_VERSION = _client_meta_version(requests.__version__) except ImportError: REQUESTS_AVAILABLE = False _REQUESTS_META_VERSION = "" class RequestsHttpConnection(Connection): """ Connection using the `requests` library. :arg http_auth: optional http auth information as either ':' separated string or a tuple. Any value will be passed into requests as `auth`. :arg use_ssl: use ssl for the connection if `True` :arg verify_certs: whether to verify SSL certificates :arg ssl_show_warn: show warning when verify certs is disabled :arg ca_certs: optional path to CA bundle. By default standard requests' bundle will be used. :arg client_cert: path to the file containing the private key and the certificate, or cert only if using client_key :arg client_key: path to the file containing the private key if using separate cert and key files (client_cert will contain only the cert) :arg headers: any custom http headers to be add to requests :arg http_compress: Use gzip compression :arg cloud_id: The Cloud ID from ElasticCloud. Convenient way to connect to cloud instances. Other host connection params will be ignored. :arg api_key: optional API Key authentication as either base64 encoded string or a tuple. :arg opaque_id: Send this value in the 'X-Opaque-Id' HTTP header For tracing all requests made by this transport. """ HTTP_CLIENT_META = ("rq", _REQUESTS_META_VERSION) def __init__( self, host="localhost", port=None, http_auth=None, use_ssl=False, verify_certs=True, ssl_show_warn=True, ca_certs=None, client_cert=None, client_key=None, headers=None, http_compress=None, cloud_id=None, api_key=None, opaque_id=None, **kwargs ): if not REQUESTS_AVAILABLE: raise ImproperlyConfigured( "Please install requests to use RequestsHttpConnection." ) # Initialize Session so .headers works before calling super().__init__(). self.session = requests.Session() for key in list(self.session.headers): self.session.headers.pop(key) super(RequestsHttpConnection, self).__init__( host=host, port=port, use_ssl=use_ssl, headers=headers, http_compress=http_compress, cloud_id=cloud_id, api_key=api_key, opaque_id=opaque_id, **kwargs ) if not self.http_compress: # Need to set this to 'None' otherwise Requests adds its own. 
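            # (Aside: a None value below makes requests drop the header rather
            # than send it, which can be checked directly:
            #
            #   >>> import requests
            #   >>> s = requests.Session()
            #   >>> s.headers["accept-encoding"] = None
            #   >>> req = s.prepare_request(requests.Request("GET", "http://localhost:9200"))
            #   >>> "accept-encoding" in req.headers
            #   False
            #
            # so the server only ever sees compression headers this class sets.)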
self.session.headers["accept-encoding"] = None if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = tuple(http_auth) elif isinstance(http_auth, string_types): http_auth = tuple(http_auth.split(":", 1)) self.session.auth = http_auth self.base_url = "%s%s" % ( self.host, self.url_prefix, ) self.session.verify = verify_certs if not client_key: self.session.cert = client_cert elif client_cert: # cert is a tuple of (certfile, keyfile) self.session.cert = (client_cert, client_key) if ca_certs: if not verify_certs: raise ImproperlyConfigured( "You cannot pass CA certificates when verify SSL is off." ) self.session.verify = ca_certs if not ssl_show_warn: requests.packages.urllib3.disable_warnings() if self.use_ssl and not verify_certs and ssl_show_warn: warnings.warn( "Connecting to %s using SSL with verify_certs=False is insecure." % self.host ) def perform_request( self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None ): url = self.base_url + url headers = headers or {} if params: url = "%s?%s" % (url, urlencode(params)) orig_body = body if self.http_compress and body: body = self._gzip_compress(body) headers["content-encoding"] = "gzip" start = time.time() request = requests.Request(method=method, headers=headers, url=url, data=body) prepared_request = self.session.prepare_request(request) settings = self.session.merge_environment_settings( prepared_request.url, {}, None, None, None ) send_kwargs = {"timeout": timeout or self.timeout} send_kwargs.update(settings) try: response = self.session.send(prepared_request, **send_kwargs) duration = time.time() - start response_headers = { header.lower(): value for header, value in response.headers.items() } content_type = response_headers.get("content-type", "") raw_data = response.content # The 'application/vnd.mapbox-vector-file' type shouldn't be # decoded into text, instead should be forwarded as bytes. if content_type != "application/vnd.mapbox-vector-tile": raw_data = raw_data.decode("utf-8", "surrogatepass") except reraise_exceptions: raise except Exception as e: self.log_request_fail( method, url, prepared_request.path_url, orig_body, time.time() - start, exception=e, ) if isinstance(e, requests.exceptions.SSLError): raise SSLError("N/A", str(e), e) if isinstance(e, requests.Timeout): raise ConnectionTimeout("TIMEOUT", str(e), e) raise ConnectionError("N/A", str(e), e) # raise warnings if any from the 'Warnings' header. warnings_headers = ( (response_headers["warning"],) if "warning" in response_headers else () ) self._raise_warnings(warnings_headers) # raise errors based on http status codes, let the client handle those if needed if ( not (200 <= response.status_code < 300) and response.status_code not in ignore ): self.log_request_fail( method, url, response.request.path_url, orig_body, duration, response.status_code, raw_data, ) self._raise_error(response.status_code, raw_data) self.log_request_success( method, url, response.request.path_url, orig_body, response.status_code, raw_data, duration, ) return response.status_code, response_headers, raw_data @property def headers(self): return self.session.headers def close(self): """ Explicitly closes connections """ self.session.close() elasticsearch-py-7.17.6/elasticsearch/connection/http_requests.pyi000066400000000000000000000030661426163262700255000ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. 
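# Usage sketch for RequestsHttpConnection above (host and credentials are
# illustrative, not defaults):
#
#   conn = RequestsHttpConnection("localhost", port=9200,
#                                 http_auth=("elastic", "changeme"))
#   status, headers, body = conn.perform_request("GET", "/")
#
# `http_auth` may equivalently be the string "elastic:changeme"; it is split
# on the first ':' and handed to requests as its `auth` tuple.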
See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Mapping, Optional import requests from .base import Connection class RequestsHttpConnection(Connection): session: requests.Session def __init__( self, host: str = ..., port: Optional[int] = ..., http_auth: Optional[Any] = ..., use_ssl: bool = ..., verify_certs: bool = ..., ssl_show_warn: bool = ..., ca_certs: Optional[Any] = ..., client_cert: Optional[Any] = ..., client_key: Optional[Any] = ..., headers: Optional[Mapping[str, str]] = ..., http_compress: Optional[bool] = ..., cloud_id: Optional[str] = ..., api_key: Optional[Any] = ..., opaque_id: Optional[str] = ..., meta_header: bool = ..., **kwargs: Any ) -> None: ... elasticsearch-py-7.17.6/elasticsearch/connection/http_urllib3.py000066400000000000000000000260621426163262700250310ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import ssl import time import warnings import urllib3 # type: ignore from urllib3.exceptions import ReadTimeoutError from urllib3.exceptions import SSLError as UrllibSSLError # type: ignore from urllib3.util.retry import Retry # type: ignore from ..compat import reraise_exceptions, to_str, urlencode from ..exceptions import ( ConnectionError, ConnectionTimeout, ImproperlyConfigured, SSLError, ) from ..utils import _client_meta_version from .base import Connection # sentinel value for `verify_certs` and `ssl_show_warn`. # This is used to detect if a user is passing in a value # for SSL kwargs if also using an SSLContext. VERIFY_CERTS_DEFAULT = object() SSL_SHOW_WARN_DEFAULT = object() CA_CERTS = None try: import certifi CA_CERTS = certifi.where() except ImportError: pass def create_ssl_context(**kwargs): """ A helper function around creating an SSL context https://docs.python.org/3/library/ssl.html#context-creation Accepts kwargs in the same manner as `create_default_context`. """ ctx = ssl.create_default_context(**kwargs) return ctx class Urllib3HttpConnection(Connection): """ Default connection class using the `urllib3` library and the http protocol. 
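    For example, a custom ``SSLContext`` can be built with the
    ``create_ssl_context`` helper above and passed in via ``ssl_context`` (a
    sketch; the CA bundle path is hypothetical)::

        from elasticsearch.connection import create_ssl_context

        ctx = create_ssl_context(cafile="/path/to/ca.pem")
        conn = Urllib3HttpConnection(
            "localhost", port=9200, use_ssl=True, ssl_context=ctx
        )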
:arg host: hostname of the node (default: localhost) :arg port: port to use (integer, default: 9200) :arg url_prefix: optional url prefix for elasticsearch :arg timeout: default timeout in seconds (float, default: 10) :arg http_auth: optional http auth information as either ':' separated string or a tuple :arg use_ssl: use ssl for the connection if `True` :arg verify_certs: whether to verify SSL certificates :arg ssl_show_warn: show warning when verify certs is disabled :arg ca_certs: optional path to CA bundle. See https://urllib3.readthedocs.io/en/latest/security.html#using-certifi-with-urllib3 for instructions how to get default set :arg client_cert: path to the file containing the private key and the certificate, or cert only if using client_key :arg client_key: path to the file containing the private key if using separate cert and key files (client_cert will contain only the cert) :arg ssl_version: version of the SSL protocol to use. Choices are: SSLv23 (default) SSLv2 SSLv3 TLSv1 (see ``PROTOCOL_*`` constants in the ``ssl`` module for exact options for your environment). :arg ssl_assert_hostname: use hostname verification if not `False` :arg ssl_assert_fingerprint: verify the supplied certificate fingerprint if not `None` :arg maxsize: the number of connections which will be kept open to this host. See https://urllib3.readthedocs.io/en/1.4/pools.html#api for more information. :arg headers: any custom http headers to be add to requests :arg http_compress: Use gzip compression :arg cloud_id: The Cloud ID from ElasticCloud. Convenient way to connect to cloud instances. Other host connection params will be ignored. :arg api_key: optional API Key authentication as either base64 encoded string or a tuple. :arg opaque_id: Send this value in the 'X-Opaque-Id' HTTP header For tracing all requests made by this transport. """ HTTP_CLIENT_META = ("ur", _client_meta_version(urllib3.__version__)) def __init__( self, host="localhost", port=None, http_auth=None, use_ssl=False, verify_certs=VERIFY_CERTS_DEFAULT, ssl_show_warn=SSL_SHOW_WARN_DEFAULT, ca_certs=None, client_cert=None, client_key=None, ssl_version=None, ssl_assert_hostname=None, ssl_assert_fingerprint=None, maxsize=10, headers=None, ssl_context=None, http_compress=None, cloud_id=None, api_key=None, opaque_id=None, **kwargs ): # Initialize headers before calling super().__init__(). 
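        # (Aside: VERIFY_CERTS_DEFAULT and SSL_SHOW_WARN_DEFAULT above are
        # module-level sentinels; unlike None, a bare object() can never be
        # passed in by accident, so "argument omitted" stays distinguishable
        # from any real value. A minimal sketch of the same pattern, with
        # illustrative names:
        #
        #   _MISSING = object()
        #
        #   def connect(verify=_MISSING):
        #       explicitly_set = verify is not _MISSING
        #       if not explicitly_set:
        #           verify = True  # resolve the real default here
        #       return explicitly_set, verify
        # )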
self.headers = urllib3.make_headers(keep_alive=True) super(Urllib3HttpConnection, self).__init__( host=host, port=port, use_ssl=use_ssl, headers=headers, http_compress=http_compress, cloud_id=cloud_id, api_key=api_key, opaque_id=opaque_id, **kwargs ) if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = ":".join(http_auth) self.headers.update(urllib3.make_headers(basic_auth=http_auth)) pool_class = urllib3.HTTPConnectionPool kw = {} # if providing an SSL context, raise error if any other SSL related flag is used if ssl_context and ( (verify_certs is not VERIFY_CERTS_DEFAULT) or (ssl_show_warn is not SSL_SHOW_WARN_DEFAULT) or ca_certs or client_cert or client_key or ssl_version ): warnings.warn( "When using `ssl_context`, all other SSL related kwargs are ignored" ) # if ssl_context provided use SSL by default if ssl_context and self.use_ssl: pool_class = urllib3.HTTPSConnectionPool kw.update( { "assert_fingerprint": ssl_assert_fingerprint, "ssl_context": ssl_context, } ) elif self.use_ssl: pool_class = urllib3.HTTPSConnectionPool kw.update( { "ssl_version": ssl_version, "assert_hostname": ssl_assert_hostname, "assert_fingerprint": ssl_assert_fingerprint, } ) # Convert all sentinel values to their actual default # values if not using an SSLContext. if verify_certs is VERIFY_CERTS_DEFAULT: verify_certs = True if ssl_show_warn is SSL_SHOW_WARN_DEFAULT: ssl_show_warn = True ca_certs = CA_CERTS if ca_certs is None else ca_certs if verify_certs: if not ca_certs: raise ImproperlyConfigured( "Root certificates are missing for certificate " "validation. Either pass them in using the ca_certs parameter or " "install certifi to use it automatically." ) kw.update( { "cert_reqs": "CERT_REQUIRED", "ca_certs": ca_certs, "cert_file": client_cert, "key_file": client_key, } ) else: kw["cert_reqs"] = "CERT_NONE" if ssl_show_warn: warnings.warn( "Connecting to %s using SSL with verify_certs=False is insecure." % self.host ) if not ssl_show_warn: urllib3.disable_warnings() self.pool = pool_class( self.hostname, port=self.port, timeout=self.timeout, maxsize=maxsize, **kw ) def perform_request( self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None ): url = self.url_prefix + url if params: url = "%s?%s" % (url, urlencode(params)) full_url = self.host + url start = time.time() orig_body = body try: kw = {} if timeout: kw["timeout"] = timeout # in python2 we need to make sure the url and method are not # unicode. Otherwise the body will be decoded into unicode too and # that will fail (#133, #201). if not isinstance(url, str): url = url.encode("utf-8") if not isinstance(method, str): method = method.encode("utf-8") request_headers = self.headers.copy() request_headers.update(headers or ()) request_headers = { to_str(header, "latin-1"): to_str(value, "latin-1") for header, value in request_headers.items() } if self.http_compress and body: body = self._gzip_compress(body) request_headers["content-encoding"] = "gzip" response = self.pool.urlopen( method, url, body, retries=Retry(False), headers=request_headers, **kw ) response_headers = { header.lower(): value for header, value in response.headers.items() } duration = time.time() - start raw_data = response.data content_type = response_headers.get("content-type", "") # The 'application/vnd.mapbox-vector-file' type shouldn't be # decoded into text, instead should be forwarded as bytes. 
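            # (Aside: "surrogatepass" below lets arbitrary byte sequences
            # survive a decode/encode round-trip, e.g.:
            #
            #   >>> raw = b"\xed\xa0\xbd"  # rejected by strict UTF-8
            #   >>> raw.decode("utf-8", "surrogatepass").encode("utf-8", "surrogatepass")
            #   b'\xed\xa0\xbd'
            #
            # so mostly-UTF-8 response bodies are never lost to decode errors.)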
if content_type != "application/vnd.mapbox-vector-tile": raw_data = raw_data.decode("utf-8", "surrogatepass") except reraise_exceptions: raise except Exception as e: self.log_request_fail( method, full_url, url, orig_body, time.time() - start, exception=e ) if isinstance(e, UrllibSSLError): raise SSLError("N/A", str(e), e) if isinstance(e, ReadTimeoutError): raise ConnectionTimeout("TIMEOUT", str(e), e) raise ConnectionError("N/A", str(e), e) # raise warnings if any from the 'Warnings' header. warning_headers = response.headers.get_all("warning", ()) self._raise_warnings(warning_headers) # raise errors based on http status codes, let the client handle those if needed if not (200 <= response.status < 300) and response.status not in ignore: self.log_request_fail( method, full_url, url, orig_body, duration, response.status, raw_data ) self._raise_error(response.status, raw_data) self.log_request_success( method, full_url, url, orig_body, response.status, raw_data, duration ) return response.status, response_headers, raw_data def close(self): """ Explicitly closes connection """ self.pool.close() elasticsearch-py-7.17.6/elasticsearch/connection/http_urllib3.pyi000066400000000000000000000037441426163262700252040ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import ssl from typing import Any, Mapping, Optional, Union import urllib3 from .base import Connection def create_ssl_context( cafile: Any = ..., capath: Any = ..., cadata: Any = ..., ) -> ssl.SSLContext: ... class Urllib3HttpConnection(Connection): pool: urllib3.HTTPConnectionPool def __init__( self, host: str = ..., port: Optional[int] = ..., url_prefix: str = ..., timeout: Optional[Union[float, int]] = ..., http_auth: Any = ..., use_ssl: bool = ..., verify_certs: bool = ..., ssl_show_warn: bool = ..., ca_certs: Optional[Any] = ..., client_cert: Optional[Any] = ..., client_key: Optional[Any] = ..., ssl_version: Optional[Any] = ..., ssl_assert_hostname: Optional[Any] = ..., ssl_assert_fingerprint: Optional[Any] = ..., maxsize: int = ..., headers: Optional[Mapping[str, str]] = ..., ssl_context: Optional[Any] = ..., http_compress: Optional[bool] = ..., cloud_id: Optional[str] = ..., api_key: Optional[Any] = ..., opaque_id: Optional[str] = ..., meta_header: bool = ..., **kwargs: Any ) -> None: ... elasticsearch-py-7.17.6/elasticsearch/connection/pooling.py000066400000000000000000000033461426163262700240650ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .base import Connection try: import queue except ImportError: import Queue as queue # type: ignore class PoolingConnection(Connection): """ Base connection class for connections that use libraries without thread safety and no capacity for connection pooling. To use this just implement a ``_make_connection`` method that constructs a new connection and returns it. """ def __init__(self, *args, **kwargs): self._free_connections = queue.Queue() super(PoolingConnection, self).__init__(*args, **kwargs) def _make_connection(self): raise NotImplementedError def _get_connection(self): try: return self._free_connections.get_nowait() except queue.Empty: return self._make_connection() def _release_connection(self, con): self._free_connections.put(con) def close(self): """ Explicitly close connection """ pass elasticsearch-py-7.17.6/elasticsearch/connection/pooling.pyi000066400000000000000000000020331426163262700242260ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from .base import Connection class PoolingConnection(Connection): def _make_connection(self) -> Connection: ... def _get_connection(self) -> Connection: ... def _release_connection(self, con: Connection) -> None: ... def close(self) -> None: ... elasticsearch-py-7.17.6/elasticsearch/connection_pool.py000066400000000000000000000263611426163262700234510ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
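# A minimal sketch of how PoolingConnection above is meant to be extended;
# `SomeClient` stands in for any non-thread-safe third-party transport:
#
#   class MyPooledConnection(PoolingConnection):
#       def _make_connection(self):
#           # called whenever the free-connection queue is empty
#           return SomeClient(self.host, timeout=self.timeout)
#
# A perform_request() implementation would then bracket its work with
# _get_connection() and _release_connection(con) to recycle clients safely.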
import logging
import random
import threading
import time

try:
    from Queue import Empty, PriorityQueue
except ImportError:
    from queue import PriorityQueue, Empty

from .exceptions import ImproperlyConfigured

logger = logging.getLogger("elasticsearch")


class ConnectionSelector(object):
    """
    Simple class used to select a connection from a list of currently live
    connection instances. At init time it is passed a dictionary containing
    all the connections' options, which it can then use during the selection
    process. When the `select` method is called it is given a list of
    *currently* live connections to choose from.

    The options dictionary is the one that has been passed to
    :class:`~elasticsearch.Transport` as `hosts` param and the same that is
    used to construct the Connection object itself. When the Connection was
    created from information retrieved from the cluster via the sniffing
    process it will be the dictionary returned by the `host_info_callback`.

    An example of where this would be useful is a zone-aware selector that
    would only select connections from its own zones and only fall back to
    other connections where there would be none in its zones.
    """

    def __init__(self, opts):
        """
        :arg opts: dictionary of connection instances and their options
        """
        self.connection_opts = opts

    def select(self, connections):
        """
        Select a connection from the given list.

        :arg connections: list of live connections to choose from
        """
        pass


class RandomSelector(ConnectionSelector):
    """
    Select a connection at random
    """

    def select(self, connections):
        return random.choice(connections)


class RoundRobinSelector(ConnectionSelector):
    """
    Selector using round-robin.
    """

    def __init__(self, opts):
        super(RoundRobinSelector, self).__init__(opts)
        self.data = threading.local()

    def select(self, connections):
        self.data.rr = getattr(self.data, "rr", -1) + 1
        self.data.rr %= len(connections)
        return connections[self.data.rr]


class ConnectionPool(object):
    """
    Container holding the :class:`~elasticsearch.Connection` instances,
    managing the selection process (via a
    :class:`~elasticsearch.ConnectionSelector`) and dead connections.

    Its only interactions are with the :class:`~elasticsearch.Transport` class
    that drives all the actions within `ConnectionPool`.

    Initially connections are stored on the class as a list and, along with
    the connection options, get passed to the `ConnectionSelector` instance
    for future reference.

    Upon each request the `Transport` will ask for a `Connection` via the
    `get_connection` method. If the connection fails (its `perform_request`
    raises a `ConnectionError`) it will be marked as dead (via `mark_dead`)
    and put on a timeout (if it fails N times in a row the timeout is
    exponentially longer - the formula is `default_timeout * 2 ** (fail_count - 1)`).
    When the timeout is over the connection will be resurrected and returned
    to the live pool. A connection that has been previously marked as dead
    and succeeds will be marked as live (its fail count will be deleted).
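    With the defaults (``dead_timeout=60``, ``timeout_cutoff=5``) the
    retirement periods for consecutive failures therefore come out to 60, 120,
    240, 480, 960 and 1920 seconds; from the sixth failure on the exponent is
    capped and the timeout stays at 1920 seconds.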
""" def __init__( self, connections, dead_timeout=60, timeout_cutoff=5, selector_class=RoundRobinSelector, randomize_hosts=True, **kwargs ): """ :arg connections: list of tuples containing the :class:`~elasticsearch.Connection` instance and it's options :arg dead_timeout: number of seconds a connection should be retired for after a failure, increases on consecutive failures :arg timeout_cutoff: number of consecutive failures after which the timeout doesn't increase :arg selector_class: :class:`~elasticsearch.ConnectionSelector` subclass to use if more than one connection is live :arg randomize_hosts: shuffle the list of connections upon arrival to avoid dog piling effect across processes """ if not connections: raise ImproperlyConfigured( "No defined connections, you need to " "specify at least one host." ) self.connection_opts = connections self.connections = [c for (c, opts) in connections] # remember original connection list for resurrect(force=True) self.orig_connections = tuple(self.connections) # PriorityQueue for thread safety and ease of timeout management self.dead = PriorityQueue(len(self.connections)) self.dead_count = {} if randomize_hosts: # randomize the connection list to avoid all clients hitting same node # after startup/restart random.shuffle(self.connections) # default timeout after which to try resurrecting a connection self.dead_timeout = dead_timeout self.timeout_cutoff = timeout_cutoff self.selector = selector_class(dict(connections)) def mark_dead(self, connection, now=None): """ Mark the connection as dead (failed). Remove it from the live pool and put it on a timeout. :arg connection: the failed instance """ # allow inject for testing purposes now = now if now else time.time() try: self.connections.remove(connection) except ValueError: logger.info( "Attempted to remove %r, but it does not exist in the connection pool.", connection, ) # connection not alive or another thread marked it already, ignore return else: dead_count = self.dead_count.get(connection, 0) + 1 self.dead_count[connection] = dead_count timeout = self.dead_timeout * 2 ** min(dead_count - 1, self.timeout_cutoff) self.dead.put((now + timeout, connection)) logger.warning( "Connection %r has failed for %i times in a row, putting on %i second timeout.", connection, dead_count, timeout, ) def mark_live(self, connection): """ Mark connection as healthy after a resurrection. Resets the fail counter for the connection. :arg connection: the connection to redeem """ try: del self.dead_count[connection] except KeyError: # race condition, safe to ignore pass def resurrect(self, force=False): """ Attempt to resurrect a connection from the dead pool. It will try to locate one (not all) eligible (it's timeout is over) connection to return to the live pool. Any resurrected connection is also returned. :arg force: resurrect a connection even if there is none eligible (used when we have no live connections). If force is specified resurrect always returns a connection. """ # no dead connections if self.dead.empty(): # we are forced to return a connection, take one from the original # list. This is to avoid a race condition where get_connection can # see no live connections but when it calls resurrect self.dead is # also empty. We assume that other threat has resurrected all # available connections so we can safely return one at random. 
            if force:
                return random.choice(self.orig_connections)
            return

        try:
            # retrieve a connection to check
            timeout, connection = self.dead.get(block=False)
        except Empty:
            # other thread has been faster and the queue is now empty. If we
            # are forced, return a connection at random again.
            if force:
                return random.choice(self.orig_connections)
            return

        if not force and timeout > time.time():
            # return it back if not eligible and not forced
            self.dead.put((timeout, connection))
            return

        # either we were forced or the connection is eligible to be retried
        self.connections.append(connection)
        logger.info("Resurrecting connection %r (force=%s).", connection, force)
        return connection

    def get_connection(self):
        """
        Return a connection from the pool using the `ConnectionSelector`
        instance.

        It tries to resurrect eligible connections, forces a resurrection when
        no connections are available and passes the list of live connections
        to the selector instance to choose from.

        Returns a connection instance and its current fail count.
        """
        self.resurrect()
        connections = self.connections[:]

        # no live nodes, resurrect one by force and return it
        if not connections:
            return self.resurrect(True)

        # only call selector if we have a selection
        if len(connections) > 1:
            return self.selector.select(connections)

        # only one connection, no need for a selector
        return connections[0]

    def close(self):
        """
        Explicitly closes connections
        """
        for conn in self.connections:
            conn.close()

    def __repr__(self):
        return "<%s: %r>" % (type(self).__name__, self.connections)


class DummyConnectionPool(ConnectionPool):
    def __init__(self, connections, **kwargs):
        if len(connections) != 1:
            raise ImproperlyConfigured(
                "DummyConnectionPool needs exactly one connection defined."
            )
        # we need connection opts for sniffing logic
        self.connection_opts = connections
        self.connection = connections[0][0]
        self.connections = (self.connection,)

    def get_connection(self):
        return self.connection

    def close(self):
        """
        Explicitly closes connections
        """
        self.connection.close()

    def _noop(self, *args, **kwargs):
        pass

    mark_dead = mark_live = resurrect = _noop


class EmptyConnectionPool(ConnectionPool):
    """A connection pool that is empty. Errors out if used."""

    def __init__(self, *_, **__):
        self.connections = []
        self.connection_opts = []

    def get_connection(self):
        raise ImproperlyConfigured("No connections were configured")

    def _noop(self, *args, **kwargs):
        pass

    close = mark_dead = mark_live = resurrect = _noop
elasticsearch-py-7.17.6/elasticsearch/connection_pool.pyi000066400000000000000000000055651426163262700236250ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
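# A behavioral sketch of the pool above; `c1` and `c2` stand in for two
# configured Connection instances:
#
#   >>> pool = ConnectionPool([(c1, {}), (c2, {})], randomize_hosts=False)
#   >>> pool.get_connection() is c1   # round-robin starts at the first node
#   True
#   >>> pool.get_connection() is c2
#   True
#   >>> pool.mark_dead(c2)            # c2 is retired for dead_timeout seconds
#   >>> pool.get_connection() is c1   # only the live node is handed out now
#   True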
import logging from typing import Any, Dict, List, Optional, Sequence, Tuple, Type, Union from .connection import Connection try: from Queue import PriorityQueue # type: ignore except ImportError: from queue import PriorityQueue logger: logging.Logger class ConnectionSelector(object): connection_opts: Sequence[Tuple[Connection, Any]] def __init__(self, opts: Sequence[Tuple[Connection, Any]]) -> None: ... def select(self, connections: Sequence[Connection]) -> Connection: ... class RandomSelector(ConnectionSelector): ... class RoundRobinSelector(ConnectionSelector): ... class ConnectionPool(object): connections_opts: Sequence[Tuple[Connection, Any]] connections: Sequence[Connection] orig_connections: Tuple[Connection, ...] dead: PriorityQueue dead_count: Dict[Connection, int] dead_timeout: float timeout_cutoff: int selector: ConnectionSelector def __init__( self, connections: Sequence[Tuple[Connection, Any]], dead_timeout: float = ..., timeout_cutoff: int = ..., selector_class: Type[ConnectionSelector] = ..., randomize_hosts: bool = ..., **kwargs: Any ) -> None: ... def mark_dead(self, connection: Connection, now: Optional[float] = ...) -> None: ... def mark_live(self, connection: Connection) -> None: ... def resurrect(self, force: bool = ...) -> Optional[Connection]: ... def get_connection(self) -> Connection: ... def close(self) -> None: ... def __repr__(self) -> str: ... class DummyConnectionPool(ConnectionPool): def __init__( self, connections: Sequence[Tuple[Connection, Any]], **kwargs: Any ) -> None: ... def get_connection(self) -> Connection: ... def close(self) -> None: ... def _noop(self, *args: Any, **kwargs: Any) -> Any: ... mark_dead = mark_live = resurrect = _noop class EmptyConnectionPool(ConnectionPool): def __init__(self, *_: Any, **__: Any) -> None: ... def get_connection(self) -> Connection: ... def _noop(self, *args: Any, **kwargs: Any) -> Any: ... close = mark_dead = mark_live = resurrect = _noop elasticsearch-py-7.17.6/elasticsearch/exceptions.py000066400000000000000000000122231426163262700224320ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. __all__ = [ "ImproperlyConfigured", "ElasticsearchException", "SerializationError", "TransportError", "NotFoundError", "ConflictError", "RequestError", "ConnectionError", "SSLError", "ConnectionTimeout", "AuthenticationException", "AuthorizationException", ] class ImproperlyConfigured(Exception): """ Exception raised when the config passed to the client is inconsistent or invalid. """ class ElasticsearchException(Exception): """ Base class for all exceptions raised by this package's operations (doesn't apply to :class:`~elasticsearch.ImproperlyConfigured`). """ class SerializationError(ElasticsearchException): """ Data passed in failed to serialize properly in the ``Serializer`` being used. 
""" class UnsupportedProductError(ElasticsearchException): """Error which is raised when the client detects it's not connected to a supported product. """ class TransportError(ElasticsearchException): """ Exception raised when ES returns a non-OK (>=400) HTTP status code. Or when an actual connection error happens; in that case the ``status_code`` will be set to ``'N/A'``. """ @property def status_code(self): """ The HTTP status code of the response that precipitated the error or ``'N/A'`` if not applicable. """ return self.args[0] @property def error(self): """A string error message.""" return self.args[1] @property def info(self): """ Dict of returned error info from ES, where available, underlying exception when not. """ return self.args[2] def __str__(self): cause = "" try: if self.info and "error" in self.info: if isinstance(self.info["error"], dict): root_cause = self.info["error"]["root_cause"][0] cause = ", ".join( filter( None, [ repr(root_cause["reason"]), root_cause.get("resource.id"), root_cause.get("resource.type"), ], ) ) else: cause = repr(self.info["error"]) except LookupError: pass msg = ", ".join(filter(None, [str(self.status_code), repr(self.error), cause])) return "%s(%s)" % (self.__class__.__name__, msg) class ConnectionError(TransportError): """ Error raised when there was an exception while talking to ES. Original exception from the underlying :class:`~elasticsearch.Connection` implementation is available as ``.info``. """ def __str__(self): return "ConnectionError(%s) caused by: %s(%s)" % ( self.error, self.info.__class__.__name__, self.info, ) class SSLError(ConnectionError): """Error raised when encountering SSL errors.""" class ConnectionTimeout(ConnectionError): """A network timeout. Doesn't cause a node retry by default.""" def __str__(self): return "ConnectionTimeout caused by - %s(%s)" % ( self.info.__class__.__name__, self.info, ) class NotFoundError(TransportError): """Exception representing a 404 status code.""" class ConflictError(TransportError): """Exception representing a 409 status code.""" class RequestError(TransportError): """Exception representing a 400 status code.""" class AuthenticationException(TransportError): """Exception representing a 401 status code.""" class AuthorizationException(TransportError): """Exception representing a 403 status code.""" class ElasticsearchWarning(Warning): """Warning that is raised when a deprecated option or incorrect usage is flagged via the 'Warning' HTTP header. """ # Alias of 'ElasticsearchWarning' for backwards compatibility. # Additional functionality was added to the 'Warning' HTTP header # not related to deprecations. ElasticsearchDeprecationWarning = ElasticsearchWarning # more generic mappings from status_code to python exceptions HTTP_EXCEPTIONS = { 400: RequestError, 401: AuthenticationException, 403: AuthorizationException, 404: NotFoundError, 409: ConflictError, } elasticsearch-py-7.17.6/elasticsearch/exceptions.pyi000066400000000000000000000035021426163262700226030ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Dict, Union class ImproperlyConfigured(Exception): ... class ElasticsearchException(Exception): ... class SerializationError(ElasticsearchException): ... class UnsupportedProductError(ElasticsearchException): ... class TransportError(ElasticsearchException): @property def status_code(self) -> Union[str, int]: ... @property def error(self) -> str: ... @property def info(self) -> Union[Dict[str, Any], Exception, Any]: ... def __str__(self) -> str: ... class ConnectionError(TransportError): def __str__(self) -> str: ... class SSLError(ConnectionError): ... class ConnectionTimeout(ConnectionError): def __str__(self) -> str: ... class NotFoundError(TransportError): ... class ConflictError(TransportError): ... class RequestError(TransportError): ... class AuthenticationException(TransportError): ... class AuthorizationException(TransportError): ... class ElasticsearchWarning(Warning): ... ElasticsearchDeprecationWarning = ElasticsearchWarning HTTP_EXCEPTIONS: Dict[int, ElasticsearchException] elasticsearch-py-7.17.6/elasticsearch/helpers/000077500000000000000000000000001426163262700213415ustar00rootroot00000000000000elasticsearch-py-7.17.6/elasticsearch/helpers/__init__.py000066400000000000000000000030651426163262700234560ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import sys from .actions import ( _chunk_actions, _process_bulk_chunk, bulk, expand_action, parallel_bulk, reindex, scan, streaming_bulk, ) from .errors import BulkIndexError, ScanError __all__ = [ "BulkIndexError", "ScanError", "expand_action", "streaming_bulk", "bulk", "parallel_bulk", "scan", "reindex", "_chunk_actions", "_process_bulk_chunk", ] try: # Asyncio only supported on Python 3.6+ if sys.version_info < (3, 6): raise ImportError from .._async.helpers import ( async_bulk, async_reindex, async_scan, async_streaming_bulk, ) __all__ += ["async_scan", "async_bulk", "async_reindex", "async_streaming_bulk"] except (ImportError, SyntaxError): pass elasticsearch-py-7.17.6/elasticsearch/helpers/__init__.pyi000066400000000000000000000032351426163262700236260ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import sys from .actions import _chunk_actions as _chunk_actions from .actions import _process_bulk_chunk as _process_bulk_chunk from .actions import bulk as bulk from .actions import expand_action as expand_action from .actions import parallel_bulk as parallel_bulk from .actions import reindex as reindex from .actions import scan as scan from .actions import streaming_bulk as streaming_bulk from .errors import BulkIndexError as BulkIndexError from .errors import ScanError as ScanError try: # Asyncio only supported on Python 3.6+ if sys.version_info < (3, 6): raise ImportError from .._async.helpers import async_bulk as async_bulk from .._async.helpers import async_reindex as async_reindex from .._async.helpers import async_scan as async_scan from .._async.helpers import async_streaming_bulk as async_streaming_bulk except (ImportError, SyntaxError): pass elasticsearch-py-7.17.6/elasticsearch/helpers/actions.py000066400000000000000000000630211426163262700233550ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import logging import time from operator import methodcaller from ..compat import Mapping, Queue, map, string_types from ..exceptions import NotFoundError, TransportError from .errors import BulkIndexError, ScanError logger = logging.getLogger("elasticsearch.helpers") def expand_action(data): """ From one document or action definition passed in by the user extract the action/data lines needed for elasticsearch's :meth:`~elasticsearch.Elasticsearch.bulk` api. """ # when given a string, assume user wants to index raw json if isinstance(data, string_types): return '{"index":{}}', data # make sure we don't alter the action data = data.copy() op_type = data.pop("_op_type", "index") action = {op_type: {}} # If '_source' is a dict use it for source # otherwise if op_type == 'update' then # '_source' should be in the metadata. 
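    # (Aside: for a plain document the split looks like this; field names
    # here are made up:
    #
    #   >>> expand_action({"_op_type": "index", "_index": "test", "_id": 1, "title": "x"})
    #   ({'index': {'_id': 1, '_index': 'test'}}, {'title': 'x'})
    #
    #   >>> expand_action({"_op_type": "delete", "_index": "test", "_id": 1})
    #   ({'delete': {'_id': 1, '_index': 'test'}}, None)
    # )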
if ( op_type == "update" and "_source" in data and not isinstance(data["_source"], Mapping) ): action[op_type]["_source"] = data.pop("_source") for key in ( "_id", "_index", "_if_seq_no", "_if_primary_term", "_parent", "_percolate", "_retry_on_conflict", "_routing", "_timestamp", "_type", "_version", "_version_type", "if_seq_no", "if_primary_term", "parent", "pipeline", "retry_on_conflict", "routing", "version", "version_type", ): if key in data: if key in { "_if_seq_no", "_if_primary_term", "_parent", "_retry_on_conflict", "_routing", "_version", "_version_type", }: action[op_type][key[1:]] = data.pop(key) else: action[op_type][key] = data.pop(key) # no data payload for delete if op_type == "delete": return action, None return action, data.get("_source", data) class _ActionChunker: def __init__(self, chunk_size, max_chunk_bytes, serializer): self.chunk_size = chunk_size self.max_chunk_bytes = max_chunk_bytes self.serializer = serializer self.size = 0 self.action_count = 0 self.bulk_actions = [] self.bulk_data = [] def feed(self, action, data): ret = None raw_data, raw_action = data, action action = self.serializer.dumps(action) # +1 to account for the trailing new line character cur_size = len(action.encode("utf-8")) + 1 if data is not None: data = self.serializer.dumps(data) cur_size += len(data.encode("utf-8")) + 1 # full chunk, send it and start a new one if self.bulk_actions and ( self.size + cur_size > self.max_chunk_bytes or self.action_count == self.chunk_size ): ret = (self.bulk_data, self.bulk_actions) self.bulk_actions, self.bulk_data = [], [] self.size, self.action_count = 0, 0 self.bulk_actions.append(action) if data is not None: self.bulk_actions.append(data) self.bulk_data.append((raw_action, raw_data)) else: self.bulk_data.append((raw_action,)) self.size += cur_size self.action_count += 1 return ret def flush(self): ret = None if self.bulk_actions: ret = (self.bulk_data, self.bulk_actions) self.bulk_actions, self.bulk_data = [], [] return ret def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): """ Split actions into chunks by number or size, serialize them into strings in the process. """ chunker = _ActionChunker( chunk_size=chunk_size, max_chunk_bytes=max_chunk_bytes, serializer=serializer ) for action, data in actions: ret = chunker.feed(action, data) if ret: yield ret ret = chunker.flush() if ret: yield ret def _process_bulk_chunk_success(resp, bulk_data, ignore_status, raise_on_error=True): # if raise on error is set, we need to collect errors per chunk before raising them errors = [] # go through request-response pairs and detect failures for data, (op_type, item) in zip( bulk_data, map(methodcaller("popitem"), resp["items"]) ): status_code = item.get("status", 500) ok = 200 <= status_code < 300 if not ok and raise_on_error and status_code not in ignore_status: # include original document source if len(data) > 1: item["data"] = data[1] errors.append({op_type: item}) if ok or not errors: # if we are not just recording all errors to be able to raise # them all at once, yield items individually yield ok, {op_type: item} if errors: raise BulkIndexError("%i document(s) failed to index." 
% len(errors), errors) def _process_bulk_chunk_error( error, bulk_data, ignore_status, raise_on_exception=True, raise_on_error=True ): # default behavior - just propagate exception if raise_on_exception and error.status_code not in ignore_status: raise error # if we are not propagating, mark all actions in current chunk as failed err_message = str(error) exc_errors = [] for data in bulk_data: # collect all the information about failed actions op_type, action = data[0].copy().popitem() info = {"error": err_message, "status": error.status_code, "exception": error} if op_type != "delete": info["data"] = data[1] info.update(action) exc_errors.append({op_type: info}) # emulate standard behavior for failed actions if raise_on_error and error.status_code not in ignore_status: raise BulkIndexError( "%i document(s) failed to index." % len(exc_errors), exc_errors ) else: for err in exc_errors: yield False, err def _process_bulk_chunk( client, bulk_actions, bulk_data, raise_on_exception=True, raise_on_error=True, ignore_status=(), *args, **kwargs ): """ Send a bulk request to elasticsearch and process the output. """ kwargs = _add_helper_meta_to_kwargs(kwargs, "bp") if not isinstance(ignore_status, (list, tuple)): ignore_status = (ignore_status,) try: # send the actual request resp = client.bulk(*args, body="\n".join(bulk_actions) + "\n", **kwargs) except TransportError as e: gen = _process_bulk_chunk_error( error=e, bulk_data=bulk_data, ignore_status=ignore_status, raise_on_exception=raise_on_exception, raise_on_error=raise_on_error, ) else: gen = _process_bulk_chunk_success( resp=resp, bulk_data=bulk_data, ignore_status=ignore_status, raise_on_error=raise_on_error, ) for item in gen: yield item def _add_helper_meta_to_kwargs(kwargs, helper_meta): params = (kwargs or {}).pop("params", {}) params["__elastic_client_meta"] = (("h", helper_meta),) kwargs["params"] = params return kwargs def streaming_bulk( client, actions, chunk_size=500, max_chunk_bytes=100 * 1024 * 1024, raise_on_error=True, expand_action_callback=expand_action, raise_on_exception=True, max_retries=0, initial_backoff=2, max_backoff=600, yield_ok=True, ignore_status=(), *args, **kwargs ): """ Streaming bulk consumes actions from the iterable passed in and yields results per action. For non-streaming usecases use :func:`~elasticsearch.helpers.bulk` which is a wrapper around streaming bulk that returns summary information about the bulk operation once the entire input is consumed and sent. If you specify ``max_retries`` it will also retry any documents that were rejected with a ``429`` status code. To do this it will wait (**by calling time.sleep which will block**) for ``initial_backoff`` seconds and then, every subsequent rejection for the same chunk, for double the time every time up to ``max_backoff`` seconds. :arg client: instance of :class:`~elasticsearch.Elasticsearch` to use :arg actions: iterable containing the actions to be executed :arg chunk_size: number of docs in one chunk sent to es (default: 500) :arg max_chunk_bytes: the maximum size of the request in bytes (default: 100MB) :arg raise_on_error: raise ``BulkIndexError`` containing errors (as `.errors`) from the execution of the last chunk when some occur. By default we raise. :arg raise_on_exception: if ``False`` then don't propagate exceptions from call to ``bulk`` and just report the items that failed as failed. 
:arg expand_action_callback: callback executed on each action passed in, should return a tuple containing the action line and the data line (`None` if data line should be omitted). :arg max_retries: maximum number of times a document will be retried when ``429`` is received, set to 0 (default) for no retries on ``429`` :arg initial_backoff: number of seconds we should wait before the first retry. Any subsequent retries will be powers of ``initial_backoff * 2**retry_number`` :arg max_backoff: maximum number of seconds a retry will wait :arg yield_ok: if set to False will skip successful documents in the output :arg ignore_status: list of HTTP status code that you want to ignore """ actions = map(expand_action_callback, actions) for bulk_data, bulk_actions in _chunk_actions( actions, chunk_size, max_chunk_bytes, client.transport.serializer ): for attempt in range(max_retries + 1): to_retry, to_retry_data = [], [] if attempt: time.sleep(min(max_backoff, initial_backoff * 2 ** (attempt - 1))) try: for data, (ok, info) in zip( bulk_data, _process_bulk_chunk( client, bulk_actions, bulk_data, raise_on_exception, raise_on_error, ignore_status, *args, **kwargs ), ): if not ok: action, info = info.popitem() # retry if retries enabled, we get 429, and we are not # in the last attempt if ( max_retries and info["status"] == 429 and (attempt + 1) <= max_retries ): # _process_bulk_chunk expects strings so we need to # re-serialize the data to_retry.extend( map(client.transport.serializer.dumps, data) ) to_retry_data.append(data) else: yield ok, {action: info} elif yield_ok: yield ok, info except TransportError as e: # suppress 429 errors since we will retry them if attempt == max_retries or e.status_code != 429: raise else: if not to_retry: break # retry only subset of documents that didn't succeed bulk_actions, bulk_data = to_retry, to_retry_data def bulk(client, actions, stats_only=False, ignore_status=(), *args, **kwargs): """ Helper for the :meth:`~elasticsearch.Elasticsearch.bulk` api that provides a more human friendly interface - it consumes an iterator of actions and sends them to elasticsearch in chunks. It returns a tuple with summary information - number of successfully executed actions and either list of errors or number of errors if ``stats_only`` is set to ``True``. Note that by default we raise a ``BulkIndexError`` when we encounter an error so options like ``stats_only`` only apply when ``raise_on_error`` is set to ``False``. When errors are being collected original document data is included in the error dictionary which can lead to an extra high memory usage. If you need to process a lot of data and want to ignore/collect errors please consider using the :func:`~elasticsearch.helpers.streaming_bulk` helper which will just return the errors and not store them in memory. :arg client: instance of :class:`~elasticsearch.Elasticsearch` to use :arg actions: iterator containing the actions :arg stats_only: if `True` only report number of successful/failed operations instead of just number of successful and a list of error responses :arg ignore_status: list of HTTP status code that you want to ignore Any additional keyword arguments will be passed to :func:`~elasticsearch.helpers.streaming_bulk` which is used to execute the operation, see :func:`~elasticsearch.helpers.streaming_bulk` for more accepted parameters. 
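    Example (a minimal sketch; the client configuration, index name and
    documents below are illustrative assumptions)::

        from elasticsearch import Elasticsearch, helpers

        es = Elasticsearch()
        docs = ({"_index": "my-index", "_id": i, "value": i} for i in range(100))
        success, errors = helpers.bulk(es, docs)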
""" success, failed = 0, 0 # list of errors to be collected is not stats_only errors = [] # make streaming_bulk yield successful results so we can count them kwargs["yield_ok"] = True for ok, item in streaming_bulk( client, actions, ignore_status=ignore_status, *args, **kwargs ): # go through request-response pairs and detect failures if not ok: if not stats_only: errors.append(item) failed += 1 else: success += 1 return success, failed if stats_only else errors def parallel_bulk( client, actions, thread_count=4, chunk_size=500, max_chunk_bytes=100 * 1024 * 1024, queue_size=4, expand_action_callback=expand_action, ignore_status=(), *args, **kwargs ): """ Parallel version of the bulk helper run in multiple threads at once. :arg client: instance of :class:`~elasticsearch.Elasticsearch` to use :arg actions: iterator containing the actions :arg thread_count: size of the threadpool to use for the bulk requests :arg chunk_size: number of docs in one chunk sent to es (default: 500) :arg max_chunk_bytes: the maximum size of the request in bytes (default: 100MB) :arg raise_on_error: raise ``BulkIndexError`` containing errors (as `.errors`) from the execution of the last chunk when some occur. By default we raise. :arg raise_on_exception: if ``False`` then don't propagate exceptions from call to ``bulk`` and just report the items that failed as failed. :arg expand_action_callback: callback executed on each action passed in, should return a tuple containing the action line and the data line (`None` if data line should be omitted). :arg queue_size: size of the task queue between the main thread (producing chunks to send) and the processing threads. :arg ignore_status: list of HTTP status code that you want to ignore """ # Avoid importing multiprocessing unless parallel_bulk is used # to avoid exceptions on restricted environments like App Engine from multiprocessing.pool import ThreadPool actions = map(expand_action_callback, actions) class BlockingPool(ThreadPool): def _setup_queues(self): super(BlockingPool, self)._setup_queues() # type: ignore # The queue must be at least the size of the number of threads to # prevent hanging when inserting sentinel values during teardown. self._inqueue = Queue(max(queue_size, thread_count)) self._quick_put = self._inqueue.put pool = BlockingPool(thread_count) try: for result in pool.imap( lambda bulk_chunk: list( _process_bulk_chunk( client, bulk_chunk[1], bulk_chunk[0], ignore_status=ignore_status, *args, **kwargs ) ), _chunk_actions( actions, chunk_size, max_chunk_bytes, client.transport.serializer ), ): for item in result: yield item finally: pool.close() pool.join() def scan( client, query=None, scroll="5m", raise_on_error=True, preserve_order=False, size=1000, request_timeout=None, clear_scroll=True, scroll_kwargs=None, **kwargs ): """ Simple abstraction on top of the :meth:`~elasticsearch.Elasticsearch.scroll` api - a simple iterator that yields all hits as returned by underlining scroll requests. By default scan does not return results in any pre-determined order. To have a standard order in the returned documents (either by score or explicit sort definition) when scrolling, use ``preserve_order=True``. This may be an expensive operation and will negate the performance benefits of using ``scan``. 
:arg client: instance of :class:`~elasticsearch.Elasticsearch` to use :arg query: body for the :meth:`~elasticsearch.Elasticsearch.search` api :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg raise_on_error: raises an exception (``ScanError``) if an error is encountered (some shards fail to execute). By default we raise. :arg preserve_order: don't set the ``search_type`` to ``scan`` - this will cause the scroll to paginate with preserving the order. Note that this can be an extremely expensive operation and can easily lead to unpredictable results, use with caution. :arg size: size (per shard) of the batch send at each iteration. :arg request_timeout: explicit timeout for each call to ``scan`` :arg clear_scroll: explicitly calls delete on the scroll id via the clear scroll API at the end of the method on completion or error, defaults to true. :arg scroll_kwargs: additional kwargs to be passed to :meth:`~elasticsearch.Elasticsearch.scroll` Any additional keyword arguments will be passed to the initial :meth:`~elasticsearch.Elasticsearch.search` call:: scan(es, query={"query": {"match": {"title": "python"}}}, index="orders-*", doc_type="books" ) """ scroll_kwargs = scroll_kwargs.copy() if scroll_kwargs else {} scroll_kwargs["scroll"] = scroll _add_helper_meta_to_kwargs(scroll_kwargs, "s") if not preserve_order: query = query.copy() if query else {} query["sort"] = "_doc" # Grab options that should be propagated to every # API call within this helper instead of just 'search()' transport_kwargs = {} for key in ("headers", "api_key", "http_auth"): if key in kwargs: transport_kwargs[key] = kwargs[key] # If the user is using 'scroll_kwargs' we want # to propagate there too, but to not break backwards # compatibility we'll not override anything already given. if scroll_kwargs is not None and transport_kwargs: for key, val in transport_kwargs.items(): scroll_kwargs.setdefault(key, val) # initial search search_kwargs = kwargs.copy() # Setting query={"from": ...} would make 'from' be used # as a keyword argument instead of 'from_'. We handle that here. if "from" in search_kwargs: search_kwargs["from_"] = search_kwargs.pop("from") if query: search_kwargs.update(query) if "from" in search_kwargs: search_kwargs["from_"] = search_kwargs.pop("from") search_kwargs["scroll"] = scroll search_kwargs["size"] = size search_kwargs["request_timeout"] = request_timeout _add_helper_meta_to_kwargs(search_kwargs, "s") resp = client.search(**search_kwargs) scroll_id = resp.get("_scroll_id") try: while scroll_id and resp["hits"]["hits"]: for hit in resp["hits"]["hits"]: yield hit # Default to 0 if the value isn't included in the response shards_successful = resp["_shards"].get("successful", 0) shards_skipped = resp["_shards"].get("skipped", 0) shards_total = resp["_shards"].get("total", 0) # check if we have any errors if (shards_successful + shards_skipped) < shards_total: shards_message = "Scroll request has only succeeded on %d (+%d skipped) shards out of %d." 
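                # Partial success: some shards failed to respond. Always log a
                # warning; when raise_on_error is set (the default) the ScanError
                # below aborts the scan and carries the current scroll_id.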
logger.warning( shards_message, shards_successful, shards_skipped, shards_total, ) if raise_on_error: raise ScanError( scroll_id, shards_message % ( shards_successful, shards_skipped, shards_total, ), ) scroll_kwargs["scroll_id"] = scroll_id resp = client.scroll(**scroll_kwargs) scroll_id = resp.get("_scroll_id") finally: if scroll_id and clear_scroll: client.clear_scroll( scroll_id=scroll_id, ignore=(404,), params={"__elastic_client_meta": (("h", "s"),)}, **transport_kwargs ) def reindex( client, source_index, target_index, query=None, target_client=None, chunk_size=500, scroll="5m", op_type=None, scan_kwargs={}, bulk_kwargs={}, ): """ Reindex all documents from one index that satisfy a given query to another, potentially (if `target_client` is specified) on a different cluster. If you don't specify the query you will reindex all the documents. Since ``2.3`` a :meth:`~elasticsearch.Elasticsearch.reindex` api is available as part of elasticsearch itself. It is recommended to use the api instead of this helper wherever possible. The helper is here mostly for backwards compatibility and for situations where more flexibility is needed. .. note:: This helper doesn't transfer mappings, just the data. :arg client: instance of :class:`~elasticsearch.Elasticsearch` to use (for read if `target_client` is specified as well) :arg source_index: index (or list of indices) to read documents from :arg target_index: name of the index in the target cluster to populate :arg query: body for the :meth:`~elasticsearch.Elasticsearch.search` api :arg target_client: optional, is specified will be used for writing (thus enabling reindex between clusters) :arg chunk_size: number of docs in one chunk sent to es (default: 500) :arg scroll: Specify how long a consistent view of the index should be maintained for scrolled search :arg op_type: Explicit operation type. Defaults to '_index'. Data streams must be set to 'create'. If not specified, will auto-detect if target_index is a data stream. :arg scan_kwargs: additional kwargs to be passed to :func:`~elasticsearch.helpers.scan` :arg bulk_kwargs: additional kwargs to be passed to :func:`~elasticsearch.helpers.bulk` """ target_client = client if target_client is None else target_client docs = scan(client, query=query, index=source_index, scroll=scroll, **scan_kwargs) def _change_doc_index(hits, index, op_type): for h in hits: h["_index"] = index if op_type is not None: h["_op_type"] = op_type if "fields" in h: h.update(h.pop("fields")) yield h kwargs = {"stats_only": True} kwargs.update(bulk_kwargs) is_data_stream = False try: # Verify if the target_index is data stream or index data_streams = target_client.indices.get_data_stream( target_index, expand_wildcards="all" ) is_data_stream = any( data_stream["name"] == target_index for data_stream in data_streams["data_streams"] ) except (TransportError, KeyError, NotFoundError): # If its not data stream, might be index pass if is_data_stream: if op_type not in (None, "create"): raise ValueError("Data streams must have 'op_type' set to 'create'") else: op_type = "create" return bulk( target_client, _change_doc_index(docs, target_index, op_type), chunk_size=chunk_size, **kwargs ) elasticsearch-py-7.17.6/elasticsearch/helpers/actions.pyi000066400000000000000000000067761426163262700235440ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import logging from typing import ( Any, AsyncIterable, Callable, Collection, Dict, Generator, Iterable, List, Mapping, Optional, Tuple, Union, ) from ..client import Elasticsearch from ..serializer import Serializer logger: logging.Logger def expand_action(data: Any) -> Tuple[Dict[str, Any], Optional[Any]]: ... def _chunk_actions( actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Serializer ) -> Generator[Any, None, None]: ... def _process_bulk_chunk( client: Elasticsearch, bulk_actions: Any, bulk_data: Any, raise_on_exception: bool = ..., raise_on_error: bool = ..., *args: Any, **kwargs: Any ) -> Generator[Tuple[bool, Any], None, None]: ... def streaming_bulk( client: Elasticsearch, actions: Union[Iterable[Any], AsyncIterable[Any]], chunk_size: int = ..., max_chunk_bytes: int = ..., raise_on_error: bool = ..., expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., raise_on_exception: bool = ..., max_retries: int = ..., initial_backoff: Union[float, int] = ..., max_backoff: Union[float, int] = ..., yield_ok: bool = ..., ignore_status: Optional[Union[int, Collection[int]]] = ..., *args: Any, **kwargs: Any ) -> Generator[Tuple[bool, Any], None, None]: ... def bulk( client: Elasticsearch, actions: Iterable[Any], stats_only: bool = ..., ignore_status: Optional[Union[int, Collection[int]]] = ..., *args: Any, **kwargs: Any ) -> Tuple[int, Union[int, List[Any]]]: ... def parallel_bulk( client: Elasticsearch, actions: Iterable[Any], thread_count: int = ..., chunk_size: int = ..., max_chunk_bytes: int = ..., queue_size: int = ..., expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., ignore_status: Optional[Union[int, Collection[int]]] = ..., *args: Any, **kwargs: Any ) -> Generator[Tuple[bool, Any], None, None]: ... def scan( client: Elasticsearch, query: Optional[Any] = ..., scroll: str = ..., raise_on_error: bool = ..., preserve_order: bool = ..., size: int = ..., request_timeout: Optional[Union[float, int]] = ..., clear_scroll: bool = ..., scroll_kwargs: Optional[Mapping[str, Any]] = ..., **kwargs: Any ) -> Generator[Any, None, None]: ... def reindex( client: Elasticsearch, source_index: Union[str, Collection[str]], target_index: str, query: Any = ..., target_client: Optional[Elasticsearch] = ..., chunk_size: int = ..., scroll: str = ..., op_type: str = ..., scan_kwargs: Optional[Mapping[str, Any]] = ..., bulk_kwargs: Optional[Mapping[str, Any]] = ..., ) -> Tuple[int, Union[int, List[Any]]]: ... elasticsearch-py-7.17.6/elasticsearch/helpers/errors.py000066400000000000000000000022761426163262700232360ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from ..exceptions import ElasticsearchException class BulkIndexError(ElasticsearchException): @property def errors(self): """List of errors from execution of the last chunk.""" return self.args[1] class ScanError(ElasticsearchException): def __init__(self, scroll_id, *args, **kwargs): super(ScanError, self).__init__(*args, **kwargs) # type: ignore self.scroll_id = scroll_id elasticsearch-py-7.17.6/elasticsearch/helpers/errors.pyi000066400000000000000000000021221426163262700233750ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, List from ..exceptions import ElasticsearchException class BulkIndexError(ElasticsearchException): @property def errors(self) -> List[Any]: ... class ScanError(ElasticsearchException): scroll_id: str def __init__(self, scroll_id: str, *args: Any, **kwargs: Any) -> None: ... elasticsearch-py-7.17.6/elasticsearch/helpers/test.py000066400000000000000000000061401426163262700226730ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
# type: ignore import os import time from os.path import abspath, dirname, join from unittest import SkipTest, TestCase from elasticsearch import Elasticsearch from elasticsearch.exceptions import ConnectionError if "ELASTICSEARCH_URL" in os.environ: ELASTICSEARCH_URL = os.environ["ELASTICSEARCH_URL"] else: ELASTICSEARCH_URL = "https://elastic:changeme@localhost:9200" CA_CERTS = join(dirname(dirname(dirname(abspath(__file__)))), ".ci/certs/ca.pem") def get_test_client(nowait=False, **kwargs): # construct kwargs from the environment kw = {"timeout": 30, "ca_certs": CA_CERTS} if "PYTHON_CONNECTION_CLASS" in os.environ: from elasticsearch import connection kw["connection_class"] = getattr( connection, os.environ["PYTHON_CONNECTION_CLASS"] ) kw.update(kwargs) client = Elasticsearch(ELASTICSEARCH_URL, **kw) # wait for yellow status for _ in range(1 if nowait else 100): try: client.cluster.health(wait_for_status="yellow") return client except ConnectionError: time.sleep(0.1) else: # timeout raise SkipTest("Elasticsearch failed to start.") class ElasticsearchTestCase(TestCase): @staticmethod def _get_client(): return get_test_client() @classmethod def setup_class(cls): cls.client = cls._get_client() def teardown_method(self, _): # Hidden indices expanded in wildcards in ES 7.7 expand_wildcards = ["open", "closed"] if self.es_version() >= (7, 7): expand_wildcards.append("hidden") self.client.indices.delete_data_stream( name="*", ignore=404, expand_wildcards=expand_wildcards ) self.client.indices.delete( index="*", ignore=404, expand_wildcards=expand_wildcards ) self.client.indices.delete_template(name="*", ignore=404) def es_version(self): if not hasattr(self, "_es_version"): self._es_version = es_version(self.client) return self._es_version def _get_version(version_string): if "." not in version_string: return () version = version_string.strip().split(".") return tuple(int(v) if v.isdigit() else 999 for v in version) def es_version(client): return _get_version(client.info()["version"]["number"]) elasticsearch-py-7.17.6/elasticsearch/helpers/test.pyi000066400000000000000000000024471426163262700230520ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Tuple from unittest import TestCase from ..client import Elasticsearch ELASTICSEARCH_URL: str CA_CERTS: str def get_test_client(nowait: bool = ..., **kwargs: Any) -> Elasticsearch: ... def _get_version(version_string: str) -> Tuple[int, ...]: ... class ElasticsearchTestCase(TestCase): @staticmethod def _get_client() -> Elasticsearch: ... @classmethod def setup_class(cls) -> None: ... def teardown_method(self, _: Any) -> None: ... def es_version(self) -> Tuple[int, ...]: ... 
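# Usage sketch for the helpers above (assumes a reachable cluster at
# ELASTICSEARCH_URL; both names exist in this module):
#
#     from elasticsearch.helpers.test import get_test_client, es_version
#
#     client = get_test_client()
#     assert es_version(client) >= (7, 0, 0)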
elasticsearch-py-7.17.6/elasticsearch/py.typed000066400000000000000000000000001426163262700213640ustar00rootroot00000000000000elasticsearch-py-7.17.6/elasticsearch/serializer.py000066400000000000000000000165771426163262700224420ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. try: import simplejson as json except ImportError: import json import uuid from datetime import date, datetime from decimal import Decimal from .compat import string_types from .exceptions import ImproperlyConfigured, SerializationError INTEGER_TYPES = () FLOAT_TYPES = (Decimal,) TIME_TYPES = (date, datetime) class Serializer(object): mimetype = "" def loads(self, s): raise NotImplementedError() def dumps(self, data): raise NotImplementedError() class TextSerializer(Serializer): mimetype = "text/plain" def loads(self, s): return s def dumps(self, data): if isinstance(data, string_types): return data raise SerializationError("Cannot serialize %r into text." % data) class JSONSerializer(Serializer): mimetype = "application/json" def default(self, data): if isinstance(data, TIME_TYPES): # Little hack to avoid importing pandas but to not # return 'NaT' string for pd.NaT as that's not a valid # Elasticsearch date. formatted_data = data.isoformat() if formatted_data != "NaT": return formatted_data if isinstance(data, uuid.UUID): return str(data) elif isinstance(data, FLOAT_TYPES): return float(data) # This is kept for backwards compatibility even # if 'INTEGER_TYPES' isn't used by default anymore. elif INTEGER_TYPES and isinstance(data, INTEGER_TYPES): return int(data) # Special cases for numpy and pandas types # These are expensive to import so we try them last. 
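        # json.dumps() only invokes default() for objects it cannot encode
        # natively, so this fallback runs once per unsupported value.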
serialized, value = _attempt_serialize_numpy_or_pandas(data) if serialized: return value raise TypeError("Unable to serialize %r (type: %s)" % (data, type(data))) def loads(self, s): try: return json.loads(s) except (ValueError, TypeError) as e: raise SerializationError(s, e) def dumps(self, data): # don't serialize strings if isinstance(data, string_types): return data try: return json.dumps( data, default=self.default, ensure_ascii=False, separators=(",", ":") ) except (ValueError, TypeError) as e: raise SerializationError(data, e) class MapboxVectorTileSerializer(Serializer): mimetype = "application/vnd.mapbox-vector-tile" def loads(self, s): return s def dumps(self, data): if isinstance(data, string_types): return data raise SerializationError("Cannot serialize %r into a MapBox vector tile" % data) DEFAULT_SERIALIZERS = { JSONSerializer.mimetype: JSONSerializer(), TextSerializer.mimetype: TextSerializer(), MapboxVectorTileSerializer.mimetype: MapboxVectorTileSerializer(), } class Deserializer(object): def __init__(self, serializers, default_mimetype="application/json"): try: self.default = serializers[default_mimetype] except KeyError: raise ImproperlyConfigured( "Cannot find default serializer (%s)" % default_mimetype ) self.serializers = serializers def loads(self, s, mimetype=None): if not mimetype: deserializer = self.default else: # split out 'charset' and 'compatible-width' options mimetype = mimetype.partition(";")[0].strip() # Treat 'application/vnd.elasticsearch+json' # as application/json for compatibility. if mimetype == "application/vnd.elasticsearch+json": mimetype = "application/json" try: deserializer = self.serializers[mimetype] except KeyError: raise SerializationError( "Unknown mimetype, unable to deserialize: %s" % mimetype ) return deserializer.loads(s) def _attempt_serialize_numpy_or_pandas(data): """Attempts to serialize a value from the numpy or pandas libraries. This function is separate from JSONSerializer because the inner functions are rewritten to be no-ops if either library isn't available to avoid attempting to import and raising an ImportError over and over again. Returns a tuple of (bool, Any) where the bool corresponds to whether the second value contains a properly serialized value and thus should be returned by JSONSerializer.default(). """ serialized, value = _attempt_serialize_numpy(data) if serialized: return serialized, value serialized, value = _attempt_serialize_pandas(data) if serialized: return serialized, value return False, None def _attempt_serialize_numpy(data): global _attempt_serialize_numpy try: import numpy as np # type: ignore if isinstance( data, ( np.int_, np.intc, np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16, np.uint32, np.uint64, ), ): return True, int(data) elif isinstance( data, ( np.float_, np.float16, np.float32, np.float64, ), ): return True, float(data) elif isinstance(data, np.bool_): return True, bool(data) elif isinstance(data, np.datetime64): return True, data.item().isoformat() elif isinstance(data, np.ndarray): return True, data.tolist() except ImportError: # Since we failed to import 'numpy' we don't want to try again. 
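        # Rebinding the module-level name to the no-op below means every
        # subsequent call returns (False, None) without retrying the import.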
_attempt_serialize_numpy = _attempt_serialize_noop return False, None def _attempt_serialize_pandas(data): global _attempt_serialize_pandas try: import pandas as pd # type: ignore if isinstance(data, (pd.Series, pd.Categorical)): return True, data.tolist() elif isinstance(data, pd.Timestamp) and data is not getattr(pd, "NaT", None): return True, data.isoformat() elif data is getattr(pd, "NA", None): return True, None except ImportError: # Since we failed to import 'pandas' we don't want to try again. _attempt_serialize_pandas = _attempt_serialize_noop return False, None def _attempt_serialize_noop(data): # noqa # Short-circuit if the above functions can't import # the corresponding library on the first attempt. return False, None elasticsearch-py-7.17.6/elasticsearch/serializer.pyi000066400000000000000000000033101426163262700225700ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Dict, Optional class Serializer(object): mimetype: str def loads(self, s: str) -> Any: ... def dumps(self, data: Any) -> str: ... class TextSerializer(Serializer): mimetype: str def loads(self, s: str) -> Any: ... def dumps(self, data: Any) -> str: ... class JSONSerializer(Serializer): mimetype: str def default(self, data: Any) -> Any: ... def loads(self, s: str) -> Any: ... def dumps(self, data: Any) -> str: ... class MapboxVectorTileSerializer(Serializer): mimetype: str def loads(self, s: bytes) -> bytes: ... # type: ignore def dumps(self, data: bytes) -> bytes: ... # type: ignore DEFAULT_SERIALIZERS: Dict[str, Serializer] class Deserializer(object): def __init__( self, serializers: Dict[str, Serializer], default_mimetype: str = ..., ) -> None: ... def loads(self, s: str, mimetype: Optional[str] = ...) -> Any: ... elasticsearch-py-7.17.6/elasticsearch/transport.py000066400000000000000000000666441426163262700223250ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
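# Illustrative sketch (an assumption, not part of this module): a custom
# serializer for extra Python types can be passed to Transport below via
# its 'serializer' argument:
#
#     from elasticsearch.serializer import JSONSerializer
#
#     class SetAwareSerializer(JSONSerializer):
#         def default(self, data):
#             if isinstance(data, set):
#                 return sorted(data)
#             return super(SetAwareSerializer, self).default(data)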
import re import time import warnings from itertools import chain from platform import python_version from ._version import __versionstr__ from .compat import Lock from .connection import Urllib3HttpConnection from .connection_pool import ConnectionPool, DummyConnectionPool, EmptyConnectionPool from .exceptions import ( AuthenticationException, AuthorizationException, ConnectionError, ConnectionTimeout, ElasticsearchWarning, SerializationError, TransportError, UnsupportedProductError, ) from .serializer import DEFAULT_SERIALIZERS, Deserializer, JSONSerializer from .utils import _client_meta_version def get_host_info(node_info, host): """ Simple callback that takes the node info from `/_cluster/nodes` and a parsed connection information and return the connection information. If `None` is returned this node will be skipped. Useful for filtering nodes (by proximity for example) or if additional information needs to be provided for the :class:`~elasticsearch.Connection` class. By default master only nodes are filtered out since they shouldn't typically be used for API operations. :arg node_info: node information from `/_cluster/nodes` :arg host: connection information (host, port) extracted from the node info """ # ignore master only nodes if node_info.get("roles", []) == ["master"]: return None return host class Transport(object): """ Encapsulation of transport-related to logic. Handles instantiation of the individual connections as well as creating a connection pool to hold them. Main interface is the `perform_request` method. """ DEFAULT_CONNECTION_CLASS = Urllib3HttpConnection def __init__( self, hosts, connection_class=None, connection_pool_class=ConnectionPool, host_info_callback=get_host_info, sniff_on_start=False, sniffer_timeout=None, sniff_timeout=0.1, sniff_on_connection_fail=False, serializer=JSONSerializer(), serializers=None, default_mimetype="application/json", max_retries=3, retry_on_status=(502, 503, 504), retry_on_timeout=False, send_get_body_as="GET", meta_header=True, **kwargs ): """ :arg hosts: list of dictionaries, each containing keyword arguments to create a `connection_class` instance :arg connection_class: subclass of :class:`~elasticsearch.Connection` to use :arg connection_pool_class: subclass of :class:`~elasticsearch.ConnectionPool` to use :arg host_info_callback: callback responsible for taking the node information from `/_cluster/nodes`, along with already extracted information, and producing a list of arguments (same as `hosts` parameter) :arg sniff_on_start: flag indicating whether to obtain a list of nodes from the cluster at startup time :arg sniffer_timeout: number of seconds between automatic sniffs :arg sniff_on_connection_fail: flag controlling if connection failure triggers a sniff :arg sniff_timeout: timeout used for the sniff request - it should be a fast api call and we are talking potentially to more nodes so we want to fail quickly. Not used during initial sniffing (if ``sniff_on_start`` is on) when the connection still isn't initialized. :arg serializer: serializer instance :arg serializers: optional dict of serializer instances that will be used for deserializing data coming from the server. (key is the mimetype) :arg default_mimetype: when no mimetype is specified by the server response assume this mimetype, defaults to `'application/json'` :arg max_retries: maximum number of retries before an exception is propagated :arg retry_on_status: set of HTTP status codes on which we should retry on a different node. 
defaults to ``(502, 503, 504)`` :arg retry_on_timeout: should timeout trigger a retry on different node? (default `False`) :arg send_get_body_as: for GET requests with body this option allows you to specify an alternate way of execution for environments that don't support passing bodies with GET requests. If you set this to 'POST' a POST method will be used instead, if to 'source' then the body will be serialized and passed as a query parameter `source`. :arg meta_header: If True will send the 'X-Elastic-Client-Meta' HTTP header containing simple client metadata. Setting to False will disable the header. Defaults to True. Any extra keyword arguments will be passed to the `connection_class` when creating and instance unless overridden by that connection's options provided as part of the hosts parameter. """ if connection_class is None: connection_class = self.DEFAULT_CONNECTION_CLASS if not isinstance(meta_header, bool): raise TypeError("meta_header must be of type bool") if send_get_body_as != "GET": warnings.warn( "The 'send_get_body_as' parameter is no longer necessary " "and will be removed in 8.0", category=DeprecationWarning, stacklevel=2, ) # serialization config _serializers = DEFAULT_SERIALIZERS.copy() # if a serializer has been specified, use it for deserialization as well _serializers[serializer.mimetype] = serializer # if custom serializers map has been supplied, override the defaults with it if serializers: _serializers.update(serializers) # create a deserializer with our config self.deserializer = Deserializer(_serializers, default_mimetype) self.max_retries = max_retries self.retry_on_timeout = retry_on_timeout self.retry_on_status = retry_on_status self.send_get_body_as = send_get_body_as self.meta_header = meta_header # data serializer self.serializer = serializer # store all strategies... self.connection_pool_class = connection_pool_class self.connection_class = connection_class # ...save kwargs to be passed to the connections self.kwargs = kwargs self.hosts = hosts # Start with an empty pool specifically for `AsyncTransport`. # It should never be used, will be replaced on first call to # .set_connections() self.connection_pool = EmptyConnectionPool() if hosts: # ...and instantiate them self.set_connections(hosts) # retain the original connection instances for sniffing self.seed_connections = list(self.connection_pool.connections[:]) else: self.seed_connections = [] # Don't enable sniffing on Cloud instances. if kwargs.get("cloud_id", False): sniff_on_start = False sniff_on_connection_fail = False # sniffing data self.sniffer_timeout = sniffer_timeout self.sniff_on_start = sniff_on_start self.sniff_on_connection_fail = sniff_on_connection_fail self.last_sniff = time.time() self.sniff_timeout = sniff_timeout # callback to construct host dict from data in /_cluster/nodes self.host_info_callback = host_info_callback if sniff_on_start: self.sniff_hosts(True) # Create the default metadata for the x-elastic-client-meta # HTTP header. 
Only requires adding the (service, service_version) # tuple to the beginning of the client_meta self._client_meta = ( ("es", _client_meta_version(__versionstr__)), ("py", _client_meta_version(python_version())), ("t", _client_meta_version(__versionstr__)), ) # Grab the 'HTTP_CLIENT_META' property from the connection class http_client_meta = getattr(connection_class, "HTTP_CLIENT_META", None) if http_client_meta: self._client_meta += (http_client_meta,) # Tri-state flag that describes what state the verification # of whether we're connected to an Elasticsearch cluster or not. # The three states are: # - 'None': Means we've either not started the verification process # or that the verification is in progress. '_verified_once' ensures # that multiple requests don't kick off multiple verification processes. # - 'True': Means we've verified that we're talking to Elasticsearch or # that we can't rule out Elasticsearch due to auth issues. A warning # will be raised if we receive 401/403. # - 'int': Means we're talking to an unsupported product, should raise # the corresponding error. self._verified_elasticsearch = None # Ensures that the ES verification request only fires once and that # all requests block until this request returns back. self._verify_elasticsearch_lock = Lock() def add_connection(self, host): """ Create a new :class:`~elasticsearch.Connection` instance and add it to the pool. :arg host: kwargs that will be used to create the instance """ self.hosts.append(host) self.set_connections(self.hosts) def set_connections(self, hosts): """ Instantiate all the connections and create new connection pool to hold them. Tries to identify unchanged hosts and re-use existing :class:`~elasticsearch.Connection` instances. :arg hosts: same as `__init__` """ # construct the connections def _create_connection(host): # if this is not the initial setup look at the existing connection # options and identify connections that haven't changed and can be # kept around. if hasattr(self, "connection_pool"): for (connection, old_host) in self.connection_pool.connection_opts: if old_host == host: return connection # previously unseen params, create new connection kwargs = self.kwargs.copy() kwargs.update(host) return self.connection_class(**kwargs) connections = map(_create_connection, hosts) connections = list(zip(connections, hosts)) if len(connections) == 1: self.connection_pool = DummyConnectionPool(connections) else: # pass the hosts dicts to the connection pool to optionally extract parameters from self.connection_pool = self.connection_pool_class( connections, **self.kwargs ) def get_connection(self): """ Retrieve a :class:`~elasticsearch.Connection` instance from the :class:`~elasticsearch.ConnectionPool` instance. """ if self.sniffer_timeout: if time.time() >= self.last_sniff + self.sniffer_timeout: self.sniff_hosts() return self.connection_pool.get_connection() def _get_sniff_data(self, initial=False): """ Perform the request to get sniffing information. Returns a list of dictionaries (one per node) containing all the information from the cluster. It also sets the last_sniff attribute in case of a successful attempt. In rare cases it might be possible to override this method in your custom Transport class to serve data from alternative source like configuration management. 
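        An override might look like (an illustrative sketch; ``my_config_store``
        is a hypothetical, assumed source)::

            def _get_sniff_data(self, initial=False):
                self.last_sniff = time.time()
                return my_config_store.list_es_nodes()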
""" previous_sniff = self.last_sniff try: # reset last_sniff timestamp self.last_sniff = time.time() # go through all current connections as well as the # seed_connections for good measure for c in chain(self.connection_pool.connections, self.seed_connections): try: # use small timeout for the sniffing request, should be a fast api call _, headers, node_info = c.perform_request( "GET", "/_nodes/_all/http", timeout=self.sniff_timeout if not initial else None, ) # Lowercase all the header names for consistency in accessing them. headers = { header.lower(): value for header, value in headers.items() } node_info = self.deserializer.loads( node_info, headers.get("content-type") ) break except (ConnectionError, SerializationError): pass else: raise TransportError("N/A", "Unable to sniff hosts.") except Exception: # keep the previous value on error self.last_sniff = previous_sniff raise return list(node_info["nodes"].values()) def _get_host_info(self, host_info): host = {} address = host_info.get("http", {}).get("publish_address") # malformed or no address given if not address or ":" not in address: return None if "/" in address: # Support 7.x host/ip:port behavior where http.publish_host has been set. fqdn, ipaddress = address.split("/", 1) host["host"] = fqdn _, host["port"] = ipaddress.rsplit(":", 1) host["port"] = int(host["port"]) else: host["host"], host["port"] = address.rsplit(":", 1) host["port"] = int(host["port"]) return self.host_info_callback(host_info, host) def sniff_hosts(self, initial=False): """ Obtain a list of nodes from the cluster and create a new connection pool using the information retrieved. To extract the node connection parameters use the ``nodes_to_host_callback``. :arg initial: flag indicating if this is during startup (``sniff_on_start``), ignore the ``sniff_timeout`` if ``True`` """ node_info = self._get_sniff_data(initial) hosts = list(filter(None, (self._get_host_info(n) for n in node_info))) # we weren't able to get any nodes or host_info_callback blocked all - # raise error. if not hosts: raise TransportError( "N/A", "Unable to sniff hosts - no viable hosts found." ) self.set_connections(hosts) def mark_dead(self, connection): """ Mark a connection as dead (failed) in the connection pool. If sniffing on failure is enabled this will initiate the sniffing process. :arg connection: instance of :class:`~elasticsearch.Connection` that failed """ # mark as dead even when sniffing to avoid hitting this host during the sniff process self.connection_pool.mark_dead(connection) if self.sniff_on_connection_fail: self.sniff_hosts() def perform_request(self, method, url, headers=None, params=None, body=None): """ Perform the actual request. Retrieve a connection from the connection pool, pass all the information to it's perform_request method and return the data. If an exception was raised, mark the connection as failed and retry (up to `max_retries` times). If the operation was successful and the connection used was previously marked as dead, mark it as live, resetting it's failure count. 
:arg method: HTTP method to use :arg url: absolute url (without host) to target :arg headers: dictionary of headers, will be handed over to the underlying :class:`~elasticsearch.Connection` class :arg params: dictionary of query parameters, will be handed over to the underlying :class:`~elasticsearch.Connection` class for serialization :arg body: body of the request, will be serialized using serializer and passed to the connection """ method, headers, params, body, ignore, timeout = self._resolve_request_args( method, headers, params, body ) # Before we make the actual API call we verify the Elasticsearch instance. if self._verified_elasticsearch is None: self._do_verify_elasticsearch(headers=headers, timeout=timeout) # If '_verified_elasticsearch' isn't 'True' then we raise an error. if self._verified_elasticsearch is not True: _ProductChecker.raise_error(self._verified_elasticsearch) for attempt in range(self.max_retries + 1): connection = self.get_connection() try: status, headers_response, data = connection.perform_request( method, url, params, body, headers=headers, ignore=ignore, timeout=timeout, ) # Lowercase all the header names for consistency in accessing them. headers_response = { header.lower(): value for header, value in headers_response.items() } except TransportError as e: if method == "HEAD" and e.status_code == 404: return False retry = False if isinstance(e, ConnectionTimeout): retry = self.retry_on_timeout elif isinstance(e, ConnectionError): retry = True elif e.status_code in self.retry_on_status: retry = True if retry: try: # only mark as dead if we are retrying self.mark_dead(connection) except TransportError: # If sniffing on failure, it could fail too. Catch the # exception not to interrupt the retries. pass # raise exception on last retry if attempt == self.max_retries: raise e else: raise e else: # connection didn't fail, confirm it's live status self.connection_pool.mark_live(connection) if method == "HEAD": return 200 <= status < 300 if data: data = self.deserializer.loads( data, headers_response.get("content-type") ) return data def close(self): """ Explicitly closes connections """ self.connection_pool.close() def _resolve_request_args(self, method, headers, params, body): """Resolves parameters for .perform_request()""" if body is not None: body = self.serializer.dumps(body) # some clients or environments don't support sending GET with body if method in ("HEAD", "GET") and self.send_get_body_as != "GET": # send it as post instead if self.send_get_body_as == "POST": method = "POST" # or as source parameter elif self.send_get_body_as == "source": if params is None: params = {} params["source"] = body body = None if body is not None: try: body = body.encode("utf-8", "surrogatepass") except (UnicodeDecodeError, AttributeError): # bytes/str - no need to re-encode pass ignore = () timeout = None if params: timeout = params.pop("request_timeout", None) ignore = params.pop("ignore", ()) if isinstance(ignore, int): ignore = (ignore,) client_meta = params.pop("__elastic_client_meta", ()) else: client_meta = () if self.meta_header: headers = headers or {} client_meta = self._client_meta + client_meta headers["x-elastic-client-meta"] = ",".join( "%s=%s" % (k, v) for k, v in client_meta ) return method, headers, params, body, ignore, timeout def _do_verify_elasticsearch(self, headers, timeout): """Verifies that we're connected to an Elasticsearch cluster. 
This is done at least once before the first actual API call and makes a single request to the 'GET /' API endpoint to check the version along with other details of the response. If we're unable to verify we're talking to Elasticsearch but we're also unable to rule it out due to a permission error we instead emit an 'ElasticsearchWarning'. """ # Ensure that there's only one thread within this section # at a time to not emit unnecessary index API calls. with self._verify_elasticsearch_lock: # Product check has already been completed while we were # waiting our turn, no need to do again. if self._verified_elasticsearch is not None: return headers = { header.lower(): value for header, value in (headers or {}).items() } # We know we definitely want JSON so request it via 'accept' headers.setdefault("accept", "application/json") info_headers = {} info_response = {} error = None attempted_conns = [] for conn in chain(self.connection_pool.connections, self.seed_connections): # Only attempt once per connection max. if conn in attempted_conns: continue attempted_conns.append(conn) try: _, info_headers, info_response = conn.perform_request( "GET", "/", headers=headers, timeout=timeout ) # Lowercase all the header names for consistency in accessing them. info_headers = { header.lower(): value for header, value in info_headers.items() } info_response = self.deserializer.loads( info_response, mimetype="application/json" ) break # Previous versions of 7.x Elasticsearch required a specific # permission so if we receive HTTP 401/403 we should warn # instead of erroring out. except (AuthenticationException, AuthorizationException): warnings.warn( ( "The client is unable to verify that the server is " "Elasticsearch due security privileges on the server side" ), ElasticsearchWarning, stacklevel=5, ) self._verified_elasticsearch = True return # This connection didn't work, we'll try another. except (ConnectionError, SerializationError, TransportError) as err: if error is None: error = err # If we received a connection error and weren't successful # anywhere then we re-raise the more appropriate error. if error and not info_response: raise error # Check the information we got back from the index request. self._verified_elasticsearch = _ProductChecker.check_product( info_headers, info_response ) class _ProductChecker: """Class which verifies we're connected to a supported product""" # States that can be returned from 'check_product' SUCCESS = True UNSUPPORTED_PRODUCT = 2 UNSUPPORTED_DISTRIBUTION = 3 @classmethod def raise_error(cls, state): # These states mean the product_check() didn't fail so do nothing. if state in (None, True): return if state == cls.UNSUPPORTED_DISTRIBUTION: message = ( "The client noticed that the server is not " "a supported distribution of Elasticsearch" ) else: # UNSUPPORTED_PRODUCT message = ( "The client noticed that the server is not Elasticsearch " "and we do not support this unknown product" ) raise UnsupportedProductError(message) @classmethod def check_product(cls, headers, response): # type: (dict[str, str], dict[str, str]) -> int """Verifies that the server we're talking to is Elasticsearch. Does this by checking HTTP headers and the deserialized response to the 'info' API. Returns one of the states above. 
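        For example, a 7.0-7.13 response whose ``version.build_flavor`` is not
        ``"default"`` maps to ``UNSUPPORTED_DISTRIBUTION``, while a missing
        ``X-Elastic-Product`` header on 7.14+ maps to ``UNSUPPORTED_PRODUCT``.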
""" try: version = response.get("version", {}) version_number = tuple( int(x) if x is not None else 999 for x in re.search( r"^([0-9]+)\.([0-9]+)(?:\.([0-9]+))?", version["number"] ).groups() ) except (KeyError, TypeError, ValueError, AttributeError): # No valid 'version.number' field, effectively 0.0.0 version = {} version_number = (0, 0, 0) # Check all of the fields and headers for missing/valid values. try: bad_tagline = response.get("tagline", None) != "You Know, for Search" bad_build_flavor = version.get("build_flavor", None) != "default" bad_product_header = ( headers.get("x-elastic-product", None) != "Elasticsearch" ) except (AttributeError, TypeError): bad_tagline = True bad_build_flavor = True bad_product_header = True # 7.0-7.13 and there's a bad 'tagline' or unsupported 'build_flavor' if (7, 0, 0) <= version_number < (7, 14, 0): if bad_tagline: return cls.UNSUPPORTED_PRODUCT elif bad_build_flavor: return cls.UNSUPPORTED_DISTRIBUTION elif ( # No version or version less than 6.x version_number < (6, 0, 0) # 6.x and there's a bad 'tagline' or ((6, 0, 0) <= version_number < (7, 0, 0) and bad_tagline) # 7.14+ and there's a bad 'X-Elastic-Product' HTTP header or ((7, 14, 0) <= version_number and bad_product_header) ): return cls.UNSUPPORTED_PRODUCT return True elasticsearch-py-7.17.6/elasticsearch/transport.pyi000066400000000000000000000062731426163262700224660ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Callable, Collection, Dict, List, Mapping, Optional, Type, Union from .connection import Connection from .connection_pool import ConnectionPool from .serializer import Deserializer, Serializer def get_host_info( node_info: Dict[str, Any], host: Optional[Dict[str, Any]] ) -> Optional[Dict[str, Any]]: ... 
class Transport(object): DEFAULT_CONNECTION_CLASS: Type[Connection] connection_pool: ConnectionPool deserializer: Deserializer max_retries: int retry_on_timeout: bool retry_on_status: Collection[int] send_get_body_as: str serializer: Serializer connection_pool_class: Type[ConnectionPool] connection_class: Type[Connection] kwargs: Any hosts: Optional[List[Dict[str, Any]]] seed_connections: List[Connection] sniffer_timeout: Optional[float] sniff_on_start: bool sniff_on_connection_fail: bool last_sniff: float sniff_timeout: Optional[float] host_info_callback: Callable[ [Dict[str, Any], Optional[Dict[str, Any]]], Optional[Dict[str, Any]] ] def __init__( self, hosts: Any, connection_class: Optional[Type[Any]] = ..., connection_pool_class: Type[ConnectionPool] = ..., host_info_callback: Callable[ [Dict[str, Any], Optional[Dict[str, Any]]], Optional[Dict[str, Any]] ] = ..., sniff_on_start: bool = ..., sniffer_timeout: Optional[float] = ..., sniff_timeout: float = ..., sniff_on_connection_fail: bool = ..., serializer: Serializer = ..., serializers: Optional[Mapping[str, Serializer]] = ..., default_mimetype: str = ..., max_retries: int = ..., retry_on_status: Collection[int] = ..., retry_on_timeout: bool = ..., send_get_body_as: str = ..., meta_header: bool = ..., **kwargs: Any ) -> None: ... def add_connection(self, host: Any) -> None: ... def set_connections(self, hosts: Collection[Any]) -> None: ... def get_connection(self) -> Connection: ... def sniff_hosts(self, initial: bool = ...) -> None: ... def mark_dead(self, connection: Connection) -> None: ... def perform_request( self, method: str, url: str, headers: Optional[Mapping[str, str]] = ..., params: Optional[Mapping[str, Any]] = ..., body: Optional[Any] = ..., ) -> Union[bool, Any]: ... def close(self) -> None: ... elasticsearch-py-7.17.6/elasticsearch/utils.py000066400000000000000000000022301426163262700214060ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import re def _client_meta_version(version): """Transforms a Python package version to one compatible with 'X-Elastic-Client-Meta'. Essentially replaces any pre-release information with a 'p' suffix. """ version, version_pre = re.match( r"^([0-9][0-9.]*[0-9]|[0-9])(.*)$", version ).groups() if version_pre: version += "p" return version elasticsearch-py-7.17.6/elasticsearch/utils.pyi000066400000000000000000000015071426163262700215650ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. def _client_meta_version(version: str) -> str: ... elasticsearch-py-7.17.6/noxfile.py000066400000000000000000000057221426163262700171110ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import nox SOURCE_FILES = ( "setup.py", "noxfile.py", "elasticsearch/", "test_elasticsearch/", "utils/", ) @nox.session(python=["2.7", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9"]) def test(session): session.install(".") session.install("-r", "dev-requirements.txt") session.run("python", "setup.py", "test") @nox.session() def format(session): session.install("black==21.12b0", "click==8.0.4", "isort") session.run("isort", "--profile=black", *SOURCE_FILES) session.run("black", "--target-version=py27", *SOURCE_FILES) session.run("python", "utils/license-headers.py", "fix", *SOURCE_FILES) lint(session) @nox.session() def lint(session): session.install( "flake8", "black==21.12b0", "click==8.0.4", "mypy", "isort", "types-requests" ) session.run("isort", "--check", "--profile=black", *SOURCE_FILES) session.run("black", "--target-version=py27", "--check", *SOURCE_FILES) session.run("flake8", *SOURCE_FILES) session.run("python", "utils/license-headers.py", "check", *SOURCE_FILES) # Workaround to make '-r' to still work despite uninstalling aiohttp below. session.run("python", "-m", "pip", "install", "aiohttp") # Run mypy on the package and then the type examples separately for # the two different mypy use-cases, ourselves and our users. session.run("mypy", "--strict", "elasticsearch/") session.run("mypy", "--strict", "test_elasticsearch/test_types/sync_types.py") session.run("mypy", "--strict", "test_elasticsearch/test_types/async_types.py") # Make sure we don't require aiohttp to be installed for users to # receive type hint information from mypy. 
session.run("python", "-m", "pip", "uninstall", "--yes", "aiohttp") session.run("mypy", "--strict", "elasticsearch/") session.run("mypy", "--strict", "test_elasticsearch/test_types/sync_types.py") @nox.session() def docs(session): session.install(".") session.install( "-rdev-requirements.txt", "sphinx-rtd-theme", "sphinx-autodoc-typehints" ) session.run("python", "-m", "pip", "install", "sphinx-autodoc-typehints") session.run("sphinx-build", "docs/sphinx/", "docs/sphinx/_build", "-b", "html") elasticsearch-py-7.17.6/setup.cfg000066400000000000000000000006541426163262700167130ustar00rootroot00000000000000[build_sphinx] source-dir = docs/ build-dir = docs/_build all_files = 1 [bdist_wheel] universal = 1 [bdist_rpm] requires = python python-urllib3 [flake8] ignore = E203, E266, E501, W503 [tool:pytest] junit_family=legacy addopts = -vvv -p no:logging --cov-report=term-missing --cov=elasticsearch --cov-config=.coveragerc [tool:isort] profile=black [report] show_missing = True exclude_lines= raise NotImplementedError* elasticsearch-py-7.17.6/setup.py000066400000000000000000000102241426163262700165760ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import re from os.path import abspath, dirname, join from setuptools import find_packages, setup package_name = "elasticsearch" base_dir = abspath(dirname(__file__)) with open(join(base_dir, package_name, "_version.py")) as f: package_version = re.search( r"__versionstr__\s+=\s+[\"\']([^\"\']+)[\"\']", f.read() ).group(1) with open(join(base_dir, "README.rst")) as f: # Remove reST raw directive from README as they're not allowed on PyPI # Those blocks start with a newline and continue until the next newline mode = None lines = [] for line in f: if line.startswith(".. 
raw::"): mode = "ignore_nl" elif line == "\n": mode = "wait_nl" if mode == "ignore_nl" else None if mode is None: lines.append(line) long_description = "".join(lines) packages = [ package for package in find_packages(where=".", exclude=("test_elasticsearch*",)) if package == package_name or package.startswith(package_name + ".") ] install_requires = [ "urllib3>=1.21.1, <2", "certifi", ] tests_require = [ "requests>=2.0.0, <3.0.0", "coverage", "mock", "pyyaml", "pytest", "pytest-cov", ] async_require = ["aiohttp>=3,<4"] docs_require = ["sphinx<1.7", "sphinx_rtd_theme"] generate_require = ["black", "jinja2"] setup( name=package_name, description="Python client for Elasticsearch", license="Apache-2.0", url="https://github.com/elastic/elasticsearch-py", long_description=long_description, long_description_content_type="text/x-rst", version=package_version, author="Honza Král, Nick Lang", author_email="honza.kral@gmail.com, nick@nicklang.com", maintainer="Seth Michael Larson", maintainer_email="seth.larson@elastic.co", project_urls={ "Documentation": "https://elasticsearch-py.readthedocs.io", "Source Code": "https://github.com/elastic/elasticsearch-py", "Issue Tracker": "https://github.com/elastic/elasticsearch-py/issues", }, packages=packages, package_data={"elasticsearch": ["py.typed", "*.pyi"]}, include_package_data=True, zip_safe=False, classifiers=[ "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: Apache Software License", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ], python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4", install_requires=install_requires, test_suite="test_elasticsearch.run_tests.run_all", tests_require=tests_require, extras_require={ "develop": tests_require + docs_require + generate_require, "docs": docs_require, "requests": ["requests>=2.4.0, <3.0.0"], "async": async_require, }, ) elasticsearch-py-7.17.6/test_elasticsearch/000077500000000000000000000000001426163262700207365ustar00rootroot00000000000000elasticsearch-py-7.17.6/test_elasticsearch/__init__.py000066400000000000000000000014231426163262700230470ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
elasticsearch-py-7.17.6/test_elasticsearch/run_tests.py000077500000000000000000000104031426163262700233370ustar00rootroot00000000000000#!/usr/bin/env python # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import print_function import subprocess import sys from os import environ from os.path import abspath, dirname, exists, join, pardir def fetch_es_repo(): # user is manually setting YAML dir, don't tamper with it if "TEST_ES_YAML_DIR" in environ: return repo_path = environ.get( "TEST_ES_REPO", abspath(join(dirname(__file__), pardir, pardir, "elasticsearch")), ) # no repo if not exists(repo_path) or not exists(join(repo_path, ".git")): subprocess.check_call( "git clone https://github.com/elastic/elasticsearch %s" % repo_path, shell=True, ) # set YAML test dir environ["TEST_ES_YAML_DIR"] = join( repo_path, "rest-api-spec", "src", "main", "resources", "rest-api-spec", "test" ) # fetching of yaml tests disabled, we'll run with what's there if environ.get("TEST_ES_NOFETCH", False): return from test_elasticsearch.test_cases import SkipTest from test_elasticsearch.test_server import get_client # find out the sha of the running es try: es = get_client() sha = es.info()["version"]["build_hash"] except (SkipTest, KeyError): print("No running elasticsearch >1.X server...") return # fetch new commits to be sure... print("Fetching elasticsearch repo...") subprocess.check_call( "cd %s && git fetch https://github.com/elastic/elasticsearch.git" % repo_path, shell=True, ) # reset to the version from info() subprocess.check_call("cd %s && git fetch" % repo_path, shell=True) subprocess.check_call("cd %s && git reset --hard %s" % (repo_path, sha), shell=True) def run_all(argv=None): sys.exitfunc = lambda: sys.stderr.write("Shutting down....\n") # fetch yaml tests anywhere that's not GitHub Actions if "GITHUB_ACTION" not in environ: fetch_es_repo() # always insert coverage when running tests if argv is None: junit_xml = join( abspath(dirname(dirname(__file__))), "junit", "elasticsearch-py-junit.xml" ) argv = [ "pytest", "--cov=elasticsearch", "--junitxml=%s" % junit_xml, "--log-level=DEBUG", "--cache-clear", "-vv", ] ignores = [] # Python 3.6+ is required for async if sys.version_info < (3, 6): ignores.append("test_elasticsearch/test_async/") # GitHub Actions, run non-server tests if "GITHUB_ACTION" in environ: ignores.extend( [ "test_elasticsearch/test_server/", "test_elasticsearch/test_async/test_server/", ] ) if ignores: argv.extend(["--ignore=%s" % ignore for ignore in ignores]) # Jenkins, only run server tests if environ.get("TEST_TYPE") == "server": test_dir = abspath(dirname(__file__)) argv.append(join(test_dir, "test_server")) if sys.version_info >= (3, 6): argv.append(join(test_dir, "test_async/test_server")) # Not in CI, run all tests specified. 
else: argv.append(abspath(dirname(__file__))) exit_code = 0 try: subprocess.check_call(argv, stdout=sys.stdout, stderr=sys.stderr) except subprocess.CalledProcessError as e: exit_code = e.returncode sys.exit(exit_code) if __name__ == "__main__": run_all(sys.argv) elasticsearch-py-7.17.6/test_elasticsearch/test_async/000077500000000000000000000000001426163262700231125ustar00rootroot00000000000000elasticsearch-py-7.17.6/test_elasticsearch/test_async/__init__.py000066400000000000000000000014231426163262700252230ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. elasticsearch-py-7.17.6/test_elasticsearch/test_async/test_connection.py000066400000000000000000000447761426163262700267040ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
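# --- Hedged aside: the exit-code handling at the end of run_all() above,
# reduced to a runnable demonstration. subprocess.check_call() raises
# CalledProcessError whenever the child exits non-zero, and run_all() mirrors
# that returncode via sys.exit() so CI sees pytest's real exit status rather
# than always 0.
import subprocess
import sys

try:
    subprocess.check_call([sys.executable, "-c", "raise SystemExit(3)"])
except subprocess.CalledProcessError as e:
    assert e.returncode == 3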
import gzip import io import json import re import ssl import warnings from platform import python_version import aiohttp import pytest from mock import patch from multidict import CIMultiDict from elasticsearch import AIOHttpConnection, AsyncElasticsearch, __versionstr__ from elasticsearch.compat import reraise_exceptions from elasticsearch.exceptions import ConnectionError, NotFoundError pytestmark = pytest.mark.asyncio def gzip_decompress(data): buf = gzip.GzipFile(fileobj=io.BytesIO(data), mode="rb") return buf.read() class TestAIOHttpConnection: async def _get_mock_connection( self, connection_params={}, status_code=200, response_body=b"{}" ): con = AIOHttpConnection(**connection_params) await con._create_aiohttp_session() def _dummy_request(*args, **kwargs): class DummyResponse: async def __aenter__(self, *_, **__): return self async def __aexit__(self, *_, **__): pass async def text(self): return response_body.decode("utf-8", "surrogatepass") async def read(self): return response_body dummy_response = DummyResponse() dummy_response.headers = CIMultiDict() dummy_response.status = status_code _dummy_request.call_args = (args, kwargs) return dummy_response con.session.request = _dummy_request return con async def test_ssl_context(self): try: context = ssl.create_default_context() except AttributeError: # if create_default_context raises an AttributeError Exception # it means SSLContext is not available for that version of python # and we should skip this test. pytest.skip( "Test test_ssl_context is skipped cause SSLContext is not available for this version of Python" ) con = AIOHttpConnection(use_ssl=True, ssl_context=context) await con._create_aiohttp_session() assert con.use_ssl assert con.session.connector._ssl == context def test_opaque_id(self): con = AIOHttpConnection(opaque_id="app-1") assert con.headers["x-opaque-id"] == "app-1" def test_http_cloud_id(self): con = AIOHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==" ) assert con.use_ssl assert ( con.host == "https://4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io" ) assert con.port is None assert con.hostname == "4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io" assert con.http_compress con = AIOHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", port=9243, ) assert ( con.host == "https://4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io:9243" ) assert con.port == 9243 assert con.hostname == "4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io" def test_api_key_auth(self): # test with tuple con = AIOHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", api_key=("elastic", "changeme1"), ) assert con.headers["authorization"] == "ApiKey ZWxhc3RpYzpjaGFuZ2VtZTE=" assert ( con.host == "https://4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io" ) # test with base64 encoded string con = AIOHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", api_key="ZWxhc3RpYzpjaGFuZ2VtZTI=", ) assert con.headers["authorization"] == "ApiKey ZWxhc3RpYzpjaGFuZ2VtZTI=" assert ( con.host == "https://4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io" ) async def test_no_http_compression(self): con = await 
self._get_mock_connection() assert not con.http_compress assert "accept-encoding" not in con.headers await con.perform_request("GET", "/") _, kwargs = con.session.request.call_args assert not kwargs["data"] assert "accept-encoding" not in kwargs["headers"] assert "content-encoding" not in kwargs["headers"] async def test_http_compression(self): con = await self._get_mock_connection({"http_compress": True}) assert con.http_compress assert con.headers["accept-encoding"] == "gzip,deflate" # 'content-encoding' shouldn't be set at a connection level. # Should be applied only if the request is sent with a body. assert "content-encoding" not in con.headers await con.perform_request("GET", "/", body=b"{}") _, kwargs = con.session.request.call_args assert gzip_decompress(kwargs["data"]) == b"{}" assert kwargs["headers"]["accept-encoding"] == "gzip,deflate" assert kwargs["headers"]["content-encoding"] == "gzip" await con.perform_request("GET", "/") _, kwargs = con.session.request.call_args assert not kwargs["data"] assert kwargs["headers"]["accept-encoding"] == "gzip,deflate" assert "content-encoding" not in kwargs["headers"] def test_cloud_id_http_compress_override(self): # 'http_compress' will be 'True' by default for connections with # 'cloud_id' set but should prioritize user-defined values. con = AIOHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", ) assert con.http_compress is True con = AIOHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", http_compress=False, ) assert con.http_compress is False con = AIOHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", http_compress=True, ) assert con.http_compress is True async def test_url_prefix(self): con = await self._get_mock_connection( connection_params={"url_prefix": "/_search/"} ) assert con.url_prefix == "/_search" await con.perform_request("GET", "/") # Need to convert the yarl URL to a string to compare. 
method, yarl_url = con.session.request.call_args[0] assert method == "GET" and str(yarl_url) == "http://localhost:9200/_search/" def test_default_user_agent(self): con = AIOHttpConnection() assert con._get_default_user_agent() == "elasticsearch-py/%s (Python %s)" % ( __versionstr__, python_version(), ) def test_timeout_set(self): con = AIOHttpConnection(timeout=42) assert 42 == con.timeout def test_keep_alive_is_on_by_default(self): con = AIOHttpConnection() assert { "connection": "keep-alive", "user-agent": con._get_default_user_agent(), } == con.headers def test_http_auth(self): con = AIOHttpConnection(http_auth="username:secret") assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", "connection": "keep-alive", "user-agent": con._get_default_user_agent(), } == con.headers def test_http_auth_tuple(self): con = AIOHttpConnection(http_auth=("username", "secret")) assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", "connection": "keep-alive", "user-agent": con._get_default_user_agent(), } == con.headers def test_http_auth_list(self): con = AIOHttpConnection(http_auth=["username", "secret"]) assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", "connection": "keep-alive", "user-agent": con._get_default_user_agent(), } == con.headers def test_uses_https_if_verify_certs_is_off(self): with warnings.catch_warnings(record=True) as w: con = AIOHttpConnection(use_ssl=True, verify_certs=False) assert 1 == len(w) assert ( "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure." == str(w[0].message) ) assert con.use_ssl assert con.scheme == "https" assert con.host == "https://localhost:9200" async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self): with warnings.catch_warnings(record=True) as w: con = AIOHttpConnection( use_ssl=True, verify_certs=False, ssl_show_warn=False ) await con._create_aiohttp_session() assert w == [] assert isinstance(con.session, aiohttp.ClientSession) def test_doesnt_use_https_if_not_specified(self): con = AIOHttpConnection() assert not con.use_ssl def test_no_warning_when_using_ssl_context(self): ctx = ssl.create_default_context() with warnings.catch_warnings(record=True) as w: AIOHttpConnection(ssl_context=ctx) assert w == [], str([x.message for x in w]) def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): for kwargs in ( {"ssl_show_warn": False}, {"ssl_show_warn": True}, {"verify_certs": True}, {"verify_certs": False}, {"ca_certs": "/path/to/certs"}, {"ssl_show_warn": True, "ca_certs": "/path/to/certs"}, ): kwargs["ssl_context"] = ssl.create_default_context() with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") AIOHttpConnection(**kwargs) assert 1 == len(w) assert ( "When using `ssl_context`, all other SSL related kwargs are ignored" == str(w[0].message) ) @patch("elasticsearch.connection.base.logger") async def test_uncompressed_body_logged(self, logger): con = await self._get_mock_connection(connection_params={"http_compress": True}) await con.perform_request("GET", "/", body=b'{"example": "body"}') assert 2 == logger.debug.call_count req, resp = logger.debug.call_args_list assert '> {"example": "body"}' == req[0][0] % req[0][1:] assert "< {}" == resp[0][0] % resp[0][1:] @patch("elasticsearch.connection.base.logger") async def test_full_url_logged(self, logger): conn = await self._get_mock_connection() await conn.perform_request( "GET", "/", params={"key": "val"}, body=b'{"example": "body"}' ) assert logger.info.call_count == 1 assert ( logger.info.call_args_list[0][0][0] % 
logger.info.call_args_list[0][0][1:] == "GET http://localhost:9200/?key=val [status:200 request:0.000s]" ) conn = await self._get_mock_connection(status_code=404) with pytest.raises(NotFoundError): await conn.perform_request( "GET", "/", params={"key": "val"}, body=b'{"example": "body"}' ) assert logger.warning.call_count == 1 assert ( logger.warning.call_args_list[0][0][0] % logger.warning.call_args_list[0][0][1:] == "GET http://localhost:9200/?key=val [status:404 request:0.000s]" ) @patch("elasticsearch.connection.base.tracer") @patch("elasticsearch.connection.base.logger") async def test_failed_request_logs_and_traces(self, logger, tracer): conn = await self._get_mock_connection( response_body=b'{"answer": 42}', status_code=404 ) with pytest.raises(NotFoundError): await conn.perform_request("GET", "/", params={"param": 42}, body=b"{}") # trace request assert 1 == tracer.info.call_count # trace response assert 1 == tracer.debug.call_count # log url and duration assert 1 == logger.warning.call_count assert re.match( r"^GET http://localhost:9200/\?param=42 \[status:404 request:0.[0-9]{3}s\]", logger.warning.call_args[0][0] % logger.warning.call_args[0][1:], ) @patch("elasticsearch.connection.base.tracer") @patch("elasticsearch.connection.base.logger") async def test_success_logs_and_traces(self, logger, tracer): conn = await self._get_mock_connection( response_body=b"""{"answer": "that's it!"}""" ) await conn.perform_request( "GET", "/", {"param": 42}, """{"question": "what's that?"}""".encode("utf-8"), ) # trace request assert 1 == tracer.info.call_count assert ( """curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/?pretty&param=42' -d '{\n "question": "what\\u0027s that?"\n}'""" == tracer.info.call_args[0][0] % tracer.info.call_args[0][1:] ) # trace response assert 1 == tracer.debug.call_count assert re.match( r'#\[200\] \(0.[0-9]{3}s\)\n#{\n# "answer": "that\\u0027s it!"\n#}', tracer.debug.call_args[0][0] % tracer.debug.call_args[0][1:], ) # log url and duration assert 1 == logger.info.call_count assert re.match( r"GET http://localhost:9200/\?param=42 \[status:200 request:0.[0-9]{3}s\]", logger.info.call_args[0][0] % logger.info.call_args[0][1:], ) # log request body and response assert 2 == logger.debug.call_count req, resp = logger.debug.call_args_list assert '> {"question": "what\'s that?"}' == req[0][0] % req[0][1:] assert '< {"answer": "that\'s it!"}' == resp[0][0] % resp[0][1:] async def test_surrogatepass_into_bytes(self): buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = await self._get_mock_connection(response_body=buf) status, headers, data = await con.perform_request("GET", "/") assert u"你好\uda6a" == data @pytest.mark.parametrize("exception_cls", reraise_exceptions) async def test_recursion_error_reraised(self, exception_cls): conn = AIOHttpConnection() def request_raise(*_, **__): raise exception_cls("Wasn't modified!") await conn._create_aiohttp_session() conn.session.request = request_raise with pytest.raises(exception_cls) as e: await conn.perform_request("GET", "/") assert str(e.value) == "Wasn't modified!" class TestConnectionHttpbin: """Tests the HTTP connection implementations against a live server E2E""" async def httpbin_anything(self, conn, **kwargs): status, headers, data = await conn.perform_request("GET", "/anything", **kwargs) data = json.loads(data) data["headers"].pop( "X-Amzn-Trace-Id", None ) # Remove this header as it's put there by AWS.
assert all(header == header.lower() for header in headers) return (status, data) async def test_aiohttp_connection(self): # Defaults conn = AIOHttpConnection("httpbin.org", port=443, use_ssl=True) user_agent = conn._get_default_user_agent() status, data = await self.httpbin_anything(conn) assert status == 200 assert data["method"] == "GET" assert data["headers"] == { "Host": "httpbin.org", "User-Agent": user_agent, } # http_compress=False conn = AIOHttpConnection( "httpbin.org", port=443, use_ssl=True, http_compress=False ) status, data = await self.httpbin_anything(conn) assert status == 200 assert data["method"] == "GET" assert data["headers"] == { "Host": "httpbin.org", "User-Agent": user_agent, } # http_compress=True conn = AIOHttpConnection( "httpbin.org", port=443, use_ssl=True, http_compress=True ) status, data = await self.httpbin_anything(conn) assert status == 200 assert data["headers"] == { "Accept-Encoding": "gzip,deflate", "Host": "httpbin.org", "User-Agent": user_agent, } # Headers conn = AIOHttpConnection( "httpbin.org", port=443, use_ssl=True, http_compress=True, headers={"header1": "value1"}, ) status, data = await self.httpbin_anything( conn, headers={"header2": "value2", "header1": "override!"} ) assert status == 200 assert data["headers"] == { "Accept-Encoding": "gzip,deflate", "Host": "httpbin.org", "Header1": "override!", "Header2": "value2", "User-Agent": user_agent, } async def test_aiohttp_connection_error(self): conn = AIOHttpConnection("not.a.host.name") with pytest.raises(ConnectionError): await conn.perform_request("GET", "/") async def test_elasticsearch_connection_error(self): es = AsyncElasticsearch("http://not.a.host.name") with pytest.raises(ConnectionError): await es.search() elasticsearch-py-7.17.6/test_elasticsearch/test_async/test_server/000077500000000000000000000000001426163262700254575ustar00rootroot00000000000000elasticsearch-py-7.17.6/test_elasticsearch/test_async/test_server/__init__.py000066400000000000000000000014231426163262700275700ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. elasticsearch-py-7.17.6/test_elasticsearch/test_async/test_server/conftest.py000066400000000000000000000033621426163262700276620ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import asyncio import pytest import elasticsearch from elasticsearch.helpers.test import CA_CERTS, ELASTICSEARCH_URL from ...utils import wipe_cluster pytestmark = pytest.mark.asyncio @pytest.fixture(scope="function") async def async_client(): client = None try: if not hasattr(elasticsearch, "AsyncElasticsearch"): pytest.skip("test requires 'AsyncElasticsearch'") kw = {"timeout": 3, "ca_certs": CA_CERTS} client = elasticsearch.AsyncElasticsearch(ELASTICSEARCH_URL, **kw) # wait for yellow status for _ in range(100): try: await client.cluster.health(wait_for_status="yellow") break except ConnectionError: await asyncio.sleep(0.1) else: # timeout pytest.skip("Elasticsearch failed to start.") yield client finally: if client: wipe_cluster(client) await client.close() elasticsearch-py-7.17.6/test_elasticsearch/test_async/test_server/test_clients.py000066400000000000000000000040611426163262700305320ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import unicode_literals import pytest pytestmark = pytest.mark.asyncio class TestUnicode: async def test_indices_analyze(self, async_client): await async_client.indices.analyze(body='{"text": "привет"}') class TestBulk: async def test_bulk_works_with_string_body(self, async_client): docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) assert response["errors"] is False assert len(response["items"]) == 1 async def test_bulk_works_with_bytestring_body(self, async_client): docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) assert response["errors"] is False assert len(response["items"]) == 1 class TestYarlMissing: async def test_aiohttp_connection_works_without_yarl( self, async_client, monkeypatch ): # This is a defensive test case for if aiohttp suddenly stops using yarl. from elasticsearch._async import http_aiohttp monkeypatch.setattr(http_aiohttp, "yarl", False) resp = await async_client.info(pretty=True) assert isinstance(resp, dict) elasticsearch-py-7.17.6/test_elasticsearch/test_async/test_server/test_helpers.py000066400000000000000000001103721426163262700305360ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. 
See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # Licensed to Elasticsearch B.V under one or more agreements. # Elasticsearch B.V licenses this file to you under the Apache 2.0 License. # See the LICENSE file in the project root for more information import asyncio import warnings from datetime import datetime, timedelta, timezone import pytest from mock import MagicMock, patch from elasticsearch import TransportError, helpers from elasticsearch.helpers import ScanError pytestmark = pytest.mark.asyncio class AsyncMock(MagicMock): async def __call__(self, *args, **kwargs): return super(AsyncMock, self).__call__(*args, **kwargs) def __await__(self): return self().__await__() class FailingBulkClient(object): def __init__( self, client, fail_at=(2,), fail_with=TransportError(599, "Error!", {}) ): self.client = client self._called = 0 self._fail_at = fail_at self.transport = client.transport self._fail_with = fail_with async def bulk(self, *args, **kwargs): self._called += 1 if self._called in self._fail_at: raise self._fail_with return await self.client.bulk(*args, **kwargs) class TestStreamingBulk(object): async def test_actions_remain_unchanged(self, async_client): actions = [{"_id": 1}, {"_id": 2}] async for ok, item in helpers.async_streaming_bulk( async_client, actions, index="test-index" ): assert ok assert [{"_id": 1}, {"_id": 2}] == actions async def test_all_documents_get_inserted(self, async_client): docs = [{"answer": x, "_id": x} for x in range(100)] with warnings.catch_warnings(record=True) as w: async for ok, item in helpers.async_streaming_bulk( async_client, docs, index="test-index", refresh=True ): assert ok assert w == [] assert 100 == (await async_client.count(index="test-index"))["count"] assert {"answer": 42} == (await async_client.get(index="test-index", id=42))[ "_source" ] async def test_documents_data_types(self, async_client): async def async_gen(): for x in range(100): await asyncio.sleep(0) yield {"answer": x, "_id": x} def sync_gen(): for x in range(100): yield {"answer": x, "_id": x} async for ok, item in helpers.async_streaming_bulk( async_client, async_gen(), index="test-index", refresh=True ): assert ok assert 100 == (await async_client.count(index="test-index"))["count"] assert {"answer": 42} == (await async_client.get(index="test-index", id=42))[ "_source" ] await async_client.delete_by_query( index="test-index", body={"query": {"match_all": {}}} ) async for ok, item in helpers.async_streaming_bulk( async_client, sync_gen(), index="test-index", refresh=True ): assert ok assert 100 == (await async_client.count(index="test-index"))["count"] assert {"answer": 42} == (await async_client.get(index="test-index", id=42))[ "_source" ] async def test_all_errors_from_chunk_are_raised_on_failure(self, async_client): await async_client.indices.create( "i", { "mappings": {"properties": {"a": {"type": "integer"}}}, "settings": {"number_of_shards": 
1, "number_of_replicas": 0}, }, ) await async_client.cluster.health(wait_for_status="yellow") try: async for ok, item in helpers.async_streaming_bulk( async_client, [{"a": "b"}, {"a": "c"}], index="i", raise_on_error=True ): assert ok except helpers.BulkIndexError as e: assert 2 == len(e.errors) else: assert False, "exception should have been raised" async def test_different_op_types(self, async_client): await async_client.index(index="i", id=45, body={}) await async_client.index(index="i", id=42, body={}) docs = [ {"_index": "i", "_id": 47, "f": "v"}, {"_op_type": "delete", "_index": "i", "_id": 45}, {"_op_type": "update", "_index": "i", "_id": 42, "doc": {"answer": 42}}, ] async for ok, item in helpers.async_streaming_bulk(async_client, docs): assert ok assert not await async_client.exists(index="i", id=45) assert {"answer": 42} == (await async_client.get(index="i", id=42))["_source"] assert {"f": "v"} == (await async_client.get(index="i", id=47))["_source"] async def test_transport_error_can_becaught(self, async_client): failing_client = FailingBulkClient(async_client) docs = [ {"_index": "i", "_id": 47, "f": "v"}, {"_index": "i", "_id": 45, "f": "v"}, {"_index": "i", "_id": 42, "f": "v"}, ] results = [ x async for x in helpers.async_streaming_bulk( failing_client, docs, raise_on_exception=False, raise_on_error=False, chunk_size=1, ) ] assert 3 == len(results) assert [True, False, True] == [r[0] for r in results] exc = results[1][1]["index"].pop("exception") assert isinstance(exc, TransportError) assert 599 == exc.status_code assert { "index": { "_index": "i", "_id": 45, "data": {"f": "v"}, "error": "TransportError(599, 'Error!')", "status": 599, } } == results[1][1] async def test_rejected_documents_are_retried(self, async_client): failing_client = FailingBulkClient( async_client, fail_with=TransportError(429, "Rejected!", {}) ) docs = [ {"_index": "i", "_id": 47, "f": "v"}, {"_index": "i", "_id": 45, "f": "v"}, {"_index": "i", "_id": 42, "f": "v"}, ] results = [ x async for x in helpers.async_streaming_bulk( failing_client, docs, raise_on_exception=False, raise_on_error=False, chunk_size=1, max_retries=1, initial_backoff=0, ) ] assert 3 == len(results) assert [True, True, True] == [r[0] for r in results] await async_client.indices.refresh(index="i") res = await async_client.search(index="i") assert {"value": 3, "relation": "eq"} == res["hits"]["total"] assert 4 == failing_client._called async def test_rejected_documents_are_retried_at_most_max_retries_times( self, async_client ): failing_client = FailingBulkClient( async_client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!", {}) ) docs = [ {"_index": "i", "_id": 47, "f": "v"}, {"_index": "i", "_id": 45, "f": "v"}, {"_index": "i", "_id": 42, "f": "v"}, ] results = [ x async for x in helpers.async_streaming_bulk( failing_client, docs, raise_on_exception=False, raise_on_error=False, chunk_size=1, max_retries=1, initial_backoff=0, ) ] assert 3 == len(results) assert [False, True, True] == [r[0] for r in results] await async_client.indices.refresh(index="i") res = await async_client.search(index="i") assert {"value": 2, "relation": "eq"} == res["hits"]["total"] assert 4 == failing_client._called async def test_transport_error_is_raised_with_max_retries(self, async_client): failing_client = FailingBulkClient( async_client, fail_at=(1, 2, 3, 4), fail_with=TransportError(429, "Rejected!", {}), ) async def streaming_bulk(): results = [ x async for x in helpers.async_streaming_bulk( failing_client, [{"a": 42}, {"a": 39}], 
raise_on_exception=True, max_retries=3, initial_backoff=0, ) ] return results with pytest.raises(TransportError): await streaming_bulk() assert 4 == failing_client._called class TestBulk(object): async def test_bulk_works_with_single_item(self, async_client): docs = [{"answer": 42, "_id": 1}] success, failed = await helpers.async_bulk( async_client, docs, index="test-index", refresh=True ) assert 1 == success assert not failed assert 1 == (await async_client.count(index="test-index"))["count"] assert {"answer": 42} == (await async_client.get(index="test-index", id=1))[ "_source" ] async def test_all_documents_get_inserted(self, async_client): docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = await helpers.async_bulk( async_client, docs, index="test-index", refresh=True ) assert 100 == success assert not failed assert 100 == (await async_client.count(index="test-index"))["count"] assert {"answer": 42} == (await async_client.get(index="test-index", id=42))[ "_source" ] async def test_stats_only_reports_numbers(self, async_client): docs = [{"answer": x} for x in range(100)] success, failed = await helpers.async_bulk( async_client, docs, index="test-index", refresh=True, stats_only=True ) assert 100 == success assert 0 == failed assert 100 == (await async_client.count(index="test-index"))["count"] async def test_errors_are_reported_correctly(self, async_client): await async_client.indices.create( "i", { "mappings": {"properties": {"a": {"type": "integer"}}}, "settings": {"number_of_shards": 1, "number_of_replicas": 0}, }, ) await async_client.cluster.health(wait_for_status="yellow") success, failed = await helpers.async_bulk( async_client, [{"a": 42}, {"a": "c", "_id": 42}], index="i", raise_on_error=False, ) assert 1 == success assert 1 == len(failed) error = failed[0] assert "42" == error["index"]["_id"] assert "i" == error["index"]["_index"] print(error["index"]["error"]) assert "MapperParsingException" in repr( error["index"]["error"] ) or "mapper_parsing_exception" in repr(error["index"]["error"]) async def test_error_is_raised(self, async_client): await async_client.indices.create( "i", { "mappings": {"properties": {"a": {"type": "integer"}}}, "settings": {"number_of_shards": 1, "number_of_replicas": 0}, }, ) await async_client.cluster.health(wait_for_status="yellow") with pytest.raises(helpers.BulkIndexError): await helpers.async_bulk(async_client, [{"a": 42}, {"a": "c"}], index="i") async def test_ignore_error_if_raised(self, async_client): # ignore the status code 400 in tuple await helpers.async_bulk( async_client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=(400,) ) # ignore the status code 400 in list await helpers.async_bulk( async_client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=[ 400, ], ) # ignore the status code 400 await helpers.async_bulk( async_client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=400 ) # ignore only the status code in the `ignore_status` argument with pytest.raises(helpers.BulkIndexError): await helpers.async_bulk( async_client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=(444,) ) # ignore transport error exception failing_client = FailingBulkClient(async_client) await helpers.async_bulk( failing_client, [{"a": 42}], index="i", ignore_status=(599,) ) async def test_errors_are_collected_properly(self, async_client): await async_client.indices.create( "i", { "mappings": {"properties": {"a": {"type": "integer"}}}, "settings": {"number_of_shards": 1, "number_of_replicas": 0}, }, ) await 
async_client.cluster.health(wait_for_status="yellow") success, failed = await helpers.async_bulk( async_client, [{"a": 42}, {"a": "c"}], index="i", stats_only=True, raise_on_error=False, ) assert 1 == success assert 1 == failed class MockScroll: def __init__(self): self.calls = [] async def __call__(self, *args, **kwargs): self.calls.append((args, kwargs)) if len(self.calls) == 1: return { "_scroll_id": "dummy_id", "_shards": {"successful": 4, "total": 5, "skipped": 0}, "hits": {"hits": [{"scroll_data": 42}]}, } elif len(self.calls) == 2: return { "_scroll_id": "dummy_id", "_shards": {"successful": 4, "total": 5, "skipped": 0}, "hits": {"hits": []}, } else: raise Exception("no more responses") class MockResponse: def __init__(self, resp): self.resp = resp async def __call__(self, *args, **kwargs): return self.resp def __await__(self): return self().__await__() @pytest.fixture(scope="function") async def scan_teardown(async_client): yield await async_client.clear_scroll(scroll_id="_all") class TestScan(object): async def test_order_can_be_preserved(self, async_client, scan_teardown): bulk = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append({"answer": x, "correct": x == 42}) await async_client.bulk(bulk, refresh=True) with warnings.catch_warnings(record=True) as w: docs = [ doc async for doc in helpers.async_scan( async_client, index="test_index", query={"sort": "answer"}, preserve_order=True, ) ] assert w == [] assert 100 == len(docs) assert list(map(str, range(100))) == list(d["_id"] for d in docs) assert list(range(100)) == list(d["_source"]["answer"] for d in docs) async def test_all_documents_are_read(self, async_client, scan_teardown): bulk = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append({"answer": x, "correct": x == 42}) await async_client.bulk(bulk, refresh=True) docs = [ x async for x in helpers.async_scan(async_client, index="test_index", size=2) ] assert 100 == len(docs) assert set(map(str, range(100))) == set(d["_id"] for d in docs) assert set(range(100)) == set(d["_source"]["answer"] for d in docs) async def test_scroll_error(self, async_client, scan_teardown): bulk = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) await async_client.bulk(bulk, refresh=True) with patch.object(async_client, "scroll", MockScroll()): data = [ x async for x in helpers.async_scan( async_client, index="test_index", size=2, raise_on_error=False, clear_scroll=False, ) ] assert len(data) == 3 assert data[-1] == {"scroll_data": 42} with patch.object(async_client, "scroll", MockScroll()): with pytest.raises(ScanError): data = [ x async for x in helpers.async_scan( async_client, index="test_index", size=2, raise_on_error=True, clear_scroll=False, ) ] assert len(data) == 3 assert data[-1] == {"scroll_data": 42} async def test_initial_search_error(self, async_client, scan_teardown): with patch.object(async_client, "clear_scroll", new_callable=AsyncMock): with patch.object( async_client, "search", MockResponse( { "_scroll_id": "dummy_id", "_shards": {"successful": 4, "total": 5, "skipped": 0}, "hits": {"hits": [{"search_data": 1}]}, } ), ): with patch.object(async_client, "scroll", MockScroll()): data = [ x async for x in helpers.async_scan( async_client, index="test_index", size=2, raise_on_error=False, ) ] assert data == [{"search_data": 1}, {"scroll_data": 42}] with patch.object( async_client, "search", MockResponse( { "_scroll_id": "dummy_id", "_shards": 
{"successful": 4, "total": 5, "skipped": 0}, "hits": {"hits": [{"search_data": 1}]}, } ), ): with patch.object(async_client, "scroll", MockScroll()) as mock_scroll: with pytest.raises(ScanError): data = [ x async for x in helpers.async_scan( async_client, index="test_index", size=2, raise_on_error=True, ) ] assert data == [{"search_data": 1}] assert mock_scroll.calls == [] async def test_no_scroll_id_fast_route(self, async_client, scan_teardown): with patch.object(async_client, "search", MockResponse({"no": "_scroll_id"})): with patch.object(async_client, "scroll") as scroll_mock: with patch.object(async_client, "clear_scroll") as clear_mock: data = [ x async for x in helpers.async_scan( async_client, index="test_index" ) ] assert data == [] scroll_mock.assert_not_called() clear_mock.assert_not_called() @patch("elasticsearch._async.helpers.logger") async def test_logger(self, logger_mock, async_client, scan_teardown): bulk = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) await async_client.bulk(bulk, refresh=True) with patch.object(async_client, "scroll", MockScroll()): _ = [ x async for x in helpers.async_scan( async_client, index="test_index", size=2, raise_on_error=False, clear_scroll=False, ) ] logger_mock.warning.assert_called() with patch.object(async_client, "scroll", MockScroll()): try: _ = [ x async for x in helpers.async_scan( async_client, index="test_index", size=2, raise_on_error=True, clear_scroll=False, ) ] except ScanError: pass logger_mock.warning.assert_called_with( "Scroll request has only succeeded on %d (+%d skipped) shards out of %d.", 4, 0, 5, ) async def test_clear_scroll(self, async_client, scan_teardown): bulk = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) await async_client.bulk(bulk, refresh=True) with patch.object( async_client, "clear_scroll", wraps=async_client.clear_scroll ) as spy: _ = [ x async for x in helpers.async_scan( async_client, index="test_index", size=2 ) ] spy.assert_called_once() spy.reset_mock() _ = [ x async for x in helpers.async_scan( async_client, index="test_index", size=2, clear_scroll=True ) ] spy.assert_called_once() spy.reset_mock() _ = [ x async for x in helpers.async_scan( async_client, index="test_index", size=2, clear_scroll=False ) ] spy.assert_not_called() @pytest.mark.parametrize( "kwargs", [ {"api_key": ("name", "value")}, {"http_auth": ("username", "password")}, {"headers": {"custom", "header"}}, ], ) async def test_scan_auth_kwargs_forwarded( self, async_client, scan_teardown, kwargs ): ((key, val),) = kwargs.items() with patch.object( async_client, "search", return_value=MockResponse( { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": [{"search_data": 1}]}, } ), ) as search_mock: with patch.object( async_client, "scroll", return_value=MockResponse( { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": []}, } ), ) as scroll_mock: with patch.object( async_client, "clear_scroll", return_value=MockResponse({}) ) as clear_mock: data = [ x async for x in helpers.async_scan( async_client, index="test_index", **kwargs ) ] assert data == [{"search_data": 1}] for api_mock in (search_mock, scroll_mock, clear_mock): assert api_mock.call_args[1][key] == val async def test_scan_auth_kwargs_favor_scroll_kwargs_option( self, async_client, scan_teardown ): with patch.object( async_client, "search", return_value=MockResponse( { "_scroll_id": 
"scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": [{"search_data": 1}]}, } ), ): with patch.object( async_client, "scroll", return_value=MockResponse( { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": []}, } ), ): with patch.object( async_client, "clear_scroll", return_value=MockResponse({}) ): data = [ x async for x in helpers.async_scan( async_client, index="test_index", headers={"not scroll": "kwargs"}, scroll_kwargs={ "headers": {"scroll": "kwargs"}, "sort": "asc", }, ) ] assert data == [{"search_data": 1}] # Assert that we see 'scroll_kwargs' options used instead of 'kwargs' assert async_client.scroll.call_args[1]["headers"] == { "scroll": "kwargs" } assert async_client.scroll.call_args[1]["sort"] == "asc" async def test_scan_duplicate_parameters(self, async_client): with patch.object(async_client, "search") as search_mock, patch.object( async_client, "scroll" ) as scroll_mock, patch.object( async_client, "clear_scroll" ) as clear_scroll_mock: search_mock.return_value = MockResponse( { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": [{"field": "value"}]}, } ) scroll_mock.return_value = MockResponse( { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": []}, } ) clear_scroll_mock.return_value = MockResponse({"acknowledged": True}) data = [ x async for x in helpers.async_scan( async_client, index="test_index", size=10, query={"size": 1}, scroll_kwargs={"scroll": "10m", "rest_total_hits_as_int": True}, ) ] assert data == [{"field": "value"}] search_mock.assert_called_with( index="test_index", size=10, sort="_doc", scroll="5m", request_timeout=None, params={"__elastic_client_meta": (("h", "s"),)}, ) scroll_mock.assert_called_with( scroll="5m", rest_total_hits_as_int=True, params={"__elastic_client_meta": (("h", "s"),)}, scroll_id="scroll_id", ) clear_scroll_mock.assert_called_with( scroll_id="scroll_id", ignore=(404,), params={"__elastic_client_meta": (("h", "s"),)}, ) @pytest.mark.parametrize( "scan_kwargs", [ {"from": 1}, {"from_": 1}, {"query": {"from": 1}}, {"query": {"from_": 1}}, {"query": {"query": {"match_all": {}}}, "from": 1}, {"query": {"query": {"match_all": {}}}, "from_": 1}, ], ) async def test_scan_from_keyword_is_aliased(self, async_client, scan_kwargs): with patch.object(async_client, "search") as search_mock, patch.object( async_client, "scroll" ) as scroll_mock, patch.object( async_client, "clear_scroll" ) as clear_scroll_mock: search_mock.return_value = MockResponse( { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": [{"field": "value"}]}, } ) scroll_mock.return_value = MockResponse( { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": []}, } ) clear_scroll_mock.return_value = MockResponse({"acknowledged": True}) [ x async for x in helpers.async_scan( async_client, index="test_index", **scan_kwargs ) ] assert search_mock.call_args[1]["from_"] == 1 assert "from" not in search_mock.call_args[1] @pytest.fixture(scope="function") async def reindex_setup(async_client): bulk = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append( { "answer": x, "correct": x == 42, "type": "answers" if x % 2 == 0 else "questions", } ) await async_client.bulk(bulk, refresh=True) yield class TestReindex(object): async def test_reindex_passes_kwargs_to_scan_and_bulk( 
self, async_client, reindex_setup ): await helpers.async_reindex( async_client, "test_index", "prod_index", scan_kwargs={"q": "type:answers"}, bulk_kwargs={"refresh": True}, ) assert await async_client.indices.exists("prod_index") assert ( 50 == (await async_client.count(index="prod_index", q="type:answers"))["count"] ) assert {"answer": 42, "correct": True, "type": "answers"} == ( await async_client.get(index="prod_index", id=42) )["_source"] async def test_reindex_accepts_a_query(self, async_client, reindex_setup): await helpers.async_reindex( async_client, "test_index", "prod_index", query={"query": {"bool": {"filter": {"term": {"type": "answers"}}}}}, ) await async_client.indices.refresh() assert await async_client.indices.exists("prod_index") assert ( 50 == (await async_client.count(index="prod_index", q="type:answers"))["count"] ) assert {"answer": 42, "correct": True, "type": "answers"} == ( await async_client.get(index="prod_index", id=42) )["_source"] async def test_all_documents_get_moved(self, async_client, reindex_setup): await helpers.async_reindex(async_client, "test_index", "prod_index") await async_client.indices.refresh() assert await async_client.indices.exists("prod_index") assert ( 50 == (await async_client.count(index="prod_index", q="type:questions"))[ "count" ] ) assert ( 50 == (await async_client.count(index="prod_index", q="type:answers"))["count"] ) assert {"answer": 42, "correct": True, "type": "answers"} == ( await async_client.get(index="prod_index", id=42) )["_source"] @pytest.fixture(scope="function") async def parent_reindex_setup(async_client): body = { "settings": {"number_of_shards": 1, "number_of_replicas": 0}, "mappings": { "properties": { "question_answer": { "type": "join", "relations": {"question": "answer"}, } } }, } await async_client.indices.create(index="test-index", body=body) await async_client.indices.create(index="real-index", body=body) await async_client.index( index="test-index", id=42, body={"question_answer": "question"} ) await async_client.index( index="test-index", id=47, routing=42, body={"some": "data", "question_answer": {"name": "answer", "parent": 42}}, ) await async_client.indices.refresh(index="test-index") class TestParentChildReindex: async def test_children_are_reindexed_correctly( self, async_client, parent_reindex_setup ): await helpers.async_reindex(async_client, "test-index", "real-index") q = await async_client.get(index="real-index", id=42) assert { "_id": "42", "_index": "real-index", "_primary_term": 1, "_seq_no": 0, "_source": {"question_answer": "question"}, "_type": "_doc", "_version": 1, "found": True, } == q q = await async_client.get(index="test-index", id=47, routing=42) assert { "_routing": "42", "_id": "47", "_index": "test-index", "_primary_term": 1, "_seq_no": 1, "_source": { "some": "data", "question_answer": {"name": "answer", "parent": 42}, }, "_type": "_doc", "_version": 1, "found": True, } == q @pytest.fixture(scope="function") async def reindex_data_stream_setup(async_client): dt = datetime.now(tz=timezone.utc) bulk = [] for x in range(100): bulk.append({"index": {"_index": "test_index_stream", "_id": x}}) bulk.append( { "answer": x, "correct": x == 42, "type": "answers" if x % 2 == 0 else "questions", "@timestamp": (dt - timedelta(days=x)).isoformat(), } ) await async_client.bulk(bulk, refresh=True) await async_client.indices.put_index_template( name="my-index-template", body={ "index_patterns": ["py-*-*"], "data_stream": {}, }, ) await async_client.indices.create_data_stream(name="py-test-stream") 
await async_client.indices.refresh() yield class TestAsyncDataStreamReindex(object): @pytest.mark.parametrize("op_type", [None, "create"]) async def test_reindex_index_datastream( self, op_type, async_client, reindex_data_stream_setup ): await helpers.async_reindex( async_client, source_index="test_index_stream", target_index="py-test-stream", scan_kwargs={"q": "type:answers"}, bulk_kwargs={"refresh": True}, op_type=op_type, ) # await async_client.indices.refresh() assert await async_client.indices.exists(index="py-test-stream") assert ( 50 == (await async_client.count(index="py-test-stream", q="type:answers"))[ "count" ] ) async def test_reindex_index_datastream_op_type_index( self, async_client, reindex_data_stream_setup ): with pytest.raises( ValueError, match="Data streams must have 'op_type' set to 'create'" ): await helpers.async_reindex( async_client, source_index="test_index_stream", target_index="py-test-stream", query={"query": {"bool": {"filter": {"term": {"type": "answers"}}}}}, op_type="_index", ) elasticsearch-py-7.17.6/test_elasticsearch/test_async/test_server/test_mapbox_vector_tile.py000066400000000000000000000130261426163262700327570ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
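# Illustration: the tests below always request the tile zoom=13, x=4207,
# y=2692. A minimal, self-contained sketch of the standard "slippy map"
# tile math shows why that tile contains the museum geo_points indexed by
# the fixture (the helper name is ours, purely for illustration):
import math


def geo_point_to_tile(lat, lon, zoom):
    """Return the (x, y) web-mercator tile containing a WGS84 point."""
    n = 2 ** zoom
    x = int((lon + 180.0) / 360.0 * n)
    lat_rad = math.radians(lat)
    y = int(
        (1.0 - math.log(math.tan(lat_rad) + 1.0 / math.cos(lat_rad)) / math.pi)
        / 2.0
        * n
    )
    return x, y


# NEMO Science Museum is at "52.374081,4.912350" in the fixture below.
assert geo_point_to_tile(52.374081, 4.912350, 13) == (4207, 2692)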
import io import logging import re import pytest from elasticsearch import AsyncElasticsearch, RequestError from elasticsearch.helpers.test import CA_CERTS, ELASTICSEARCH_URL pytestmark = pytest.mark.asyncio @pytest.fixture(scope="function") async def mvt_setup(async_client): await async_client.indices.create( index="museums", body={ "mappings": { "properties": { "location": {"type": "geo_point"}, "name": {"type": "keyword"}, "price": {"type": "long"}, "included": {"type": "boolean"}, } } }, ) await async_client.bulk( index="museums", body=[ {"index": {"_id": "1"}}, { "location": "52.374081,4.912350", "name": "NEMO Science Museum", "price": 1750, "included": True, }, {"index": {"_id": "2"}}, { "location": "52.369219,4.901618", "name": "Museum Het Rembrandthuis", "price": 1500, "included": False, }, {"index": {"_id": "3"}}, { "location": "52.371667,4.914722", "name": "Nederlands Scheepvaartmuseum", "price": 1650, "included": True, }, {"index": {"_id": "4"}}, { "location": "52.371667,4.914722", "name": "Amsterdam Centre for Architecture", "price": 0, "included": True, }, ], refresh=True, ) async def test_mapbox_vector_tile_logging(mvt_setup): client = AsyncElasticsearch(ELASTICSEARCH_URL, ca_certs=CA_CERTS) await client.info() output = io.StringIO() handler = logging.StreamHandler(output) logger = logging.getLogger("elasticsearch") logger.addHandler(handler) logger.setLevel(logging.DEBUG) try: await client.search_mvt( index="museums", zoom=13, x=4207, y=2692, field="location", ) finally: logger.removeHandler(handler) handler.flush() logs = output.getvalue() print(logs) assert re.search( r"^POST https?://[^/]+/museums/_mvt/location/13/4207/2692 \[status:200 request:0\.[0-9]{3}s\]\n" r"> None\n" r"< b'.+'$", logs, flags=re.DOTALL, ) output = io.StringIO() handler = logging.StreamHandler(output) logger = logging.getLogger("elasticsearch") logger.addHandler(handler) # Errors should still be JSON try: with pytest.raises(RequestError) as e: await client.search_mvt( index="museums", zoom=-100, x=4207, y=2692, field="location", ) finally: logger.removeHandler(handler) assert str(e.value) == ( "RequestError(400, 'illegal_argument_exception', " "'Invalid geotile_grid precision of -100. Must be between 0 and 29.')" ) assert e.value.status_code == 400 handler.flush() logs = output.getvalue() assert re.search( r"^POST https?://[^/]+/museums/_mvt/location/-100/4207/2692 \[status:400 request:0\.[0-9]{3}s\]\n", logs, flags=re.DOTALL, ) # The JSON error body is still logged properly. assert logs.endswith( '> None\n< {"error":{"root_cause":[{"type":"illegal_argument_exception","reason":"Invalid ' 'geotile_grid precision of -100. Must be between 0 and 29."}],"type":"illegal_argument_exception",' '"reason":"Invalid geotile_grid precision of -100. 
Must be between 0 and 29."},"status":400}\n' ) async def test_mapbox_vector_tile_response(mvt_setup): try: import mapbox_vector_tile except ImportError: return pytest.skip(reason="Requires the 'mapbox-vector-tile' package") client = AsyncElasticsearch(ELASTICSEARCH_URL, ca_certs=CA_CERTS) resp = await client.search_mvt( index="museums", zoom=13, x=4207, y=2692, field="location", grid_precision=2, fields=["name", "price"], query={"term": {"included": True}}, aggs={ "min_price": {"min": {"field": "price"}}, "max_price": {"max": {"field": "price"}}, "avg_price": {"avg": {"field": "price"}}, }, ) assert isinstance(resp, bytes) # Decode the binary as MVT tile = mapbox_vector_tile.decode(resp) # Assert some general things about the structure, mostly we want # to know that we got back a valid MVT. assert set(tile.keys()) == {"hits", "aggs", "meta"} elasticsearch-py-7.17.6/test_elasticsearch/test_async/test_server/test_rest_api_spec.py000066400000000000000000000230211426163262700317060ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ Dynamically generated set of TestCases based on set of yaml files decribing some integration tests. These files are shared among all official Elasticsearch clients. """ import inspect import re import warnings import pytest from elasticsearch import ElasticsearchWarning, RequestError from elasticsearch.helpers.test import _get_version from ...test_server.test_rest_api_spec import ( APIS_USING_TYPE_INSTEAD_OF_DOC_TYPE, APIS_WITH_BODY_FIELDS, COMPATIBILITY_MIMETYPE, COMPATIBILITY_MODE_ENABLED, IMPLEMENTED_FEATURES, PARAMS_RENAMES, RUN_ASYNC_REST_API_TESTS, YAML_TEST_SPECS, YamlRunner, ) pytestmark = pytest.mark.asyncio XPACK_FEATURES = None ES_VERSION = None async def await_if_coro(x): if inspect.iscoroutine(x): return await x return x class AsyncYamlRunner(YamlRunner): async def setup(self): # Pull skips from individual tests to not do unnecessary setup. skip_code = [] for action in self._run_code: assert len(action) == 1 action_type, _ = list(action.items())[0] if action_type == "skip": skip_code.append(action) else: break if self._setup_code or skip_code: self.section("setup") if skip_code: await self.run_code(skip_code) if self._setup_code: await self.run_code(self._setup_code) async def teardown(self): if self._teardown_code: self.section("teardown") await self.run_code(self._teardown_code) async def es_version(self): global ES_VERSION if ES_VERSION is None: version_string = (await self.client.info())["version"]["number"] if "." 
not in version_string: return () version = version_string.strip().split(".") ES_VERSION = tuple(int(v) if v.isdigit() else 999 for v in version) return ES_VERSION def section(self, name): print(("=" * 10) + " " + name + " " + ("=" * 10)) async def run(self): try: await self.setup() self.section("test") await self.run_code(self._run_code) finally: try: await self.teardown() except Exception: pass async def run_code(self, test): """Execute an instruction based on it's type.""" for action in test: assert len(action) == 1 action_type, action = list(action.items())[0] print(action_type, action) if hasattr(self, "run_" + action_type): await await_if_coro(getattr(self, "run_" + action_type)(action)) else: raise RuntimeError("Invalid action type %r" % (action_type,)) async def run_do(self, action): api = self.client headers = action.pop("headers", None) catch = action.pop("catch", None) warn = action.pop("warnings", ()) allowed_warnings = action.pop("allowed_warnings", ()) if isinstance(allowed_warnings, str): allowed_warnings = (allowed_warnings,) allowed_warnings_regex = action.pop("allowed_warnings_regex", ()) if isinstance(allowed_warnings_regex, str): allowed_warnings_regex = (allowed_warnings_regex,) assert len(action) == 1 # Remove the x_pack_rest_user authentication # if it's given via headers. We're already authenticated # via the 'elastic' user. if ( headers and headers.get("Authorization", None) == "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" ): headers.pop("Authorization") if headers and "Content-Type" in headers and COMPATIBILITY_MODE_ENABLED: headers["Content-Type"] = COMPATIBILITY_MIMETYPE method, args = list(action.items())[0] args["headers"] = headers # locate api endpoint for m in method.split("."): # Some deprecated APIs are prefixed with 'xpack-*' if m.startswith("xpack-"): m = m.replace("xpack-", "") assert hasattr(api, m) api = getattr(api, m) # some parameters had to be renamed to not clash with python builtins, # compensate for k in PARAMS_RENAMES: # Don't do the 'doc_type' rename for APIs that actually want 'type' if k == "type" and method in APIS_USING_TYPE_INSTEAD_OF_DOC_TYPE: continue if k in args: args[PARAMS_RENAMES[k]] = args.pop(k) # resolve vars for k in args: args[k] = self._resolve(args[k]) # If there's a body parameter given to an API with # body fields enabled we expand the body to parameters. if ( "body" in args and isinstance(args["body"], dict) and method in APIS_WITH_BODY_FIELDS ): args.update( {PARAMS_RENAMES.get(k, k): v for k, v in args.pop("body").items()} ) warnings.simplefilter("always", category=ElasticsearchWarning) with warnings.catch_warnings(record=True) as caught_warnings: try: self.last_response = await api(**args) except Exception as e: if not catch: raise self.run_catch(catch, e) else: if catch: raise AssertionError( "Failed to catch %r in %r." % (catch, self.last_response) ) # Filter out warnings raised by other components. caught_warnings = [ str(w.message) for w in caught_warnings if w.category == ElasticsearchWarning and (not allowed_warnings or str(w.message) not in allowed_warnings) and ( not allowed_warnings_regex or all( re.search(pattern, str(w.message)) is None for pattern in allowed_warnings_regex ) ) ] # This warning can show up in many places but isn't accounted for # in tests, so we remove it to make sure things pass. include_type_name_warning = ( "[types removal] Using include_type_name in create index requests is deprecated. " "The parameter will be removed in the next major version." 
) if ( include_type_name_warning in caught_warnings and include_type_name_warning not in warn ): caught_warnings.remove(include_type_name_warning) # Sorting removes the issue with order raised. We only care about # if all warnings are raised in the single API call. if warn and sorted(warn) != sorted(caught_warnings): raise AssertionError( "Expected warnings not equal to actual warnings: expected=%r actual=%r" % (warn, caught_warnings) ) async def run_skip(self, skip): if "features" in skip: features = skip["features"] if not isinstance(features, (tuple, list)): features = [features] for feature in features: if feature in IMPLEMENTED_FEATURES: continue pytest.skip("feature '%s' is not supported" % feature) if "version" in skip: version, reason = skip["version"], skip["reason"] if version == "all": pytest.skip(reason) min_version, max_version = version.split("-") min_version = _get_version(min_version) or (0,) max_version = _get_version(max_version) or (999,) if min_version <= (await self.es_version()) <= max_version: pytest.skip(reason) async def _feature_enabled(self, name): global XPACK_FEATURES if XPACK_FEATURES is None: try: xinfo = await self.client.xpack.info() XPACK_FEATURES = set( f for f in xinfo["features"] if xinfo["features"][f]["enabled"] ) IMPLEMENTED_FEATURES.add("xpack") except RequestError: XPACK_FEATURES = set() IMPLEMENTED_FEATURES.add("no_xpack") return name in XPACK_FEATURES @pytest.fixture(scope="function") def async_runner(async_client): return AsyncYamlRunner(async_client) if RUN_ASYNC_REST_API_TESTS: @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) async def test_rest_api_spec(test_spec, async_runner): if test_spec.get("skip", False): pytest.skip("Manually skipped in 'SKIP_TESTS'") async_runner.use_spec(test_spec) await async_runner.run() elasticsearch-py-7.17.6/test_elasticsearch/test_async/test_transport.py000066400000000000000000001000471426163262700265610ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
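# The tests in this module follow one pattern throughout: replace the real
# connection with a stub via 'connection_class' and drive the transport
# directly. A rough sketch of that pattern (names are illustrative; the
# private '_verified_elasticsearch' flag is set just as the tests below do,
# to skip the pre-flight product check):
#
#     class RecordingConnection(Connection):
#         def __init__(self, **kwargs):
#             self.calls = []
#             super().__init__(**kwargs)
#
#         async def perform_request(self, *args, **kwargs):
#             self.calls.append((args, kwargs))
#             return 200, {}, "{}"
#
#     t = AsyncTransport([{}], connection_class=RecordingConnection)
#     t._verified_elasticsearch = True
#     await t.perform_request("GET", "/")
#     assert t.get_connection().calls  # one recorded ("GET", "/") call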
from __future__ import unicode_literals import asyncio import json import re import warnings import pytest from mock import patch from elasticsearch import AsyncTransport from elasticsearch.connection import Connection from elasticsearch.connection_pool import DummyConnectionPool from elasticsearch.exceptions import ( AuthenticationException, AuthorizationException, ConnectionError, ElasticsearchWarning, NotFoundError, TransportError, UnsupportedProductError, ) from elasticsearch.transport import _ProductChecker pytestmark = pytest.mark.asyncio class DummyConnection(Connection): def __init__(self, **kwargs): self.exception = kwargs.pop("exception", None) self.status, self.data = kwargs.pop("status", 200), kwargs.pop("data", "{}") self.headers = kwargs.pop("headers", {}) self.delay = kwargs.pop("delay", 0) self.calls = [] self.closed = False super(DummyConnection, self).__init__(**kwargs) async def perform_request(self, *args, **kwargs): if self.closed: raise RuntimeError("This connection is closed") if self.delay: await asyncio.sleep(self.delay) self.calls.append((args, kwargs)) if self.exception: raise self.exception return self.status, self.headers, self.data async def close(self): if self.closed: raise RuntimeError("This connection is already closed") self.closed = True CLUSTER_NODES = """{ "_nodes" : { "total" : 1, "successful" : 1, "failed" : 0 }, "cluster_name" : "elasticsearch", "nodes" : { "SRZpKFZdQguhhvifmN6UVA" : { "name" : "SRZpKFZ", "transport_address" : "127.0.0.1:9300", "host" : "127.0.0.1", "ip" : "127.0.0.1", "version" : "5.0.0", "build_hash" : "253032b", "roles" : [ "master", "data", "ingest" ], "http" : { "bound_address" : [ "[fe80::1]:9200", "[::1]:9200", "127.0.0.1:9200" ], "publish_address" : "1.1.1.1:123", "max_content_length_in_bytes" : 104857600 } } } }""" CLUSTER_NODES_7x_PUBLISH_HOST = """{ "_nodes" : { "total" : 1, "successful" : 1, "failed" : 0 }, "cluster_name" : "elasticsearch", "nodes" : { "SRZpKFZdQguhhvifmN6UVA" : { "name" : "SRZpKFZ", "transport_address" : "127.0.0.1:9300", "host" : "127.0.0.1", "ip" : "127.0.0.1", "version" : "5.0.0", "build_hash" : "253032b", "roles" : [ "master", "data", "ingest" ], "http" : { "bound_address" : [ "[fe80::1]:9200", "[::1]:9200", "127.0.0.1:9200" ], "publish_address" : "somehost.tld/1.1.1.1:123", "max_content_length_in_bytes" : 104857600 } } } }""" class TestTransport: async def test_single_connection_uses_dummy_connection_pool(self): t = AsyncTransport([{}]) await t._async_call() assert isinstance(t.connection_pool, DummyConnectionPool) t = AsyncTransport([{"host": "localhost"}]) await t._async_call() assert isinstance(t.connection_pool, DummyConnectionPool) async def test_request_timeout_extracted_from_params_and_passed(self): t = AsyncTransport([{}], connection_class=DummyConnection, meta_header=False) t._verified_elasticsearch = True await t.perform_request("GET", "/", params={"request_timeout": 42}) assert 1 == len(t.get_connection().calls) assert ("GET", "/", {}, None) == t.get_connection().calls[0][0] assert { "timeout": 42, "ignore": (), "headers": None, } == t.get_connection().calls[0][1] async def test_opaque_id(self): t = AsyncTransport( [{}], opaque_id="app-1", connection_class=DummyConnection, meta_header=False ) t._verified_elasticsearch = True await t.perform_request("GET", "/") assert 1 == len(t.get_connection().calls) assert ("GET", "/", None, None) == t.get_connection().calls[0][0] assert { "timeout": None, "ignore": (), "headers": None, } == t.get_connection().calls[0][1] # Now try with an 
'x-opaque-id' set on perform_request(). await t.perform_request("GET", "/", headers={"x-opaque-id": "request-1"}) assert 2 == len(t.get_connection().calls) assert ("GET", "/", None, None) == t.get_connection().calls[1][0] assert { "timeout": None, "ignore": (), "headers": {"x-opaque-id": "request-1"}, } == t.get_connection().calls[1][1] async def test_request_with_custom_user_agent_header(self): t = AsyncTransport([{}], connection_class=DummyConnection, meta_header=False) t._verified_elasticsearch = True await t.perform_request( "GET", "/", headers={"user-agent": "my-custom-value/1.2.3"} ) assert 1 == len(t.get_connection().calls) assert { "timeout": None, "ignore": (), "headers": {"user-agent": "my-custom-value/1.2.3"}, } == t.get_connection().calls[0][1] async def test_send_get_body_as_source(self): with warnings.catch_warnings(record=True) as w: t = AsyncTransport( [{}], send_get_body_as="source", connection_class=DummyConnection ) assert len(w) == 1 assert str(w[0].message) == ( "The 'send_get_body_as' parameter is no longer necessary and will be removed in 8.0" ) t._verified_elasticsearch = True await t.perform_request("GET", "/", body={}) assert 1 == len(t.get_connection().calls) assert ("GET", "/", {"source": "{}"}, None) == t.get_connection().calls[0][0] async def test_send_get_body_as_post(self): with warnings.catch_warnings(record=True) as w: t = AsyncTransport( [{}], send_get_body_as="POST", connection_class=DummyConnection ) assert len(w) == 1 assert str(w[0].message) == ( "The 'send_get_body_as' parameter is no longer necessary and will be removed in 8.0" ) t._verified_elasticsearch = True await t.perform_request("GET", "/", body={}) assert 1 == len(t.get_connection().calls) assert ("POST", "/", None, b"{}") == t.get_connection().calls[0][0] async def test_client_meta_header(self): t = AsyncTransport([{}], connection_class=DummyConnection) t._verified_elasticsearch = True await t.perform_request("GET", "/", body={}) assert len(t.get_connection().calls) == 1 headers = t.get_connection().calls[0][1]["headers"] assert re.match( r"^es=[0-9.]+p?,py=[0-9.]+p?,t=[0-9.]+p?$", headers["x-elastic-client-meta"], ) class DummyConnectionWithMeta(DummyConnection): HTTP_CLIENT_META = ("dm", "1.2.3") t = AsyncTransport([{}], connection_class=DummyConnectionWithMeta) t._verified_elasticsearch = True await t.perform_request("GET", "/", body={}, headers={"Custom": "header"}) assert len(t.get_connection().calls) == 1 headers = t.get_connection().calls[0][1]["headers"] assert re.match( r"^es=[0-9.]+p?,py=[0-9.]+p?,t=[0-9.]+p?,dm=1.2.3$", headers["x-elastic-client-meta"], ) assert headers["Custom"] == "header" async def test_client_meta_header_not_sent(self): t = AsyncTransport([{}], meta_header=False, connection_class=DummyConnection) t._verified_elasticsearch = True await t.perform_request("GET", "/", body={}) assert len(t.get_connection().calls) == 1 headers = t.get_connection().calls[0][1]["headers"] assert headers is None async def test_body_gets_encoded_into_bytes(self): t = AsyncTransport([{}], connection_class=DummyConnection) t._verified_elasticsearch = True await t.perform_request("GET", "/", body="你好") assert 1 == len(t.get_connection().calls) assert ( "GET", "/", None, b"\xe4\xbd\xa0\xe5\xa5\xbd", ) == t.get_connection().calls[0][0] async def test_body_bytes_get_passed_untouched(self): t = AsyncTransport([{}], connection_class=DummyConnection) t._verified_elasticsearch = True body = b"\xe4\xbd\xa0\xe5\xa5\xbd" await t.perform_request("GET", "/", body=body) assert 1 == 
len(t.get_connection().calls) assert ("GET", "/", None, body) == t.get_connection().calls[0][0] async def test_body_surrogates_replaced_encoded_into_bytes(self): t = AsyncTransport([{}], connection_class=DummyConnection) t._verified_elasticsearch = True await t.perform_request("GET", "/", body="你好\uda6a") assert 1 == len(t.get_connection().calls) assert ( "GET", "/", None, b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa", ) == t.get_connection().calls[0][0] async def test_kwargs_passed_on_to_connections(self): t = AsyncTransport([{"host": "google.com"}], port=123) t._verified_elasticsearch = True await t._async_call() assert 1 == len(t.connection_pool.connections) assert "http://google.com:123" == t.connection_pool.connections[0].host async def test_kwargs_passed_on_to_connection_pool(self): dt = object() t = AsyncTransport([{}, {}], dead_timeout=dt) t._verified_elasticsearch = True await t._async_call() assert dt is t.connection_pool.dead_timeout async def test_custom_connection_class(self): class MyConnection(object): def __init__(self, **kwargs): self.kwargs = kwargs t = AsyncTransport([{}], connection_class=MyConnection) t._verified_elasticsearch = True await t._async_call() assert 1 == len(t.connection_pool.connections) assert isinstance(t.connection_pool.connections[0], MyConnection) def test_add_connection(self): t = AsyncTransport([{}], randomize_hosts=False) t._verified_elasticsearch = True t.add_connection({"host": "google.com", "port": 1234}) assert 2 == len(t.connection_pool.connections) assert "http://google.com:1234" == t.connection_pool.connections[1].host async def test_request_will_fail_after_X_retries(self): t = AsyncTransport( [{"exception": ConnectionError("abandon ship")}], connection_class=DummyConnection, ) t._verified_elasticsearch = True connection_error = False try: await t.perform_request("GET", "/") except ConnectionError: connection_error = True assert connection_error assert 4 == len(t.get_connection().calls) async def test_failed_connection_will_be_marked_as_dead(self): t = AsyncTransport( [{"exception": ConnectionError("abandon ship")}] * 2, connection_class=DummyConnection, ) t._verified_elasticsearch = True connection_error = False try: await t.perform_request("GET", "/") except ConnectionError: connection_error = True assert connection_error assert 0 == len(t.connection_pool.connections) async def test_resurrected_connection_will_be_marked_as_live_on_success(self): for method in ("GET", "HEAD"): t = AsyncTransport([{}, {}], connection_class=DummyConnection) t._verified_elasticsearch = True await t._async_call() con1 = t.connection_pool.get_connection() con2 = t.connection_pool.get_connection() t.connection_pool.mark_dead(con1) t.connection_pool.mark_dead(con2) await t.perform_request(method, "/") assert 1 == len(t.connection_pool.connections) assert 1 == len(t.connection_pool.dead_count) async def test_sniff_will_use_seed_connections(self): t = AsyncTransport([{"data": CLUSTER_NODES}], connection_class=DummyConnection) await t._async_call() t.set_connections([{"data": "invalid"}]) await t.sniff_hosts() assert 1 == len(t.connection_pool.connections) assert "http://1.1.1.1:123" == t.get_connection().host async def test_sniff_on_start_fetches_and_uses_nodes_list(self): t = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_start=True, ) await t._async_call() await t.sniffing_task # Need to wait for the sniffing task to complete assert 1 == len(t.connection_pool.connections) assert "http://1.1.1.1:123" == t.get_connection().host 
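    # Where does "http://1.1.1.1:123" come from? Sniffing issues
    # GET /_nodes/_all/http and reads each node's http.publish_address.
    # A small helper (ours, unused by the tests) makes that concrete:
    @staticmethod
    def _publish_addresses(nodes_body):
        """Extract publish addresses from a /_nodes/_all/http body.

        >>> TestTransport._publish_addresses(CLUSTER_NODES)
        ['1.1.1.1:123']
        """
        nodes = json.loads(nodes_body)["nodes"]
        return [node["http"]["publish_address"] for node in nodes.values()]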
    async def test_sniff_on_start_ignores_sniff_timeout(self):
        t = AsyncTransport(
            [{"data": CLUSTER_NODES}],
            connection_class=DummyConnection,
            sniff_on_start=True,
            sniff_timeout=12,
        )
        await t._async_call()
        await t.sniffing_task  # Need to wait for the sniffing task to complete

        assert (("GET", "/_nodes/_all/http"), {"timeout": None}) == t.seed_connections[
            0
        ].calls[0]

    async def test_sniff_uses_sniff_timeout(self):
        t = AsyncTransport(
            [{"data": CLUSTER_NODES}],
            connection_class=DummyConnection,
            sniff_timeout=42,
        )
        await t._async_call()
        await t.sniff_hosts()

        assert (("GET", "/_nodes/_all/http"), {"timeout": 42}) == t.seed_connections[
            0
        ].calls[0]

    async def test_sniff_reuses_connection_instances_if_possible(self):
        t = AsyncTransport(
            [{"data": CLUSTER_NODES}, {"host": "1.1.1.1", "port": 123}],
            connection_class=DummyConnection,
            randomize_hosts=False,
        )
        await t._async_call()
        connection = t.connection_pool.connections[1]
        connection.delay = 3.0  # Add this delay to make the sniffing deterministic.
        await t.sniff_hosts()

        assert 1 == len(t.connection_pool.connections)
        assert connection is t.get_connection()

    async def test_sniff_on_fail_triggers_sniffing_on_fail(self):
        t = AsyncTransport(
            [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}],
            connection_class=DummyConnection,
            sniff_on_connection_fail=True,
            max_retries=0,
            randomize_hosts=False,
        )
        t._verified_elasticsearch = True
        await t._async_call()

        connection_error = False
        try:
            await t.perform_request("GET", "/")
        except ConnectionError:
            connection_error = True

        await t.sniffing_task  # Need to wait for the sniffing task to complete
        assert connection_error
        assert 1 == len(t.connection_pool.connections)
        assert "http://1.1.1.1:123" == t.get_connection().host

    @patch("elasticsearch._async.transport.AsyncTransport.sniff_hosts")
    async def test_sniff_on_fail_failing_does_not_prevent_retries(self, sniff_hosts):
        sniff_hosts.side_effect = [TransportError("sniff failed")]
        t = AsyncTransport(
            [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}],
            connection_class=DummyConnection,
            sniff_on_connection_fail=True,
            max_retries=3,
            randomize_hosts=False,
        )
        t._verified_elasticsearch = True
        await t._async_init()

        conn_err, conn_data = t.connection_pool.connections
        response = await t.perform_request("GET", "/")
        assert json.loads(CLUSTER_NODES) == response
        assert 1 == sniff_hosts.call_count
        assert 1 == len(conn_err.calls)
        assert 1 == len(conn_data.calls)

    async def test_sniff_after_n_seconds(self, event_loop):
        t = AsyncTransport(
            [{"data": CLUSTER_NODES}],
            connection_class=DummyConnection,
            sniffer_timeout=5,
        )
        t._verified_elasticsearch = True
        await t._async_call()

        for _ in range(4):
            await t.perform_request("GET", "/")
        assert 1 == len(t.connection_pool.connections)
        assert isinstance(t.get_connection(), DummyConnection)

        t.last_sniff = event_loop.time() - 5.1
        await t.perform_request("GET", "/")
        await t.sniffing_task  # Need to wait for the sniffing task to complete

        assert 1 == len(t.connection_pool.connections)
        assert "http://1.1.1.1:123" == t.get_connection().host
        assert event_loop.time() - 1 < t.last_sniff < event_loop.time() + 0.01

    async def test_sniff_7x_publish_host(self):
        # Test the response shape when a 7.x node has publish_host set
        # and the returned data is shaped in the fqdn/ip:port format.
t = AsyncTransport( [{"data": CLUSTER_NODES_7x_PUBLISH_HOST}], connection_class=DummyConnection, sniff_timeout=42, ) t._verified_elasticsearch = True await t._async_call() await t.sniff_hosts() # Ensure we parsed out the fqdn and port from the fqdn/ip:port string. assert t.connection_pool.connection_opts[0][1] == { "host": "somehost.tld", "port": 123, } @patch("elasticsearch._async.transport.AsyncTransport.sniff_hosts") async def test_sniffing_disabled_on_cloud_instances(self, sniff_hosts): t = AsyncTransport( [{}], sniff_on_start=True, sniff_on_connection_fail=True, connection_class=DummyConnection, cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", ) t._verified_elasticsearch = True await t._async_call() assert not t.sniff_on_connection_fail assert sniff_hosts.call_args is None # Assert not called. await t.perform_request("GET", "/", body={}) assert 1 == len(t.get_connection().calls) assert ("GET", "/", None, b"{}") == t.get_connection().calls[0][0] async def test_transport_close_closes_all_pool_connections(self): t = AsyncTransport([{}], connection_class=DummyConnection) t._verified_elasticsearch = True await t._async_call() assert not any([conn.closed for conn in t.connection_pool.connections]) await t.close() assert all([conn.closed for conn in t.connection_pool.connections]) t = AsyncTransport([{}, {}], connection_class=DummyConnection) t._verified_elasticsearch = True await t._async_call() assert not any([conn.closed for conn in t.connection_pool.connections]) await t.close() assert all([conn.closed for conn in t.connection_pool.connections]) async def test_sniff_on_start_error_if_no_sniffed_hosts(self, event_loop): t = AsyncTransport( [ {"data": ""}, {"data": ""}, {"data": ""}, ], connection_class=DummyConnection, sniff_on_start=True, ) t._verified_elasticsearch = True # If our initial sniffing attempt comes back # empty then we raise an error. with pytest.raises(TransportError) as e: await t._async_call() assert str(e.value) == "TransportError(N/A, 'Unable to sniff hosts.')" async def test_sniff_on_start_waits_for_sniff_to_complete(self, event_loop): t = AsyncTransport( [ {"delay": 1, "data": ""}, {"delay": 1, "data": ""}, {"delay": 1, "data": CLUSTER_NODES}, ], connection_class=DummyConnection, sniff_on_start=True, ) t._verified_elasticsearch = True # Start the timer right before the first task # and have a bunch of tasks come in immediately. tasks = [] start_time = event_loop.time() for _ in range(5): tasks.append(event_loop.create_task(t._async_call())) await asyncio.sleep(0) # Yield to the loop assert t.sniffing_task is not None # Tasks streaming in later. for _ in range(5): tasks.append(event_loop.create_task(t._async_call())) await asyncio.sleep(0.1) # Now that all the API calls have come in we wait for # them all to resolve before await asyncio.gather(*tasks) end_time = event_loop.time() duration = end_time - start_time # All the tasks blocked on the sniff of each node # and then resolved immediately after. 
assert 1 <= duration < 2 async def test_sniff_on_start_close_unlocks_async_calls(self, event_loop): t = AsyncTransport( [ {"delay": 10, "data": CLUSTER_NODES}, ], connection_class=DummyConnection, sniff_on_start=True, ) t._verified_elasticsearch = True # Start making _async_calls() before we cancel tasks = [] start_time = event_loop.time() for _ in range(3): tasks.append(event_loop.create_task(t._async_call())) await asyncio.sleep(0) # Close the transport while the sniffing task is active! :( await t.close() # Now we start waiting on all those _async_calls() await asyncio.gather(*tasks) end_time = event_loop.time() duration = end_time - start_time # A lot quicker than 10 seconds defined in 'delay' assert duration < 1 @pytest.mark.parametrize( ["headers", "data"], [ ( {}, '{"version":{"number":"6.99.0"},"tagline":"You Know, for Search"}', ), ( {}, '{"version":{"number":"7.13.0","build_flavor":"default"},"tagline":"You Know, for Search"}', ), ( {"X-elastic-product": "Elasticsearch"}, '{"version":{"number":"7.14.0","build_flavor":"default"},"tagline":"You Know, for Search"}', ), ], ) async def test_verify_elasticsearch(self, headers, data): t = AsyncTransport( [{"data": data, "headers": headers}], connection_class=DummyConnection ) await t.perform_request("GET", "/_search") assert t._verified_elasticsearch is True calls = t.connection_pool.connections[0].calls _ = [call[1]["headers"].pop("x-elastic-client-meta") for call in calls] assert calls == [ ( ("GET", "/"), { "headers": { "accept": "application/json", }, "timeout": None, }, ), ( ("GET", "/_search", None, None), { "headers": {}, "ignore": (), "timeout": None, }, ), ] @pytest.mark.parametrize( "exception_cls", [AuthorizationException, AuthenticationException] ) async def test_verify_elasticsearch_skips_on_auth_errors(self, exception_cls): t = AsyncTransport( [{"exception": exception_cls(exception_cls.status_code)}], connection_class=DummyConnection, ) with pytest.warns(ElasticsearchWarning) as warns: with pytest.raises(exception_cls): await t.perform_request( "GET", "/_search", headers={"Authorization": "testme"} ) # Assert that a warning was raised due to security privileges assert [str(w.message) for w in warns] == [ "The client is unable to verify that the server is " "Elasticsearch due security privileges on the server side" ] # Assert that the cluster is "verified" assert t._verified_elasticsearch is True # See that the headers were passed along to the "info" request made calls = t.connection_pool.connections[0].calls _ = [call[1]["headers"].pop("x-elastic-client-meta") for call in calls] assert calls == [ ( ("GET", "/"), { "headers": { "accept": "application/json", "authorization": "testme", }, "timeout": None, }, ), ( ("GET", "/_search", None, None), { "headers": { "Authorization": "testme", }, "ignore": (), "timeout": None, }, ), ] async def test_multiple_requests_verify_elasticsearch_success(self, event_loop): t = AsyncTransport( [ { "data": '{"version":{"number":"7.13.0","build_flavor":"default"},"tagline":"You Know, for Search"}', "delay": 1, } ], connection_class=DummyConnection, ) results = [] completed_at = [] async def request_task(): try: results.append(await t.perform_request("GET", "/_search")) except Exception as e: results.append(e) completed_at.append(event_loop.time()) # Execute a bunch of requests concurrently. 
tasks = [] start_time = event_loop.time() for _ in range(10): tasks.append(event_loop.create_task(request_task())) await asyncio.gather(*tasks) end_time = event_loop.time() # Exactly 10 results completed assert len(results) == 10 # No errors in the results assert all(isinstance(result, dict) for result in results) # Assert that this took longer than 2 seconds but less than 2.1 seconds duration = end_time - start_time assert 2 <= duration <= 2.1 # Assert that every result came after ~2 seconds, no fast completions. assert all( 2 <= completed_time - start_time <= 2.1 for completed_time in completed_at ) # Assert that the cluster is "verified" assert t._verified_elasticsearch is True # See that the first request is always 'GET /' for ES check calls = t.connection_pool.connections[0].calls assert calls[0][0] == ("GET", "/") # The rest of the requests are 'GET /_search' afterwards assert all(call[0][:2] == ("GET", "/_search") for call in calls[1:]) @pytest.mark.parametrize( ["build_flavor", "tagline", "product_error", "error_message"], [ ( "default", "BAD TAGLINE", _ProductChecker.UNSUPPORTED_PRODUCT, "The client noticed that the server is not Elasticsearch and we do not support this unknown product", ), ( "BAD BUILD FLAVOR", "BAD TAGLINE", _ProductChecker.UNSUPPORTED_PRODUCT, "The client noticed that the server is not Elasticsearch and we do not support this unknown product", ), ( "BAD BUILD FLAVOR", "You Know, for Search", _ProductChecker.UNSUPPORTED_DISTRIBUTION, "The client noticed that the server is not a supported distribution of Elasticsearch", ), ], ) async def test_multiple_requests_verify_elasticsearch_product_error( self, event_loop, build_flavor, tagline, product_error, error_message ): t = AsyncTransport( [ { "data": '{"version":{"number":"7.13.0","build_flavor":"%s"},"tagline":"%s"}' % (build_flavor, tagline), "delay": 1, } ], connection_class=DummyConnection, ) results = [] completed_at = [] async def request_task(): try: results.append(await t.perform_request("GET", "/_search")) except Exception as e: results.append(e) completed_at.append(event_loop.time()) # Execute a bunch of requests concurrently. tasks = [] start_time = event_loop.time() for _ in range(10): tasks.append(event_loop.create_task(request_task())) await asyncio.gather(*tasks) end_time = event_loop.time() # Exactly 10 results completed assert len(results) == 10 # All results were errors assert all(isinstance(result, UnsupportedProductError) for result in results) assert all(str(result) == error_message for result in results) # Assert that one request was made but not 2 requests. duration = end_time - start_time assert 1 <= duration <= 1.1 # Assert that every result came after ~1 seconds, no fast completions. 
        assert all(
            1 <= completed_time - start_time <= 1.1
            for completed_time in completed_at
        )

        # Assert that the cluster is definitely not Elasticsearch
        assert t._verified_elasticsearch == product_error

        # See that the first request is always 'GET /' for ES check
        calls = t.connection_pool.connections[0].calls
        assert calls[0][0] == ("GET", "/")

        # The rest of the requests are 'GET /_search' afterwards
        assert all(call[0][:2] == ("GET", "/_search") for call in calls[1:])

    @pytest.mark.parametrize("error_cls", [ConnectionError, NotFoundError])
    async def test_multiple_requests_verify_elasticsearch_retry_on_errors(
        self, event_loop, error_cls
    ):
        t = AsyncTransport(
            [
                {
                    "exception": error_cls(),
                    "delay": 0.1,
                }
            ],
            connection_class=DummyConnection,
        )

        results = []
        completed_at = []

        async def request_task():
            try:
                results.append(await t.perform_request("GET", "/_search"))
            except Exception as e:
                results.append(e)
            completed_at.append(event_loop.time())

        # Execute a bunch of requests concurrently.
        tasks = []
        start_time = event_loop.time()
        for _ in range(5):
            tasks.append(event_loop.create_task(request_task()))
        await asyncio.gather(*tasks)
        end_time = event_loop.time()

        # Exactly 5 results completed
        assert len(results) == 5

        # All results were errors and not wrapped in 'NotElasticsearchError'
        assert all(isinstance(result, error_cls) for result in results)

        # Assert that the 5 requests ran one after another
        # (5 transport requests x ~0.1s per connection attempt).
        duration = end_time - start_time
        assert 0.5 <= duration <= 0.6

        # Assert that the cluster is still in the unknown/unverified stage.
        assert t._verified_elasticsearch is None

        # See that the search API is never reached; it's the pre-flight
        # product-check requests ('GET /') that keep failing.
        calls = t.connection_pool.connections[0].calls
        assert len(calls) == 5
        assert all(call[0] == ("GET", "/") for call in calls)
elasticsearch-py-7.17.6/test_elasticsearch/test_cases.py000066400000000000000000000050311426163262700234450ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
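# Usage sketch for the helpers defined below (hypothetical test shown for
# illustration): subclasses drive the client against the recording
# DummyTransport and assert on the captured (method, url) pairs, so no
# HTTP traffic ever happens:
#
#     class TestMyAPI(ElasticsearchTestCase):
#         def test_ping_hits_root(self):
#             self.client.ping()
#             self.assert_url_called("HEAD", "/")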
from collections import defaultdict from unittest import SkipTest # noqa: F401 from unittest import TestCase from elasticsearch import Elasticsearch class DummyTransport(object): def __init__(self, hosts, responses=None, **kwargs): self.hosts = hosts self.responses = responses self.call_count = 0 self.calls = defaultdict(list) def perform_request(self, method, url, params=None, headers=None, body=None): resp = 200, {} if self.responses: resp = self.responses[self.call_count] self.call_count += 1 self.calls[(method, url)].append((params, headers, body)) return resp class ElasticsearchTestCase(TestCase): def setUp(self): super(ElasticsearchTestCase, self).setUp() self.client = Elasticsearch(transport_class=DummyTransport) def assert_call_count_equals(self, count): self.assertEqual(count, self.client.transport.call_count) def assert_url_called(self, method, url, count=1): self.assertIn((method, url), self.client.transport.calls) calls = self.client.transport.calls[(method, url)] self.assertEqual(count, len(calls)) return calls class TestElasticsearchTestCase(ElasticsearchTestCase): def test_our_transport_used(self): self.assertIsInstance(self.client.transport, DummyTransport) def test_start_with_0_call(self): self.assert_call_count_equals(0) def test_each_call_is_recorded(self): self.client.transport.perform_request("GET", "/") self.client.transport.perform_request("DELETE", "/42", params={}, body="body") self.assert_call_count_equals(2) self.assertEqual( [({}, None, "body")], self.assert_url_called("DELETE", "/42", 1) ) elasticsearch-py-7.17.6/test_elasticsearch/test_client/000077500000000000000000000000001426163262700232535ustar00rootroot00000000000000elasticsearch-py-7.17.6/test_elasticsearch/test_client/__init__.py000066400000000000000000000132201426163262700253620ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
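# A quick sketch of the normalization behaviour pinned down by
# TestNormalizeHosts below -- host strings are parsed into the kwargs
# dicts the transport expects (shape taken from the assertions below;
# dict key order is illustrative):
#
#     >>> _normalize_hosts(["https://user:secret@elastic.co/prefix"])
#     [{"host": "elastic.co", "http_auth": "user:secret", "use_ssl": True,
#       "port": 443, "url_prefix": "/prefix"}]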
from __future__ import unicode_literals

import warnings

from elasticsearch.client import Elasticsearch, _normalize_hosts

from ..test_cases import ElasticsearchTestCase, TestCase


class TestNormalizeHosts(TestCase):
    def test_none_uses_defaults(self):
        self.assertEqual([{}], _normalize_hosts(None))

    def test_strings_are_used_as_hostnames(self):
        self.assertEqual([{"host": "elastic.co"}], _normalize_hosts(["elastic.co"]))

    def test_strings_are_parsed_for_port_and_user(self):
        self.assertEqual(
            [
                {"host": "elastic.co", "port": 42},
                {"host": "elastic.co", "http_auth": "user:secre]"},
            ],
            _normalize_hosts(["elastic.co:42", "user:secre%5D@elastic.co"]),
        )

    def test_strings_are_parsed_for_scheme(self):
        self.assertEqual(
            [
                {"host": "elastic.co", "port": 42, "use_ssl": True},
                {
                    "host": "elastic.co",
                    "http_auth": "user:secret",
                    "use_ssl": True,
                    "port": 443,
                    "url_prefix": "/prefix",
                },
            ],
            _normalize_hosts(
                ["https://elastic.co:42", "https://user:secret@elastic.co/prefix"]
            ),
        )

    def test_dicts_are_left_unchanged(self):
        self.assertEqual(
            [{"host": "local", "extra": 123}],
            _normalize_hosts([{"host": "local", "extra": 123}]),
        )

    def test_single_string_is_wrapped_in_list(self):
        self.assertEqual([{"host": "elastic.co"}], _normalize_hosts("elastic.co"))


class TestClient(ElasticsearchTestCase):
    def test_request_timeout_is_passed_through_unescaped(self):
        self.client.ping(request_timeout=0.1)
        calls = self.assert_url_called("HEAD", "/")
        self.assertEqual([({"request_timeout": 0.1}, {}, None)], calls)

    def test_params_is_copied_when(self):
        rt = object()
        params = dict(request_timeout=rt)
        self.client.ping(params=params)
        self.client.ping(params=params)
        calls = self.assert_url_called("HEAD", "/", 2)
        self.assertEqual(
            [({"request_timeout": rt}, {}, None), ({"request_timeout": rt}, {}, None)],
            calls,
        )
        self.assertFalse(calls[0][0] is calls[1][0])

    def test_headers_is_copied_when(self):
        hv = "value"
        headers = dict(Authentication=hv)
        self.client.ping(headers=headers)
        self.client.ping(headers=headers)
        calls = self.assert_url_called("HEAD", "/", 2)
        self.assertEqual(
            [({}, {"authentication": hv}, None), ({}, {"authentication": hv}, None)],
            calls,
        )
        self.assertFalse(calls[0][0] is calls[1][0])

    def test_from_in_search(self):
        self.client.search(index="i", from_=10)
        calls = self.assert_url_called("POST", "/i/_search")
        self.assertEqual([({"from": "10"}, {}, None)], calls)

    def test_repr_contains_hosts(self):
        self.assertEqual("<Elasticsearch([{}])>", repr(self.client))

    def test_repr_subclass(self):
        class OtherElasticsearch(Elasticsearch):
            pass

        self.assertEqual("<OtherElasticsearch([{}])>", repr(OtherElasticsearch()))

    def test_repr_contains_hosts_passed_in(self):
        self.assertIn("es.org", repr(Elasticsearch(["es.org:123"])))

    def test_repr_truncates_host_to_5(self):
        hosts = [{"host": "es" + str(i)} for i in range(10)]
        es = Elasticsearch(hosts)
        self.assertNotIn("es5", repr(es))
        self.assertIn("...", repr(es))

    def test_index_uses_post_if_id_is_empty(self):
        self.client.index(index="my-index", id="", body={})
        self.assert_url_called("POST", "/my-index/_doc")

    def test_index_uses_put_if_id_is_not_empty(self):
        self.client.index(index="my-index", id=0, body={})
        self.assert_url_called("PUT", "/my-index/_doc/0")

    def test_tasks_get_without_task_id_deprecated(self):
        warnings.simplefilter("always", DeprecationWarning)
        with warnings.catch_warnings(record=True) as w:
            self.client.tasks.get()
        self.assert_url_called("GET", "/_tasks")
        self.assertEqual(len(w), 1)
        self.assertIs(w[0].category, DeprecationWarning)
        self.assertEqual(
            str(w[0].message),
            "Calling client.tasks.get() without a task_id is deprecated "
            "and
will be removed in v8.0. Use client.tasks.list() instead.", ) def test_tasks_get_with_task_id_not_deprecated(self): warnings.simplefilter("always", DeprecationWarning) with warnings.catch_warnings(record=True) as w: self.client.tasks.get("task-1") self.client.tasks.get(task_id="task-2") self.assert_url_called("GET", "/_tasks/task-1") self.assert_url_called("GET", "/_tasks/task-2") self.assertEqual(len(w), 0) elasticsearch-py-7.17.6/test_elasticsearch/test_client/test_cluster.py000066400000000000000000000035211426163262700263460ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from test_elasticsearch.test_cases import ElasticsearchTestCase class TestCluster(ElasticsearchTestCase): def test_stats_without_node_id(self): self.client.cluster.stats() self.assert_url_called("GET", "/_cluster/stats") def test_stats_with_node_id(self): self.client.cluster.stats("node-1") self.assert_url_called("GET", "/_cluster/stats/nodes/node-1") self.client.cluster.stats(node_id="node-2") self.assert_url_called("GET", "/_cluster/stats/nodes/node-2") def test_state_with_index_without_metric_defaults_to_all(self): self.client.cluster.state() self.assert_url_called("GET", "/_cluster/state") self.client.cluster.state(metric="cluster_name") self.assert_url_called("GET", "/_cluster/state/cluster_name") self.client.cluster.state(index="index-1") self.assert_url_called("GET", "/_cluster/state/_all/index-1") self.client.cluster.state(index="index-1", metric="cluster_name") self.assert_url_called("GET", "/_cluster/state/cluster_name/index-1") elasticsearch-py-7.17.6/test_elasticsearch/test_client/test_document.py000066400000000000000000000025501426163262700265040ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
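# URL mapping exercised by the tests below, in short: with no doc_type the
# 7.x client targets the typeless _create endpoint, while passing a
# doc_type falls back to the legacy typed path:
#
#     client.create(index="i", id="1", document={...})  # PUT /i/_create/1
#     client.create(index="i", id="1", doc_type="t",    # PUT /i/t/1/_create
#                   document={...})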
from test_elasticsearch.test_cases import ElasticsearchTestCase class TestIndices(ElasticsearchTestCase): def test_create_document(self): self.client.create(index="test-index", id="test-id", document={"key": "value"}) self.assert_url_called("PUT", "/test-index/_create/test-id") def test_create_document_with_type(self): self.client.create( index="test-index", id="test-id", doc_type="test-type", document={"key": "value"}, ) self.assert_url_called("PUT", "/test-index/test-type/test-id/_create") elasticsearch-py-7.17.6/test_elasticsearch/test_client/test_indices.py000066400000000000000000000035771426163262700263160ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from test_elasticsearch.test_cases import ElasticsearchTestCase class TestIndices(ElasticsearchTestCase): def test_create_one_index(self): self.client.indices.create(index="test-index") self.assert_url_called("PUT", "/test-index") def test_delete_multiple_indices(self): self.client.indices.delete(index=["test-index", "second.index", "third/index"]) self.assert_url_called("DELETE", "/test-index,second.index,third%2Findex") def test_exists_index(self): self.client.indices.exists(index="second.index,third/index") self.assert_url_called("HEAD", "/second.index,third%2Findex") def test_passing_empty_value_for_required_param_raises_exception(self): self.assertRaises(ValueError, self.client.indices.exists, index=None) self.assertRaises(ValueError, self.client.indices.exists, index=[]) self.assertRaises(ValueError, self.client.indices.exists, index="") def test_put_mapping_without_index(self): self.client.indices.put_mapping(doc_type="doc-type", body={}) self.assert_url_called("PUT", "/_all/doc-type/_mapping") elasticsearch-py-7.17.6/test_elasticsearch/test_client/test_overrides.py000066400000000000000000000253351426163262700266760ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. 
See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import warnings import pytest from test_elasticsearch.test_cases import ElasticsearchTestCase class TestOverriddenUrlTargets(ElasticsearchTestCase): def test_create(self): self.client.create(index="test-index", id="test-id", body={}) self.assert_url_called("PUT", "/test-index/_create/test-id") self.client.create( index="test-index", doc_type="test-type", id="test-id", body={} ) self.assert_url_called("PUT", "/test-index/test-type/test-id/_create") def test_delete(self): self.client.delete(index="test-index", id="test-id") self.assert_url_called("DELETE", "/test-index/_doc/test-id") self.client.delete(index="test-index", doc_type="test-type", id="test-id") self.assert_url_called("DELETE", "/test-index/test-type/test-id") def test_exists(self): self.client.exists(index="test-index", id="test-id") self.assert_url_called("HEAD", "/test-index/_doc/test-id") self.client.exists(index="test-index", doc_type="test-type", id="test-id") self.assert_url_called("HEAD", "/test-index/test-type/test-id") def test_explain(self): self.client.explain(index="test-index", id="test-id") self.assert_url_called("POST", "/test-index/_explain/test-id") self.client.explain(index="test-index", doc_type="test-type", id="test-id") self.assert_url_called("POST", "/test-index/test-type/test-id/_explain") def test_get(self): self.client.get(index="test-index", id="test-id") self.assert_url_called("GET", "/test-index/_doc/test-id") self.client.get(index="test-index", doc_type="test-type", id="test-id") self.assert_url_called("GET", "/test-index/test-type/test-id") def test_get_source(self): self.client.get_source(index="test-index", id="test-id") self.assert_url_called("GET", "/test-index/_source/test-id") self.client.get_source(index="test-index", doc_type="test-type", id="test-id") self.assert_url_called("GET", "/test-index/test-type/test-id/_source") def test_exists_source(self): self.client.exists_source(index="test-index", id="test-id") self.assert_url_called("HEAD", "/test-index/_source/test-id") self.client.exists_source( index="test-index", doc_type="test-type", id="test-id" ) self.assert_url_called("HEAD", "/test-index/test-type/test-id/_source") def test_index(self): self.client.index(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_doc") self.client.index(index="test-index", id="test-id", body={}) self.assert_url_called("PUT", "/test-index/_doc/test-id") self.client.index(index="test-index", doc_type="test-type", body={}) self.assert_url_called("POST", "/test-index/test-type") self.client.index( index="test-index", doc_type="test-type", id="test-id", body={} ) self.assert_url_called("PUT", "/test-index/test-type/test-id") self.client.index(index="test-index", doc_type="_doc", body={}) self.assert_url_called("POST", "/test-index/_doc", count=2) self.client.index(index="test-index", doc_type="_doc", id="test-id", 
body={}) self.assert_url_called("PUT", "/test-index/_doc/test-id", count=2) def test_termvectors(self): self.client.termvectors(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_termvectors") self.client.termvectors(index="test-index", id="test-id", body={}) self.assert_url_called("POST", "/test-index/_termvectors/test-id") self.client.termvectors(index="test-index", doc_type="test-type", body={}) self.assert_url_called("POST", "/test-index/test-type/_termvectors") self.client.termvectors( index="test-index", doc_type="test-type", id="test-id", body={} ) self.assert_url_called("POST", "/test-index/test-type/test-id/_termvectors") def test_mtermvectors(self): self.client.mtermvectors(index="test-index", body={}) self.assert_url_called("POST", "/test-index/_mtermvectors") self.client.mtermvectors(index="test-index", doc_type="test-type", body={}) self.assert_url_called("POST", "/test-index/test-type/_mtermvectors") def test_update(self): self.client.update(index="test-index", id="test-id", body={}) self.assert_url_called("POST", "/test-index/_update/test-id") self.client.update( index="test-index", doc_type="test-type", id="test-id", body={} ) self.assert_url_called("POST", "/test-index/test-type/test-id/_update") def test_cluster_state(self): self.client.cluster.state() self.assert_url_called("GET", "/_cluster/state") self.client.cluster.state(index="test-index") self.assert_url_called("GET", "/_cluster/state/_all/test-index") self.client.cluster.state(index="test-index", metric="test-metric") self.assert_url_called("GET", "/_cluster/state/test-metric/test-index") def test_cluster_stats(self): self.client.cluster.stats() self.assert_url_called("GET", "/_cluster/stats") self.client.cluster.stats(node_id="test-node") self.assert_url_called("GET", "/_cluster/stats/nodes/test-node") def test_indices_put_mapping(self): self.client.indices.put_mapping(body={}) self.assert_url_called("PUT", "/_mapping") self.client.indices.put_mapping(index="test-index", body={}) self.assert_url_called("PUT", "/test-index/_mapping") self.client.indices.put_mapping( index="test-index", doc_type="test-type", body={} ) self.assert_url_called("PUT", "/test-index/test-type/_mapping") self.client.indices.put_mapping(doc_type="test-type", body={}) self.assert_url_called("PUT", "/_all/test-type/_mapping") def test_tasks_get(self): with pytest.warns(DeprecationWarning): self.client.tasks.get() def test_scroll(self): self.client.scroll( scroll_id="scroll-id", scroll="5m", rest_total_hits_as_int=True ) calls = self.client.transport.calls assert calls == { ("POST", "/_search/scroll"): [ ( {"rest_total_hits_as_int": b"true"}, {"accept": "application/json", "content-type": "application/json"}, {"scroll": "5m", "scroll_id": "scroll-id"}, ) ] } def test_clear_scroll(self): self.client.clear_scroll(scroll_id="scroll-id") calls = self.client.transport.calls assert calls == { ("DELETE", "/_search/scroll"): [ ( {}, {"accept": "application/json", "content-type": "application/json"}, {"scroll_id": "scroll-id"}, ) ] } def test_doc_type_works_for_apis_with_type(self): with warnings.catch_warnings(record=True) as w: self.client.license.post_start_trial(type="trial") assert w == [] calls = self.client.transport.calls assert calls == { ("POST", "/_license/start_trial"): [ ({"type": b"trial"}, {"accept": "application/json"}, None) ] } self.client.transport.calls.pop(("POST", "/_license/start_trial")) with warnings.catch_warnings(record=True) as w: self.client.license.post_start_trial(params={"type": "trial"}) assert w 
== [] calls = self.client.transport.calls assert calls == { ("POST", "/_license/start_trial"): [ ({"type": "trial"}, {"accept": "application/json"}, None) ] } self.client.transport.calls.pop(("POST", "/_license/start_trial")) # Now we try using 'doc_type' in all the same places and see # that things still work but we get deprecation warnings. with pytest.warns(DeprecationWarning) as w: self.client.license.post_start_trial(doc_type="trial") assert str(w[0].message) == ( "The 'doc_type' parameter is deprecated, use 'type' for this API instead. See " "https://github.com/elastic/elasticsearch-py/issues/1698 for more information" ) calls = self.client.transport.calls assert calls == { ("POST", "/_license/start_trial"): [ ({"type": b"trial"}, {"accept": "application/json"}, None) ] } self.client.transport.calls.pop(("POST", "/_license/start_trial")) with pytest.warns(DeprecationWarning) as w: self.client.license.post_start_trial(params={"doc_type": "trial"}) assert str(w[0].message) == ( "The 'doc_type' parameter is deprecated, use 'type' for this API instead. See " "https://github.com/elastic/elasticsearch-py/issues/1698 for more information" ) calls = self.client.transport.calls assert calls == { ("POST", "/_license/start_trial"): [ ({"type": "trial"}, {"accept": "application/json"}, None) ] } elasticsearch-py-7.17.6/test_elasticsearch/test_client/test_utils.py000066400000000000000000000441551426163262700260350ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
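
# Editor's note: the 'doc_type' tests at the end of test_overrides.py above
# rely on the client renaming a deprecated keyword argument and emitting a
# DeprecationWarning. A minimal sketch of that rename pattern is kept below
# as a comment -- live code here would displace the `from __future__` import
# that must come first in the module -- and the helper name 'rename_kwarg'
# is hypothetical, not the client's actual implementation:
#
#     import warnings
#
#     def rename_kwarg(kwargs, old="doc_type", new="type"):
#         if old in kwargs:
#             warnings.warn(
#                 "The %r parameter is deprecated, use %r for this API instead."
#                 % (old, new),
#                 category=DeprecationWarning,
#             )
#             kwargs[new] = kwargs.pop(old)
#         return kwargs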
from __future__ import unicode_literals import os import warnings import pytest from elasticsearch.client.utils import _bulk_body, _escape, _make_path, query_params from elasticsearch.compat import PY2 from ..test_cases import SkipTest, TestCase class TestQueryParams(TestCase): def setup_method(self, _): self.calls = [] @query_params("simple_param") def func_to_wrap(self, *args, **kwargs): self.calls.append((args, kwargs)) @query_params( "query_only", "query_and_body", body_params=["query_and_body", "body_only", "from_"], ) def func_with_body_params(self, *args, **kwargs): self.calls.append((args, kwargs)) @query_params( "query_only", "query_and_body", body_params=["query_and_body", "body_only"], body_required=True, ) def func_with_body_params_required(self, *args, **kwargs): self.calls.append((args, kwargs)) @query_params("query_only", body_name="named_body") def func_with_named_body(self, *args, **kwargs): self.calls.append((args, kwargs)) @query_params( request_mimetypes=["application/json"], response_mimetypes=["text/plain", "application/json"], ) def func_with_mimetypes(self, *args, **kwargs): self.calls.append((args, kwargs)) def test_handles_params(self): self.func_to_wrap(params={"simple_param_2": "2"}, simple_param="3") self.assertEqual( self.calls, [ ( (), { "params": {"simple_param": b"3", "simple_param_2": "2"}, "headers": {}, }, ) ], ) def test_handles_headers(self): self.func_to_wrap(headers={"X-Opaque-Id": "app-1"}) self.assertEqual( self.calls, [((), {"params": {}, "headers": {"x-opaque-id": "app-1"}})] ) def test_handles_opaque_id(self): self.func_to_wrap(opaque_id="request-id") self.assertEqual( self.calls, [((), {"params": {}, "headers": {"x-opaque-id": "request-id"}})] ) def test_handles_empty_none_and_normalization(self): self.func_to_wrap(params=None) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {}})) self.func_to_wrap(headers=None) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {}})) self.func_to_wrap(headers=None, params=None) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {}})) self.func_to_wrap(headers={}, params={}) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {}})) self.func_to_wrap(headers={"X": "y"}) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {"x": "y"}})) def test_per_call_authentication(self): self.func_to_wrap(api_key=("name", "key")) self.assertEqual( self.calls[-1], ((), {"headers": {"authorization": "ApiKey bmFtZTprZXk="}, "params": {}}), ) self.func_to_wrap(http_auth=("user", "password")) self.assertEqual( self.calls[-1], ( (), { "headers": {"authorization": "Basic dXNlcjpwYXNzd29yZA=="}, "params": {}, }, ), ) self.func_to_wrap(http_auth="abcdef") self.assertEqual( self.calls[-1], ((), {"headers": {"authorization": "Basic abcdef"}, "params": {}}), ) # If one or the other is 'None' it's all good! self.func_to_wrap(http_auth=None, api_key=None) self.assertEqual(self.calls[-1], ((), {"headers": {}, "params": {}})) self.func_to_wrap(http_auth="abcdef", api_key=None) self.assertEqual( self.calls[-1], ((), {"headers": {"authorization": "Basic abcdef"}, "params": {}}), ) # If both are given values an error is raised. 
with self.assertRaises(ValueError) as e: self.func_to_wrap(http_auth="key", api_key=("1", "2")) self.assertEqual( str(e.exception), "Only one of 'http_auth' and 'api_key' may be passed at a time", ) def test_body_params(self): with warnings.catch_warnings(record=True) as w: # No params, should be same as an empty call self.func_with_body_params() assert self.calls[-1] == ((), {"headers": {}, "params": {}}) # No overlap with 'body_params' self.func_with_body_params(query_only=1) assert self.calls[-1] == ( (), {"headers": {}, "params": {"query_only": "1"}}, ) # One body parameter self.func_with_body_params(query_and_body=1) assert self.calls[-1] == ( (), {"body": {"query_and_body": 1}, "headers": {}, "params": {}}, ) self.func_with_body_params(body_only=1) assert self.calls[-1] == ( (), {"body": {"body_only": 1}, "headers": {}, "params": {}}, ) # Multiple body field parameters self.func_with_body_params(query_and_body=1, body_only=1) assert self.calls[-1] == ( (), { "body": {"query_and_body": 1, "body_only": 1}, "headers": {}, "params": {}, }, ) # All the parameters self.func_with_body_params(query_only=1, query_and_body=1, body_only=1) assert self.calls[-1] == ( (), { "body": {"query_and_body": 1, "body_only": 1}, "headers": {}, "params": {"query_only": "1"}, }, ) # There should be no 'DeprecationWarnings' # emitted for any of the above cases. assert w == [] # Positional arguments pass-through self.func_with_body_params(1) assert self.calls[-1] == ( (1,), {"headers": {}, "params": {}}, ) # Positional arguments disable body serialization self.func_with_body_params(1, query_and_body=1) assert self.calls[-1] == ( (1,), {"headers": {}, "params": {"query_and_body": "1"}}, ) def test_body_params_errors(self): with pytest.raises(TypeError) as e: self.func_with_body_params(body={}, body_only=True) assert str(e.value) == ( "The 'body_only' parameter is only serialized in the request body " "and can't be combined with the 'body' parameter. Either stop using " "the 'body' parameter and use keyword-arguments only or move the " "specified parameters into the 'body'. " "See https://github.com/elastic/elasticsearch-py/issues/1698 for more information" ) # Positional arguments disable body serialization with pytest.raises(TypeError) as e: self.func_with_body_params(1, body_only=1) assert str(e.value) == ( "The 'body_only' parameter is only serialized in the request body " "and can't be combined with the 'body' parameter. Either stop using " "the 'body' parameter and use keyword-arguments only or move the " "specified parameters into the 'body'. " "See https://github.com/elastic/elasticsearch-py/issues/1698 for more information" ) def test_body_params_deprecations(self): # APIs with body_params deprecate the 'body' parameter. with pytest.warns(DeprecationWarning) as w: self.func_with_body_params(body={}) assert self.calls[-1] == ((), {"body": {}, "headers": {}, "params": {}}) assert len(w) == 1 assert w[0].category == DeprecationWarning assert str(w[0].message) == ( "The 'body' parameter is deprecated for the " "'func_with_body_params' API and will be removed in a future version. " "Instead use API parameters directly. 
" "See https://github.com/elastic/elasticsearch-py/issues/1698 for more information" ) # APIs that don't have body parameters don't have a deprecated 'body' parameter with warnings.catch_warnings(record=True) as w: self.func_to_wrap(body={}) assert self.calls[-1] == ((), {"body": {}, "headers": {}, "params": {}}) assert w == [] # Positional arguments are deprecated for all APIs with pytest.warns(DeprecationWarning) as w: self.func_to_wrap(1) assert self.calls[-1] == ((1,), {"headers": {}, "params": {}}) assert len(w) == 1 assert w[0].category == DeprecationWarning assert str(w[0].message) == ( "Using positional arguments for APIs is deprecated and will be disabled in " "8.0.0. Instead use only keyword arguments for all APIs. See https://github.com/" "elastic/elasticsearch-py/issues/1698 for more information" ) def test_body_params_removes_underscore_suffix(self): self.func_with_body_params(from_=0) assert self.calls[-1] == ( (), {"body": {"from": 0}, "headers": {}, "params": {}}, ) def test_named_body_params(self): # Passing 'named_body' results in no error or warning with warnings.catch_warnings(record=True) as w: self.func_with_named_body(named_body=[]) assert self.calls[-1] == ((), {"body": [], "headers": {}, "params": {}}) assert w == [] # Passing 'body' is a warning but works with warnings.catch_warnings(record=True) as w: self.func_with_named_body(body=[]) assert self.calls[-1] == ((), {"body": [], "headers": {}, "params": {}}) assert len(w) == 1 assert str(w[0].message) == ( "The 'body' parameter is deprecated for the 'func_with_named_body' " "API and will be removed in a future version. Instead use the 'named_body' parameter. " "See https://github.com/elastic/elasticsearch-py/issues/1698 for more information" ) # Passing both 'named_body' and 'body' is an error self.calls[:] = [] with warnings.catch_warnings(record=True) as w: with pytest.raises(TypeError) as e: self.func_with_named_body(named_body=[], body=[]) assert self.calls == [] assert w == [] assert str(e.value) == ( "Can't use 'named_body' and 'body' parameters together because 'named_body' " "is an alias for 'body'. Instead you should only use the 'named_body' parameter. " "See https://github.com/elastic/elasticsearch-py/issues/1698 for more information" ) # Positional arguments aren't impacted. Only warning is for positional args with warnings.catch_warnings(record=True) as w: self.func_with_named_body([]) assert self.calls == [(([],), {"headers": {}, "params": {}})] assert len(w) == 1 assert str(w[0].message) == ( "Using positional arguments for APIs is deprecated and will be disabled in " "8.0.0. Instead use only keyword arguments for all APIs. 
" "See https://github.com/elastic/elasticsearch-py/issues/1698 for more information" ) def test_body_required_with_body_fields(self): self.func_with_body_params_required(query_only=True) assert self.calls[-1] == ( (), {"body": {}, "headers": {}, "params": {"query_only": b"true"}}, ) self.func_with_body_params_required(body_only=True) assert self.calls[-1] == ( (), {"body": {"body_only": True}, "headers": {}, "params": {}}, ) self.func_with_body_params_required(query_and_body=True) assert self.calls[-1] == ( (), {"body": {"query_and_body": True}, "headers": {}, "params": {}}, ) self.func_with_body_params_required(body={}) assert self.calls[-1] == ((), {"body": {}, "headers": {}, "params": {}}) self.func_with_body_params_required(body={"hello": "world"}) assert self.calls[-1] == ( (), {"body": {"hello": "world"}, "headers": {}, "params": {}}, ) self.func_with_body_params_required() assert self.calls[-1] == ((), {"body": {}, "headers": {}, "params": {}}) def test_mimetype_headers(self): compat_envvar = os.environ.pop("ELASTIC_CLIENT_APIVERSIONING", None) try: self.func_with_mimetypes() assert self.calls[-1] == ( (), { "headers": { "accept": "text/plain,application/json", "content-type": "application/json", }, "params": {}, }, ) self.func_with_mimetypes(headers={}) assert self.calls[-1] == ( (), { "headers": { "accept": "text/plain,application/json", "content-type": "application/json", }, "params": {}, }, ) self.func_with_mimetypes( headers={ "Content-Type": "application/x-octet-stream", "AccepT": "application/x-octet-stream", } ) assert self.calls[-1] == ( (), { "headers": { "accept": "application/x-octet-stream", "content-type": "application/x-octet-stream", }, "params": {}, }, ) finally: if compat_envvar: os.environ["ELASTIC_CLIENT_APIVERSIONING"] = compat_envvar def test_mimetype_headers_compatibility_mode(self): compat_envvar = os.environ.pop("ELASTIC_CLIENT_APIVERSIONING", None) try: for compat_mode_enabled in ["true", "1"]: os.environ["ELASTIC_CLIENT_APIVERSIONING"] = compat_mode_enabled self.func_with_mimetypes() assert self.calls[-1] == ( (), { "headers": { "accept": "text/plain,application/vnd.elasticsearch+json;compatible-with=7", "content-type": "application/vnd.elasticsearch+json;compatible-with=7", }, "params": {}, }, ) self.func_with_mimetypes(headers={"Content-Type": "text/plain"}) assert self.calls[-1] == ( (), { "headers": { "accept": "text/plain,application/vnd.elasticsearch+json;compatible-with=7", "content-type": "text/plain", }, "params": {}, }, ) finally: if compat_envvar: os.environ["ELASTIC_CLIENT_APIVERSIONING"] = compat_envvar class TestMakePath(TestCase): def test_handles_unicode(self): id = "中文" self.assertEqual( "/some-index/type/%E4%B8%AD%E6%96%87", _make_path("some-index", "type", id) ) def test_handles_utf_encoded_string(self): if not PY2: raise SkipTest("Only relevant for py2") id = "中文".encode("utf-8") self.assertEqual( "/some-index/type/%E4%B8%AD%E6%96%87", _make_path("some-index", "type", id) ) class TestEscape(TestCase): def test_handles_ascii(self): string = "abc123" self.assertEqual(b"abc123", _escape(string)) def test_handles_unicode(self): string = "中文" self.assertEqual(b"\xe4\xb8\xad\xe6\x96\x87", _escape(string)) def test_handles_bytestring(self): string = b"celery-task-meta-c4f1201f-eb7b-41d5-9318-a75a8cfbdaa0" self.assertEqual(string, _escape(string)) class TestBulkBody(TestCase): def test_proper_bulk_body_as_string_is_not_modified(self): string_body = '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n' self.assertEqual(string_body, 
_bulk_body(None, string_body)) def test_proper_bulk_body_as_bytestring_is_not_modified(self): bytestring_body = b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n' self.assertEqual(bytestring_body, _bulk_body(None, bytestring_body)) def test_bulk_body_as_string_adds_trailing_newline(self): string_body = '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"' self.assertEqual( '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n', _bulk_body(None, string_body), ) def test_bulk_body_as_bytestring_adds_trailing_newline(self): bytestring_body = b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"' self.assertEqual( b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n', _bulk_body(None, bytestring_body), ) elasticsearch-py-7.17.6/test_elasticsearch/test_connection.py000066400000000000000000001161111426163262700245070ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import gzip import io import json import re import ssl import warnings from platform import python_version import pytest import urllib3 from mock import Mock, patch from requests.auth import AuthBase from urllib3._collections import HTTPHeaderDict from elasticsearch import Elasticsearch, __versionstr__ from elasticsearch.compat import reraise_exceptions from elasticsearch.connection import ( Connection, RequestsHttpConnection, Urllib3HttpConnection, ) from elasticsearch.exceptions import ( ConflictError, ConnectionError, NotFoundError, RequestError, TransportError, ) from .test_cases import SkipTest, TestCase CLOUD_ID_PORT_443 = "cluster:d2VzdGV1cm9wZS5henVyZS5lbGFzdGljLWNsb3VkLmNvbTo0NDMkZTdkZTlmMTM0NWU0NDkwMjgzZDkwM2JlNWI2ZjkxOWUk" CLOUD_ID_KIBANA = "cluster:d2VzdGV1cm9wZS5henVyZS5lbGFzdGljLWNsb3VkLmNvbSQ4YWY3ZWUzNTQyMGY0NThlOTAzMDI2YjQwNjQwODFmMiQyMDA2MTU1NmM1NDA0OTg2YmZmOTU3ZDg0YTZlYjUxZg==" CLOUD_ID_PORT_AND_KIBANA = "cluster:d2VzdGV1cm9wZS5henVyZS5lbGFzdGljLWNsb3VkLmNvbTo5MjQzJGM2NjM3ZjMxMmM1MjQzY2RhN2RlZDZlOTllM2QyYzE5JA==" CLOUD_ID_NO_PORT_OR_KIBANA = "cluster:d2VzdGV1cm9wZS5henVyZS5lbGFzdGljLWNsb3VkLmNvbSRlN2RlOWYxMzQ1ZTQ0OTAyODNkOTAzYmU1YjZmOTE5ZSQ=" def gzip_decompress(data): buf = gzip.GzipFile(fileobj=io.BytesIO(data), mode="rb") return buf.read() class TestBaseConnection(TestCase): def test_parse_cloud_id(self): # Embedded port in cloud_id con = Connection(cloud_id=CLOUD_ID_PORT_AND_KIBANA) self.assertEqual( con.host, "https://c6637f312c5243cda7ded6e99e3d2c19.westeurope.azure.elastic-cloud.com:9243", ) self.assertEqual(con.port, 9243) self.assertEqual( con.hostname, "c6637f312c5243cda7ded6e99e3d2c19.westeurope.azure.elastic-cloud.com", ) # Embedded port but overridden con = Connection( cloud_id=CLOUD_ID_PORT_AND_KIBANA, port=443, ) self.assertEqual( con.host, 
"https://c6637f312c5243cda7ded6e99e3d2c19.westeurope.azure.elastic-cloud.com:443", ) self.assertEqual(con.port, 443) self.assertEqual( con.hostname, "c6637f312c5243cda7ded6e99e3d2c19.westeurope.azure.elastic-cloud.com", ) # Port is 443, removed by default. con = Connection(cloud_id=CLOUD_ID_PORT_443) self.assertEqual( con.host, "https://e7de9f1345e4490283d903be5b6f919e.westeurope.azure.elastic-cloud.com", ) self.assertEqual(con.port, None) self.assertEqual( con.hostname, "e7de9f1345e4490283d903be5b6f919e.westeurope.azure.elastic-cloud.com", ) # No port, contains Kibana UUID con = Connection(cloud_id=CLOUD_ID_KIBANA) self.assertEqual( con.host, "https://8af7ee35420f458e903026b4064081f2.westeurope.azure.elastic-cloud.com", ) self.assertEqual(con.port, None) self.assertEqual( con.hostname, "8af7ee35420f458e903026b4064081f2.westeurope.azure.elastic-cloud.com", ) def test_empty_warnings(self): con = Connection() with warnings.catch_warnings(record=True) as w: con._raise_warnings(()) con._raise_warnings([]) self.assertEqual(w, []) def test_raises_warnings(self): con = Connection() with warnings.catch_warnings(record=True) as warn: con._raise_warnings(['299 Elasticsearch-7.6.1-aa751 "this is deprecated"']) self.assertEqual([str(w.message) for w in warn], ["this is deprecated"]) with warnings.catch_warnings(record=True) as warn: con._raise_warnings( [ '299 Elasticsearch-7.6.1-aa751 "this is also deprecated"', '299 Elasticsearch-7.6.1-aa751 "this is also deprecated"', '299 Elasticsearch-7.6.1-aa751 "guess what? deprecated"', ] ) self.assertEqual( [str(w.message) for w in warn], ["this is also deprecated", "guess what? deprecated"], ) def test_raises_warnings_when_folded(self): con = Connection() with warnings.catch_warnings(record=True) as warn: con._raise_warnings( [ '299 Elasticsearch-7.6.1-aa751 "warning",' '299 Elasticsearch-7.6.1-aa751 "folded"', ] ) self.assertEqual([str(w.message) for w in warn], ["warning", "folded"]) def test_ipv6_host_and_port(self): for kwargs, expected_host in [ ({"host": "::1"}, "http://[::1]:9200"), ({"host": "::1", "port": 443}, "http://[::1]:443"), ({"host": "::1", "use_ssl": True}, "https://[::1]:9200"), ({"host": "127.0.0.1", "port": 1234}, "http://127.0.0.1:1234"), ({"host": "localhost", "use_ssl": True}, "https://localhost:9200"), ]: conn = Connection(**kwargs) assert conn.host == expected_host def test_meta_header(self): conn = Connection(meta_header=True) assert conn.meta_header is True conn = Connection(meta_header=False) assert conn.meta_header is False with pytest.raises(TypeError) as e: Connection(meta_header=1) assert str(e.value) == "meta_header must be of type bool" class TestUrllib3Connection(TestCase): def _get_mock_connection( self, connection_params={}, response_body=b"{}", response_headers={} ): con = Urllib3HttpConnection(**connection_params) def _dummy_urlopen(*args, **kwargs): dummy_response = Mock() dummy_response.headers = HTTPHeaderDict(response_headers) dummy_response.status = 200 dummy_response.data = response_body _dummy_urlopen.call_args = (args, kwargs) return dummy_response con.pool.urlopen = _dummy_urlopen return con def test_ssl_context(self): try: context = ssl.create_default_context() except AttributeError: # if create_default_context raises an AttributeError Exception # it means SSLContext is not available for that version of python # and we should skip this test. 
raise SkipTest(
                "Test test_ssl_context is skipped because SSLContext is not available for this version of Python"
            )

        con = Urllib3HttpConnection(use_ssl=True, ssl_context=context)
        self.assertEqual(len(con.pool.conn_kw.keys()), 1)
        self.assertIsInstance(con.pool.conn_kw["ssl_context"], ssl.SSLContext)
        self.assertTrue(con.use_ssl)

    def test_opaque_id(self):
        con = Urllib3HttpConnection(opaque_id="app-1")
        self.assertEqual(con.headers["x-opaque-id"], "app-1")

    def test_http_cloud_id(self):
        con = Urllib3HttpConnection(
            cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng=="
        )
        self.assertTrue(con.use_ssl)
        self.assertEqual(
            con.host, "https://4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io"
        )
        self.assertEqual(con.port, None)
        self.assertEqual(
            con.hostname, "4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io"
        )
        self.assertTrue(con.http_compress)

        con = Urllib3HttpConnection(
            cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==",
            port=9243,
        )
        self.assertEqual(
            con.host,
            "https://4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io:9243",
        )
        self.assertEqual(con.port, 9243)
        self.assertEqual(
            con.hostname, "4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io"
        )

    def test_api_key_auth(self):
        # test with tuple
        con = Urllib3HttpConnection(
            cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==",
            api_key=("elastic", "changeme1"),
        )
        self.assertEqual(
            con.headers["authorization"], "ApiKey ZWxhc3RpYzpjaGFuZ2VtZTE="
        )
        self.assertEqual(
            con.host, "https://4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io"
        )

        # test with base64 encoded string
        con = Urllib3HttpConnection(
            cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==",
            api_key="ZWxhc3RpYzpjaGFuZ2VtZTI=",
        )
        self.assertEqual(
            con.headers["authorization"], "ApiKey ZWxhc3RpYzpjaGFuZ2VtZTI="
        )
        self.assertEqual(
            con.host, "https://4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io"
        )

    def test_no_http_compression(self):
        con = self._get_mock_connection()
        self.assertFalse(con.http_compress)
        self.assertNotIn("accept-encoding", con.headers)

        con.perform_request("GET", "/")

        (_, _, req_body), kwargs = con.pool.urlopen.call_args
        self.assertFalse(req_body)
        self.assertNotIn("accept-encoding", kwargs["headers"])
        self.assertNotIn("content-encoding", kwargs["headers"])

    def test_http_compression(self):
        con = self._get_mock_connection({"http_compress": True})
        self.assertTrue(con.http_compress)
        self.assertEqual(con.headers["accept-encoding"], "gzip,deflate")
        # 'content-encoding' shouldn't be set at a connection level.
        # Should be applied only if the request is sent with a body.
self.assertNotIn("content-encoding", con.headers) con.perform_request("GET", "/", body=b"{}") (_, _, req_body), kwargs = con.pool.urlopen.call_args self.assertEqual(gzip_decompress(req_body), b"{}") self.assertEqual(kwargs["headers"]["accept-encoding"], "gzip,deflate") self.assertEqual(kwargs["headers"]["content-encoding"], "gzip") con.perform_request("GET", "/") (_, _, req_body), kwargs = con.pool.urlopen.call_args self.assertFalse(req_body) self.assertEqual(kwargs["headers"]["accept-encoding"], "gzip,deflate") self.assertNotIn("content-encoding", kwargs["headers"]) def test_cloud_id_http_compress_override(self): # 'http_compress' will be 'True' by default for connections with # 'cloud_id' set but should prioritize user-defined values. con = Urllib3HttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", ) self.assertEqual(con.http_compress, True) con = Urllib3HttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", http_compress=False, ) self.assertEqual(con.http_compress, False) con = Urllib3HttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", http_compress=True, ) self.assertEqual(con.http_compress, True) def test_default_user_agent(self): con = Urllib3HttpConnection() self.assertEqual( con._get_default_user_agent(), "elasticsearch-py/%s (Python %s)" % (__versionstr__, python_version()), ) def test_timeout_set(self): con = Urllib3HttpConnection(timeout=42) self.assertEqual(42, con.timeout) def test_keep_alive_is_on_by_default(self): con = Urllib3HttpConnection() self.assertEqual( { "connection": "keep-alive", "user-agent": con._get_default_user_agent(), }, con.headers, ) def test_http_auth(self): con = Urllib3HttpConnection(http_auth="username:secret") self.assertEqual( { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", "connection": "keep-alive", "user-agent": con._get_default_user_agent(), }, con.headers, ) def test_http_auth_tuple(self): con = Urllib3HttpConnection(http_auth=("username", "secret")) self.assertEqual( { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", "connection": "keep-alive", "user-agent": con._get_default_user_agent(), }, con.headers, ) def test_http_auth_list(self): con = Urllib3HttpConnection(http_auth=["username", "secret"]) self.assertEqual( { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", "connection": "keep-alive", "user-agent": con._get_default_user_agent(), }, con.headers, ) def test_uses_https_if_verify_certs_is_off(self): with warnings.catch_warnings(record=True) as w: con = Urllib3HttpConnection(use_ssl=True, verify_certs=False) self.assertEqual(1, len(w)) self.assertEqual( "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure.", str(w[0].message), ) self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) def test_nowarn_when_uses_https_if_verify_certs_is_off(self): with warnings.catch_warnings(record=True) as w: con = Urllib3HttpConnection( use_ssl=True, verify_certs=False, ssl_show_warn=False ) self.assertEqual(0, len(w)) self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) def test_doesnt_use_https_if_not_specified(self): con = Urllib3HttpConnection() self.assertIsInstance(con.pool, urllib3.HTTPConnectionPool) def test_no_warning_when_using_ssl_context(self): ctx = ssl.create_default_context() with 
warnings.catch_warnings(record=True) as w: Urllib3HttpConnection(ssl_context=ctx) self.assertEqual(0, len(w)) def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): for kwargs in ( {"ssl_show_warn": False}, {"ssl_show_warn": True}, {"verify_certs": True}, {"verify_certs": False}, {"ca_certs": "/path/to/certs"}, {"ssl_show_warn": True, "ca_certs": "/path/to/certs"}, ): kwargs["ssl_context"] = ssl.create_default_context() with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") Urllib3HttpConnection(**kwargs) self.assertEqual(1, len(w)) self.assertEqual( "When using `ssl_context`, all other SSL related kwargs are ignored", str(w[0].message), ) @patch("elasticsearch.connection.base.logger") def test_uncompressed_body_logged(self, logger): con = self._get_mock_connection(connection_params={"http_compress": True}) con.perform_request("GET", "/", body=b'{"example": "body"}') self.assertEqual(2, logger.debug.call_count) req, resp = logger.debug.call_args_list self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) self.assertEqual("< {}", resp[0][0] % resp[0][1:]) def test_surrogatepass_into_bytes(self): buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) @pytest.mark.skipif( not reraise_exceptions, reason="RecursionError isn't defined in Python <3.5" ) def test_recursion_error_reraised(self): conn = Urllib3HttpConnection() def urlopen_raise(*_, **__): raise RecursionError("Wasn't modified!") conn.pool.urlopen = urlopen_raise with pytest.raises(RecursionError) as e: conn.perform_request("GET", "/") assert str(e.value) == "Wasn't modified!" def test_mapbox_vector_tile_content_type(self): buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection( response_body=buf, response_headers={"Content-Type": "application/vnd.mapbox-vector-tile"}, ) status, headers, data = con.perform_request("GET", "/") # Response is returned as binary assert data == b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" class TestRequestsConnection(TestCase): def _get_mock_connection( self, connection_params={}, status_code=200, response_body=b"{}", response_headers={}, ): con = RequestsHttpConnection(**connection_params) def _dummy_send(*args, **kwargs): dummy_response = Mock() dummy_response.headers = response_headers dummy_response.status_code = status_code dummy_response.content = response_body dummy_response.request = args[0] dummy_response.cookies = {} _dummy_send.call_args = (args, kwargs) return dummy_response con.session.send = _dummy_send return con def _get_request(self, connection, *args, **kwargs): if "body" in kwargs: kwargs["body"] = kwargs["body"].encode("utf-8") status, headers, data = connection.perform_request(*args, **kwargs) self.assertEqual(200, status) self.assertEqual("{}", data) timeout = kwargs.pop("timeout", connection.timeout) args, kwargs = connection.session.send.call_args self.assertEqual(timeout, kwargs["timeout"]) self.assertEqual(1, len(args)) return args[0] def test_custom_http_auth_is_allowed(self): auth = AuthBase() c = RequestsHttpConnection(http_auth=auth) self.assertEqual(auth, c.session.auth) def test_timeout_set(self): con = RequestsHttpConnection(timeout=42) self.assertEqual(42, con.timeout) def test_opaque_id(self): con = RequestsHttpConnection(opaque_id="app-1") self.assertEqual(con.headers["x-opaque-id"], "app-1") def test_http_cloud_id(self): con = RequestsHttpConnection( 
cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==" ) self.assertTrue(con.use_ssl) self.assertEqual( con.host, "https://4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io" ) self.assertEqual(con.port, None) self.assertEqual( con.hostname, "4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io" ) self.assertTrue(con.http_compress) con = RequestsHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", port=9243, ) self.assertEqual( con.host, "https://4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io:9243", ) self.assertEqual(con.port, 9243) self.assertEqual( con.hostname, "4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io" ) def test_api_key_auth(self): # test with tuple con = RequestsHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", api_key=("elastic", "changeme1"), ) self.assertEqual( con.session.headers["authorization"], "ApiKey ZWxhc3RpYzpjaGFuZ2VtZTE=" ) self.assertEqual( con.host, "https://4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io" ) # test with base64 encoded string con = RequestsHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", api_key="ZWxhc3RpYzpjaGFuZ2VtZTI=", ) self.assertEqual( con.session.headers["authorization"], "ApiKey ZWxhc3RpYzpjaGFuZ2VtZTI=" ) self.assertEqual( con.host, "https://4fa8821e75634032bed1cf22110e2f97.us-east-1.aws.found.io" ) def test_no_http_compression(self): con = self._get_mock_connection() self.assertFalse(con.http_compress) self.assertNotIn("content-encoding", con.session.headers) con.perform_request("GET", "/") req = con.session.send.call_args[0][0] self.assertNotIn("content-encoding", req.headers) self.assertNotIn("accept-encoding", req.headers) def test_http_compression(self): con = self._get_mock_connection( {"http_compress": True}, ) self.assertTrue(con.http_compress) # 'content-encoding' shouldn't be set at a session level. # Should be applied only if the request is sent with a body. self.assertNotIn("content-encoding", con.session.headers) con.perform_request("GET", "/", body=b"{}") req = con.session.send.call_args[0][0] self.assertEqual(req.headers["content-encoding"], "gzip") self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") con.perform_request("GET", "/") req = con.session.send.call_args[0][0] self.assertNotIn("content-encoding", req.headers) self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") def test_cloud_id_http_compress_override(self): # 'http_compress' will be 'True' by default for connections with # 'cloud_id' set but should prioritize user-defined values. 
con = RequestsHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", ) self.assertEqual(con.http_compress, True) con = RequestsHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", http_compress=False, ) self.assertEqual(con.http_compress, False) con = RequestsHttpConnection( cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==", http_compress=True, ) self.assertEqual(con.http_compress, True) def test_uses_https_if_verify_certs_is_off(self): with warnings.catch_warnings(record=True) as w: con = self._get_mock_connection( {"use_ssl": True, "url_prefix": "url", "verify_certs": False} ) self.assertEqual(1, len(w)) self.assertEqual( "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure.", str(w[0].message), ) request = self._get_request(con, "GET", "/") self.assertEqual("https://localhost:9200/url/", request.url) self.assertEqual("GET", request.method) self.assertEqual(None, request.body) def test_nowarn_when_uses_https_if_verify_certs_is_off(self): with warnings.catch_warnings(record=True) as w: con = self._get_mock_connection( { "use_ssl": True, "url_prefix": "url", "verify_certs": False, "ssl_show_warn": False, } ) self.assertEqual(0, len(w)) request = self._get_request(con, "GET", "/") self.assertEqual("https://localhost:9200/url/", request.url) self.assertEqual("GET", request.method) self.assertEqual(None, request.body) def test_merge_headers(self): con = self._get_mock_connection( connection_params={"headers": {"h1": "v1", "h2": "v2"}} ) req = self._get_request(con, "GET", "/", headers={"h2": "v2p", "h3": "v3"}) self.assertEqual(req.headers["h1"], "v1") self.assertEqual(req.headers["h2"], "v2p") self.assertEqual(req.headers["h3"], "v3") def test_default_headers(self): con = self._get_mock_connection() req = self._get_request(con, "GET", "/") self.assertEqual(req.headers, {"user-agent": con._get_default_user_agent()}) def test_custom_headers(self): con = self._get_mock_connection() req = self._get_request( con, "GET", "/", headers={ "content-type": "application/x-ndjson", "user-agent": "custom-agent/1.2.3", }, ) self.assertEqual(req.headers["content-type"], "application/x-ndjson") self.assertEqual(req.headers["user-agent"], "custom-agent/1.2.3") def test_http_auth(self): con = RequestsHttpConnection(http_auth="username:secret") self.assertEqual(("username", "secret"), con.session.auth) def test_http_auth_tuple(self): con = RequestsHttpConnection(http_auth=("username", "secret")) self.assertEqual(("username", "secret"), con.session.auth) def test_http_auth_list(self): con = RequestsHttpConnection(http_auth=["username", "secret"]) self.assertEqual(("username", "secret"), con.session.auth) def test_repr(self): con = self._get_mock_connection({"host": "elasticsearch.com", "port": 443}) self.assertEqual( "", repr(con) ) def test_conflict_error_is_returned_on_409(self): con = self._get_mock_connection(status_code=409) self.assertRaises(ConflictError, con.perform_request, "GET", "/", {}, "") def test_not_found_error_is_returned_on_404(self): con = self._get_mock_connection(status_code=404) self.assertRaises(NotFoundError, con.perform_request, "GET", "/", {}, "") def test_request_error_is_returned_on_400(self): con = self._get_mock_connection(status_code=400) 
self.assertRaises(RequestError, con.perform_request, "GET", "/", {}, "") @patch("elasticsearch.connection.base.logger") def test_head_with_404_doesnt_get_logged(self, logger): con = self._get_mock_connection(status_code=404) self.assertRaises(NotFoundError, con.perform_request, "HEAD", "/", {}, "") self.assertEqual(0, logger.warning.call_count) @patch("elasticsearch.connection.base.tracer") @patch("elasticsearch.connection.base.logger") def test_failed_request_logs_and_traces(self, logger, tracer): con = self._get_mock_connection( response_body=b'{"answer": 42}', status_code=500 ) self.assertRaises( TransportError, con.perform_request, "GET", "/", {"param": 42}, "{}".encode("utf-8"), ) # trace request self.assertEqual(1, tracer.info.call_count) # trace response self.assertEqual(1, tracer.debug.call_count) # log url and duration self.assertEqual(1, logger.warning.call_count) self.assertTrue( re.match( r"^GET http://localhost:9200/\?param=42 \[status:500 request:0.[0-9]{3}s\]", logger.warning.call_args[0][0] % logger.warning.call_args[0][1:], ) ) @patch("elasticsearch.connection.base.tracer") @patch("elasticsearch.connection.base.logger") def test_success_logs_and_traces(self, logger, tracer): con = self._get_mock_connection(response_body=b"""{"answer": "that's it!"}""") status, headers, data = con.perform_request( "GET", "/", {"param": 42}, """{"question": "what's that?"}""".encode("utf-8"), ) # trace request self.assertEqual(1, tracer.info.call_count) self.assertEqual( """curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/?pretty¶m=42' -d '{\n "question": "what\\u0027s that?"\n}'""", tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], ) # trace response self.assertEqual(1, tracer.debug.call_count) self.assertTrue( re.match( r'#\[200\] \(0.[0-9]{3}s\)\n#{\n# "answer": "that\\u0027s it!"\n#}', tracer.debug.call_args[0][0] % tracer.debug.call_args[0][1:], ) ) # log url and duration self.assertEqual(1, logger.info.call_count) self.assertTrue( re.match( r"GET http://localhost:9200/\?param=42 \[status:200 request:0.[0-9]{3}s\]", logger.info.call_args[0][0] % logger.info.call_args[0][1:], ) ) # log request body and response self.assertEqual(2, logger.debug.call_count) req, resp = logger.debug.call_args_list self.assertEqual('> {"question": "what\'s that?"}', req[0][0] % req[0][1:]) self.assertEqual('< {"answer": "that\'s it!"}', resp[0][0] % resp[0][1:]) @patch("elasticsearch.connection.base.logger") def test_uncompressed_body_logged(self, logger): con = self._get_mock_connection(connection_params={"http_compress": True}) con.perform_request("GET", "/", body=b'{"example": "body"}') self.assertEqual(2, logger.debug.call_count) req, resp = logger.debug.call_args_list self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) self.assertEqual("< {}", resp[0][0] % resp[0][1:]) con = self._get_mock_connection( connection_params={"http_compress": True}, status_code=500, response_body=b'{"hello":"world"}', ) with pytest.raises(TransportError): con.perform_request("GET", "/", body=b'{"example": "body2"}') self.assertEqual(4, logger.debug.call_count) _, _, req, resp = logger.debug.call_args_list self.assertEqual('> {"example": "body2"}', req[0][0] % req[0][1:]) self.assertEqual('< {"hello":"world"}', resp[0][0] % resp[0][1:]) def test_defaults(self): con = self._get_mock_connection() request = self._get_request(con, "GET", "/") self.assertEqual("http://localhost:9200/", request.url) self.assertEqual("GET", request.method) self.assertEqual(None, request.body) def 
test_params_properly_encoded(self): con = self._get_mock_connection() request = self._get_request( con, "GET", "/", params={"param": "value with spaces"} ) self.assertEqual("http://localhost:9200/?param=value+with+spaces", request.url) self.assertEqual("GET", request.method) self.assertEqual(None, request.body) def test_body_attached(self): con = self._get_mock_connection() request = self._get_request(con, "GET", "/", body='{"answer": 42}') self.assertEqual("http://localhost:9200/", request.url) self.assertEqual("GET", request.method) self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) def test_http_auth_attached(self): con = self._get_mock_connection({"http_auth": "username:secret"}) request = self._get_request(con, "GET", "/") self.assertEqual(request.headers["authorization"], "Basic dXNlcm5hbWU6c2VjcmV0") @patch("elasticsearch.connection.base.tracer") def test_url_prefix(self, tracer): con = self._get_mock_connection({"url_prefix": "/some-prefix/"}) request = self._get_request( con, "GET", "/_search", body='{"answer": 42}', timeout=0.1 ) self.assertEqual("http://localhost:9200/some-prefix/_search", request.url) self.assertEqual("GET", request.method) self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) # trace request self.assertEqual(1, tracer.info.call_count) self.assertEqual( "curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/_search?pretty' -d '{\n \"answer\": 42\n}'", tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], ) def test_surrogatepass_into_bytes(self): buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection(response_body=buf) status, headers, data = con.perform_request("GET", "/") self.assertEqual(u"你好\uda6a", data) @pytest.mark.skipif( not reraise_exceptions, reason="RecursionError isn't defined in Python <3.5" ) def test_recursion_error_reraised(self): conn = RequestsHttpConnection() def send_raise(*_, **__): raise RecursionError("Wasn't modified!") conn.session.send = send_raise with pytest.raises(RecursionError) as e: conn.perform_request("GET", "/") assert str(e.value) == "Wasn't modified!" def test_mapbox_vector_tile_content_type(self): buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = self._get_mock_connection( response_body=buf, response_headers={"Content-Type": "application/vnd.mapbox-vector-tile"}, ) status, headers, data = con.perform_request("GET", "/") # Response is returned as binary assert data == b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" class TestConnectionHttpbin: """Tests the HTTP connection implementations against a live server E2E""" def httpbin_anything(self, conn, **kwargs): status, headers, data = conn.perform_request("GET", "/anything", **kwargs) data = json.loads(data) data["headers"].pop( "X-Amzn-Trace-Id", None ) # Remove this header as it's put there by AWS. 
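        # Response header names should have been normalized to lowercase by
        # the connection class; the assertion below verifies that end-to-end.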
assert all(header == header.lower() for header in headers) return (status, data) def test_urllib3_connection(self): # Defaults conn = Urllib3HttpConnection("httpbin.org", port=443, use_ssl=True) user_agent = conn._get_default_user_agent() status, data = self.httpbin_anything(conn) assert status == 200 assert data["method"] == "GET" assert data["headers"] == { "Accept-Encoding": "identity", "Host": "httpbin.org", "User-Agent": user_agent, } # http_compress=False conn = Urllib3HttpConnection( "httpbin.org", port=443, use_ssl=True, http_compress=False ) status, data = self.httpbin_anything(conn) assert status == 200 assert data["method"] == "GET" assert data["headers"] == { "Accept-Encoding": "identity", "Host": "httpbin.org", "User-Agent": user_agent, } # http_compress=True conn = Urllib3HttpConnection( "httpbin.org", port=443, use_ssl=True, http_compress=True ) status, data = self.httpbin_anything(conn) assert status == 200 assert data["headers"] == { "Accept-Encoding": "gzip,deflate", "Host": "httpbin.org", "User-Agent": user_agent, } # Headers conn = Urllib3HttpConnection( "httpbin.org", port=443, use_ssl=True, http_compress=True, headers={"header1": "value1"}, ) status, data = self.httpbin_anything( conn, headers={"header2": "value2", "header1": "override!"} ) assert status == 200 assert data["headers"] == { "Accept-Encoding": "gzip,deflate", "Host": "httpbin.org", "Header1": "override!", "Header2": "value2", "User-Agent": user_agent, } def test_urllib3_connection_error(self): conn = Urllib3HttpConnection("not.a.host.name") with pytest.raises(ConnectionError): conn.perform_request("GET", "/") def test_requests_connection(self): # Defaults conn = RequestsHttpConnection("httpbin.org", port=443, use_ssl=True) user_agent = conn._get_default_user_agent() status, data = self.httpbin_anything(conn) assert status == 200 assert data["method"] == "GET" assert data["headers"] == { "Accept-Encoding": "identity", "Host": "httpbin.org", "User-Agent": user_agent, } # http_compress=False conn = RequestsHttpConnection( "httpbin.org", port=443, use_ssl=True, http_compress=False ) status, data = self.httpbin_anything(conn) assert status == 200 assert data["method"] == "GET" assert data["headers"] == { "Accept-Encoding": "identity", "Host": "httpbin.org", "User-Agent": user_agent, } # http_compress=True conn = RequestsHttpConnection( "httpbin.org", port=443, use_ssl=True, http_compress=True ) status, data = self.httpbin_anything(conn) assert status == 200 assert data["headers"] == { "Accept-Encoding": "gzip,deflate", "Host": "httpbin.org", "User-Agent": user_agent, } # Headers conn = RequestsHttpConnection( "httpbin.org", port=443, use_ssl=True, http_compress=True, headers={"header1": "value1"}, ) status, data = self.httpbin_anything( conn, headers={"header2": "value2", "header1": "override!"} ) assert status == 200 assert data["headers"] == { "Accept-Encoding": "gzip,deflate", "Host": "httpbin.org", "Header1": "override!", "Header2": "value2", "User-Agent": user_agent, } def test_requests_connection_error(self): conn = RequestsHttpConnection("not.a.host.name") with pytest.raises(ConnectionError): conn.perform_request("GET", "/") def test_elasticsearch_connection_error(self): es = Elasticsearch("http://not.a.host.name") with pytest.raises(ConnectionError): es.search() elasticsearch-py-7.17.6/test_elasticsearch/test_connection_pool.py000066400000000000000000000132411426163262700255400ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. 
See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

import time

from elasticsearch.connection import Connection
from elasticsearch.connection_pool import (
    ConnectionPool,
    DummyConnectionPool,
    RoundRobinSelector,
)
from elasticsearch.exceptions import ImproperlyConfigured

from .test_cases import TestCase


class TestConnectionPool(TestCase):
    def test_dummy_cp_raises_exception_on_more_connections(self):
        self.assertRaises(ImproperlyConfigured, DummyConnectionPool, [])
        self.assertRaises(
            ImproperlyConfigured, DummyConnectionPool, [object(), object()]
        )

    def test_raises_exception_when_no_connections_defined(self):
        self.assertRaises(ImproperlyConfigured, ConnectionPool, [])

    def test_default_round_robin(self):
        pool = ConnectionPool([(x, {}) for x in range(100)])

        connections = set()
        for _ in range(100):
            connections.add(pool.get_connection())
        self.assertEqual(connections, set(range(100)))

    def test_disable_shuffling(self):
        pool = ConnectionPool([(x, {}) for x in range(100)], randomize_hosts=False)

        connections = []
        for _ in range(100):
            connections.append(pool.get_connection())
        self.assertEqual(connections, list(range(100)))

    def test_selectors_have_access_to_connection_opts(self):
        class MySelector(RoundRobinSelector):
            def select(self, connections):
                return self.connection_opts[
                    super(MySelector, self).select(connections)
                ]["actual"]

        pool = ConnectionPool(
            [(x, {"actual": x * x}) for x in range(100)],
            selector_class=MySelector,
            randomize_hosts=False,
        )

        connections = []
        for _ in range(100):
            connections.append(pool.get_connection())
        self.assertEqual(connections, [x * x for x in range(100)])

    def test_dead_nodes_are_removed_from_active_connections(self):
        pool = ConnectionPool([(x, {}) for x in range(100)])

        now = time.time()
        pool.mark_dead(42, now=now)
        self.assertEqual(99, len(pool.connections))
        self.assertEqual(1, pool.dead.qsize())
        self.assertEqual((now + 60, 42), pool.dead.get())

    def test_connection_is_skipped_when_dead(self):
        pool = ConnectionPool([(x, {}) for x in range(2)])
        pool.mark_dead(0)

        self.assertEqual(
            [1, 1, 1],
            [pool.get_connection(), pool.get_connection(), pool.get_connection()],
        )

    def test_new_connection_is_not_marked_dead(self):
        # Create 10 connections
        pool = ConnectionPool([(Connection(), {}) for _ in range(10)])

        # Pass in a new connection that is not in the pool to mark as dead
        new_connection = Connection()
        pool.mark_dead(new_connection)

        # Nothing should be marked dead
        self.assertEqual(0, len(pool.dead_count))

    def test_connection_is_forcibly_resurrected_when_no_live_ones_are_available(self):
        pool = ConnectionPool([(x, {}) for x in range(2)])
        pool.dead_count[0] = 1
        pool.mark_dead(0)  # failed twice, longer timeout
        pool.mark_dead(1)  # failed the first time, first to be resurrected

        self.assertEqual([], pool.connections)
        self.assertEqual(1, pool.get_connection())
        self.assertEqual([1], pool.connections)

    def test_connection_is_resurrected_after_its_timeout(self):
        pool = ConnectionPool([(x, {}) for x in range(100)])

        now = time.time()
        pool.mark_dead(42, now=now - 61)
        pool.get_connection()
        self.assertEqual(42, pool.connections[-1])
        self.assertEqual(100, len(pool.connections))

    def test_force_resurrect_always_returns_a_connection(self):
        pool = ConnectionPool([(0, {})])
        pool.connections = []

        self.assertEqual(0, pool.get_connection())
        self.assertEqual([], pool.connections)
        self.assertTrue(pool.dead.empty())

    def test_already_failed_connection_has_longer_timeout(self):
        # The pool backs off exponentially: 60 * 2 ** (dead_count - 1) seconds,
        # as the asserted (now + 4 * 60) deadline for a third failure shows.
        pool = ConnectionPool([(x, {}) for x in range(100)])
        now = time.time()
        pool.dead_count[42] = 2
        pool.mark_dead(42, now=now)

        self.assertEqual(3, pool.dead_count[42])
        self.assertEqual((now + 4 * 60, 42), pool.dead.get())

    def test_timeout_for_failed_connections_is_limited(self):
        # The exponential backoff is capped at 32 minutes per connection.
        pool = ConnectionPool([(x, {}) for x in range(100)])
        now = time.time()
        pool.dead_count[42] = 245
        pool.mark_dead(42, now=now)

        self.assertEqual(246, pool.dead_count[42])
        self.assertEqual((now + 32 * 60, 42), pool.dead.get())

    def test_dead_count_is_wiped_clean_for_connection_if_marked_live(self):
        pool = ConnectionPool([(x, {}) for x in range(100)])
        now = time.time()
        pool.dead_count[42] = 2
        pool.mark_dead(42, now=now)

        self.assertEqual(3, pool.dead_count[42])
        pool.mark_live(42)
        self.assertNotIn(42, pool.dead_count)
elasticsearch-py-7.17.6/test_elasticsearch/test_exceptions.py000066400000000000000000000030601426163262700245310ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from elasticsearch.exceptions import TransportError

from .test_cases import TestCase


class TestTransportError(TestCase):
    def test_transport_error_parse_with_error_reason(self):
        e = TransportError(
            500,
            "InternalServerError",
            {"error": {"root_cause": [{"type": "error", "reason": "error reason"}]}},
        )

        self.assertEqual(
            str(e), "TransportError(500, 'InternalServerError', 'error reason')"
        )

    def test_transport_error_parse_with_error_string(self):
        e = TransportError(
            500, "InternalServerError", {"error": "something error message"}
        )

        self.assertEqual(
            str(e),
            "TransportError(500, 'InternalServerError', 'something error message')",
        )
elasticsearch-py-7.17.6/test_elasticsearch/test_helpers.py000066400000000000000000000170711426163262700240170ustar00rootroot00000000000000# -*- coding: utf-8 -*-
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import threading import time import mock import pytest from elasticsearch import Elasticsearch, helpers from elasticsearch.helpers import actions from elasticsearch.serializer import JSONSerializer from .test_cases import TestCase lock_side_effect = threading.Lock() def mock_process_bulk_chunk(*args, **kwargs): """ Threadsafe way of mocking process bulk chunk: https://stackoverflow.com/questions/39332139/thread-safe-version-of-mock-call-count """ with lock_side_effect: mock_process_bulk_chunk.call_count += 1 time.sleep(0.1) return [] mock_process_bulk_chunk.call_count = 0 class TestParallelBulk(TestCase): @mock.patch( "elasticsearch.helpers.actions._process_bulk_chunk", side_effect=mock_process_bulk_chunk, ) def test_all_chunks_sent(self, _process_bulk_chunk): actions = ({"x": i} for i in range(100)) list(helpers.parallel_bulk(Elasticsearch(), actions, chunk_size=2)) self.assertEqual(50, mock_process_bulk_chunk.call_count) @pytest.mark.skip @mock.patch( "elasticsearch.helpers.actions._process_bulk_chunk", # make sure we spend some time in the thread side_effect=lambda *a: [ (True, time.sleep(0.001) or threading.current_thread().ident) ], ) def test_chunk_sent_from_different_threads(self, _process_bulk_chunk): actions = ({"x": i} for i in range(100)) results = list( helpers.parallel_bulk( Elasticsearch(), actions, thread_count=10, chunk_size=2 ) ) self.assertTrue(len(set([r[1] for r in results])) > 1) class TestChunkActions(TestCase): def setup_method(self, _): self.actions = [({"index": {}}, {"some": u"datá", "i": i}) for i in range(100)] def test_expand_action(self): self.assertEqual(helpers.expand_action({}), ({"index": {}}, {})) self.assertEqual( helpers.expand_action({"key": "val"}), ({"index": {}}, {"key": "val"}) ) def test_expand_action_actions(self): self.assertEqual( helpers.expand_action( {"_op_type": "delete", "_id": "id", "_index": "index"} ), ({"delete": {"_id": "id", "_index": "index"}}, None), ) self.assertEqual( helpers.expand_action( {"_op_type": "update", "_id": "id", "_index": "index", "key": "val"} ), ({"update": {"_id": "id", "_index": "index"}}, {"key": "val"}), ) self.assertEqual( helpers.expand_action( {"_op_type": "create", "_id": "id", "_index": "index", "key": "val"} ), ({"create": {"_id": "id", "_index": "index"}}, {"key": "val"}), ) self.assertEqual( helpers.expand_action( { "_op_type": "create", "_id": "id", "_index": "index", "_source": {"key": "val"}, } ), ({"create": {"_id": "id", "_index": "index"}}, {"key": "val"}), ) def test_expand_action_options(self): for option in ( "_id", "_index", "_percolate", "_timestamp", "_type", "if_seq_no", "if_primary_term", "parent", "pipeline", "retry_on_conflict", "routing", "version", "version_type", ("_parent", "parent"), ("_retry_on_conflict", "retry_on_conflict"), ("_routing", "routing"), ("_version", "version"), ("_version_type", "version_type"), ("_if_seq_no", "if_seq_no"), ("_if_primary_term", "if_primary_term"), ): if isinstance(option, str): action_option = option else: option, action_option = option self.assertEqual( helpers.expand_action({"key": "val", option: 0}), ({"index": {action_option: 0}}, {"key": 
"val"}), ) def test__source_metadata_or_source(self): self.assertEqual( helpers.expand_action({"_source": {"key": "val"}}), ({"index": {}}, {"key": "val"}), ) self.assertEqual( helpers.expand_action( {"_source": ["key"], "key": "val", "_op_type": "update"} ), ({"update": {"_source": ["key"]}}, {"key": "val"}), ) self.assertEqual( helpers.expand_action( {"_source": True, "key": "val", "_op_type": "update"} ), ({"update": {"_source": True}}, {"key": "val"}), ) # This case is only to ensure backwards compatibility with old functionality. self.assertEqual( helpers.expand_action( {"_source": {"key2": "val2"}, "key": "val", "_op_type": "update"} ), ({"update": {}}, {"key2": "val2"}), ) def test_chunks_are_chopped_by_byte_size(self): self.assertEqual( 100, len( list(helpers._chunk_actions(self.actions, 100000, 1, JSONSerializer())) ), ) def test_chunks_are_chopped_by_chunk_size(self): self.assertEqual( 10, len( list( helpers._chunk_actions(self.actions, 10, 99999999, JSONSerializer()) ) ), ) def test_chunks_are_chopped_by_byte_size_properly(self): max_byte_size = 170 chunks = list( helpers._chunk_actions( self.actions, 100000, max_byte_size, JSONSerializer() ) ) self.assertEqual(25, len(chunks)) for chunk_data, chunk_actions in chunks: chunk = u"".join(chunk_actions) chunk = chunk if isinstance(chunk, str) else chunk.encode("utf-8") self.assertLessEqual(len(chunk), max_byte_size) def test_add_helper_meta_to_kwargs(self): self.assertEqual( actions._add_helper_meta_to_kwargs({}, "b"), {"params": {"__elastic_client_meta": (("h", "b"),)}}, ) self.assertEqual( actions._add_helper_meta_to_kwargs({"params": {}}, "b"), {"params": {"__elastic_client_meta": (("h", "b"),)}}, ) self.assertEqual( actions._add_helper_meta_to_kwargs({"params": {"key": "value"}}, "b"), {"params": {"__elastic_client_meta": (("h", "b"),), "key": "value"}}, ) class TestExpandActions(TestCase): def test_string_actions_are_marked_as_simple_inserts(self): self.assertEqual( ('{"index":{}}', "whatever"), helpers.expand_action("whatever") ) elasticsearch-py-7.17.6/test_elasticsearch/test_module.py000066400000000000000000000033401426163262700236340ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
import importlib import sys import warnings import pytest import elasticsearch @pytest.mark.skipif(sys.version_info < (3, 6), reason="Requires Python 3.6+") def test_no_deprecation_python3_6_and_later(): with warnings.catch_warnings(record=True) as w: importlib.reload(elasticsearch) assert len(w) == 0 @pytest.mark.skipif(sys.version_info >= (3, 6), reason="Requires Python <3.6") def test_deprecated_python3_5_and_earlier(): try: # Python 3.4+ import imp reload = imp.reload except ImportError: # Python 2.7 reload = reload with pytest.warns(DeprecationWarning) as w: reload(elasticsearch) assert len(w) == 1 assert str(w[0].message) == ( "Support for Python 3.5 and earlier is deprecated and will be removed " "in v8.0.0 (current instance is Python %d.%d) See https://github.com/" "elastic/elasticsearch-py/issues/1696 for details." % (sys.version_info[:2]) ) elasticsearch-py-7.17.6/test_elasticsearch/test_serializer.py000066400000000000000000000171641426163262700245310ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
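# These tests pin down JSONSerializer's handling of datetimes, Decimals,
# UUIDs and optional numpy/pandas types. For anything it doesn't know,
# the extension point is overriding default(); the Money type below is a
# hypothetical example used only to sketch the idea, not part of the client:
from elasticsearch.serializer import JSONSerializer


class Money(object):
    def __init__(self, cents):
        self.cents = cents


class MoneySerializer(JSONSerializer):
    def default(self, data):
        if isinstance(data, Money):
            # Represent our custom type as a plain float
            return data.cents / 100.0
        # Defer every other type to the stock behaviour
        return super(MoneySerializer, self).default(data)


assert MoneySerializer().dumps({"price": Money(1750)}) == '{"price":17.5}'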
import sys import uuid from datetime import datetime from decimal import Decimal try: import numpy as np import pandas as pd except ImportError: np = pd = None from elasticsearch.exceptions import ImproperlyConfigured, SerializationError from elasticsearch.serializer import ( DEFAULT_SERIALIZERS, Deserializer, JSONSerializer, TextSerializer, ) from .test_cases import SkipTest, TestCase def requires_numpy_and_pandas(): if np is None or pd is None: raise SkipTest("Test requires numpy or pandas to be available") class TestJSONSerializer(TestCase): def test_datetime_serialization(self): self.assertEqual( '{"d":"2010-10-01T02:30:00"}', JSONSerializer().dumps({"d": datetime(2010, 10, 1, 2, 30)}), ) def test_decimal_serialization(self): requires_numpy_and_pandas() if sys.version_info[:2] == (2, 6): raise SkipTest("Float rounding is broken in 2.6.") self.assertEqual('{"d":3.8}', JSONSerializer().dumps({"d": Decimal("3.8")})) def test_uuid_serialization(self): self.assertEqual( '{"d":"00000000-0000-0000-0000-000000000003"}', JSONSerializer().dumps( {"d": uuid.UUID("00000000-0000-0000-0000-000000000003")} ), ) def test_serializes_numpy_bool(self): requires_numpy_and_pandas() self.assertEqual('{"d":true}', JSONSerializer().dumps({"d": np.bool_(True)})) def test_serializes_numpy_integers(self): requires_numpy_and_pandas() ser = JSONSerializer() for np_type in ( np.int_, np.int8, np.int16, np.int32, np.int64, ): self.assertEqual(ser.dumps({"d": np_type(-1)}), '{"d":-1}') for np_type in ( np.uint8, np.uint16, np.uint32, np.uint64, ): self.assertEqual(ser.dumps({"d": np_type(1)}), '{"d":1}') def test_serializes_numpy_floats(self): requires_numpy_and_pandas() ser = JSONSerializer() for np_type in ( np.float_, np.float32, np.float64, ): self.assertRegexpMatches( ser.dumps({"d": np_type(1.2)}), r'^\{"d":1\.2[\d]*}$' ) def test_serializes_numpy_datetime(self): requires_numpy_and_pandas() self.assertEqual( '{"d":"2010-10-01T02:30:00"}', JSONSerializer().dumps({"d": np.datetime64("2010-10-01T02:30:00")}), ) def test_serializes_numpy_ndarray(self): requires_numpy_and_pandas() self.assertEqual( '{"d":[0,0,0,0,0]}', JSONSerializer().dumps({"d": np.zeros((5,), dtype=np.uint8)}), ) # This isn't useful for Elasticsearch, just want to make sure it works. 
self.assertEqual( '{"d":[[0,0],[0,0]]}', JSONSerializer().dumps({"d": np.zeros((2, 2), dtype=np.uint8)}), ) def test_serializes_numpy_nan_to_nan(self): requires_numpy_and_pandas() self.assertEqual( '{"d":NaN}', JSONSerializer().dumps({"d": np.nan}), ) def test_serializes_pandas_timestamp(self): requires_numpy_and_pandas() self.assertEqual( '{"d":"2010-10-01T02:30:00"}', JSONSerializer().dumps({"d": pd.Timestamp("2010-10-01T02:30:00")}), ) def test_serializes_pandas_series(self): requires_numpy_and_pandas() self.assertEqual( '{"d":["a","b","c","d"]}', JSONSerializer().dumps({"d": pd.Series(["a", "b", "c", "d"])}), ) def test_serializes_pandas_na(self): requires_numpy_and_pandas() if not hasattr(pd, "NA"): # pandas.NA added in v1 raise SkipTest("pandas.NA required") self.assertEqual( '{"d":null}', JSONSerializer().dumps({"d": pd.NA}), ) def test_raises_serialization_error_pandas_nat(self): requires_numpy_and_pandas() if not hasattr(pd, "NaT"): raise SkipTest("pandas.NaT required") self.assertRaises(SerializationError, JSONSerializer().dumps, {"d": pd.NaT}) def test_serializes_pandas_category(self): requires_numpy_and_pandas() cat = pd.Categorical(["a", "c", "b", "a"], categories=["a", "b", "c"]) self.assertEqual( '{"d":["a","c","b","a"]}', JSONSerializer().dumps({"d": cat}), ) cat = pd.Categorical([1, 2, 3], categories=[1, 2, 3]) self.assertEqual( '{"d":[1,2,3]}', JSONSerializer().dumps({"d": cat}), ) def test_raises_serialization_error_on_dump_error(self): self.assertRaises(SerializationError, JSONSerializer().dumps, object()) def test_raises_serialization_error_on_load_error(self): self.assertRaises(SerializationError, JSONSerializer().loads, object()) self.assertRaises(SerializationError, JSONSerializer().loads, "") self.assertRaises(SerializationError, JSONSerializer().loads, "{{") def test_strings_are_left_untouched(self): self.assertEqual("你好", JSONSerializer().dumps("你好")) class TestTextSerializer(TestCase): def test_strings_are_left_untouched(self): self.assertEqual("你好", TextSerializer().dumps("你好")) def test_raises_serialization_error_on_dump_error(self): self.assertRaises(SerializationError, TextSerializer().dumps, {}) class TestDeserializer(TestCase): def setup_method(self, _): self.de = Deserializer(DEFAULT_SERIALIZERS) def test_deserializes_json_by_default(self): self.assertEqual({"some": "data"}, self.de.loads('{"some":"data"}')) def test_deserializes_text_with_correct_ct(self): self.assertEqual( '{"some":"data"}', self.de.loads('{"some":"data"}', "text/plain") ) self.assertEqual( '{"some":"data"}', self.de.loads('{"some":"data"}', "text/plain; charset=whatever"), ) def test_deserialize_compatibility_header(self): for content_type in ( "application/vnd.elasticsearch+json;compatible-with=7", "application/vnd.elasticsearch+json; compatible-with=7", "application/vnd.elasticsearch+json;compatible-with=8", "application/vnd.elasticsearch+json; compatible-with=8", ): self.assertEqual( {"some": "data"}, self.de.loads('{"some":"data"}', content_type) ) def test_raises_serialization_error_on_unknown_mimetype(self): self.assertRaises(SerializationError, self.de.loads, "{}", "text/html") def test_raises_improperly_configured_when_default_mimetype_cannot_be_deserialized( self, ): self.assertRaises(ImproperlyConfigured, Deserializer, {}) 
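# A compact usage sketch of the Deserializer exercised above: JSON is the
# fallback, and mimetype parameters such as 'charset' or 'compatible-with'
# are ignored when picking a serializer:
from elasticsearch.serializer import DEFAULT_SERIALIZERS, Deserializer

de = Deserializer(DEFAULT_SERIALIZERS)
assert de.loads('{"some":"data"}') == {"some": "data"}  # JSON by default
assert de.loads('{"some":"data"}', "text/plain; charset=utf-8") == '{"some":"data"}'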
elasticsearch-py-7.17.6/test_elasticsearch/test_server/000077500000000000000000000000001426163262700233035ustar00rootroot00000000000000elasticsearch-py-7.17.6/test_elasticsearch/test_server/__init__.py000066400000000000000000000033601426163262700254160ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from unittest import SkipTest from elasticsearch.helpers import test from elasticsearch.helpers.test import ElasticsearchTestCase as BaseTestCase client = None def get_client(**kwargs): global client if client is False: raise SkipTest("No client is available") if client is not None and not kwargs: return client # try and locate manual override in the local environment try: from test_elasticsearch.local import get_client as local_get_client new_client = local_get_client(**kwargs) except ImportError: # fallback to using vanilla client try: new_client = test.get_test_client(**kwargs) except SkipTest: client = False raise if not kwargs: client = new_client return new_client def setup_module(): get_client() class ElasticsearchTestCase(BaseTestCase): @staticmethod def _get_client(**kwargs): return get_client(**kwargs) elasticsearch-py-7.17.6/test_elasticsearch/test_server/conftest.py000066400000000000000000000051071426163262700255050ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
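# Note on the hard-coded Authorization header used below: it is simply the
# HTTP Basic encoding of the stack's default test credentials, reproducible
# like this:
import base64

assert base64.b64encode(b"elastic:changeme").decode("ascii") == "ZWxhc3RpYzpjaGFuZ2VtZQ=="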
import os import time import pytest import elasticsearch from elasticsearch.helpers.test import CA_CERTS, ELASTICSEARCH_URL from ..utils import wipe_cluster # Information about the Elasticsearch instance running, if any # Used for ELASTICSEARCH_VERSION = "" ELASTICSEARCH_BUILD_HASH = "" ELASTICSEARCH_REST_API_TESTS = [] @pytest.fixture(scope="session") def sync_client_factory(): client = None try: # Configure the client with certificates and optionally # an HTTP conn class depending on 'PYTHON_CONNECTION_CLASS' envvar kw = { "timeout": 3, "ca_certs": CA_CERTS, "headers": {"Authorization": "Basic ZWxhc3RpYzpjaGFuZ2VtZQ=="}, } if "PYTHON_CONNECTION_CLASS" in os.environ: from elasticsearch import connection kw["connection_class"] = getattr( connection, os.environ["PYTHON_CONNECTION_CLASS"] ) # We do this little dance with the URL to force # Requests to respect 'headers: None' within rest API spec tests. client = elasticsearch.Elasticsearch( ELASTICSEARCH_URL.replace("elastic:changeme@", ""), **kw ) # Wait for the cluster to report a status of 'yellow' for _ in range(100): try: client.cluster.health(wait_for_status="yellow") break except ConnectionError: time.sleep(0.1) else: pytest.skip("Elasticsearch wasn't running at %r" % (ELASTICSEARCH_URL,)) wipe_cluster(client) yield client finally: if client: client.close() @pytest.fixture(scope="function") def sync_client(sync_client_factory): try: yield sync_client_factory finally: wipe_cluster(sync_client_factory) elasticsearch-py-7.17.6/test_elasticsearch/test_server/test_clients.py000066400000000000000000000031711426163262700263570ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import unicode_literals from . import ElasticsearchTestCase class TestUnicode(ElasticsearchTestCase): def test_indices_analyze(self): self.client.indices.analyze(body='{"text": "привет"}') class TestBulk(ElasticsearchTestCase): def test_bulk_works_with_string_body(self): docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}' response = self.client.bulk(body=docs) self.assertFalse(response["errors"]) self.assertEqual(1, len(response["items"])) def test_bulk_works_with_bytestring_body(self): docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}' response = self.client.bulk(body=docs) self.assertFalse(response["errors"]) self.assertEqual(1, len(response["items"])) elasticsearch-py-7.17.6/test_elasticsearch/test_server/test_helpers.py000066400000000000000000001010241426163262700263540ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. 
licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import warnings from datetime import datetime, timedelta import pytest from dateutil import tz from mock import patch from elasticsearch import TransportError, helpers from elasticsearch.helpers import ScanError from ..test_cases import SkipTest from . import ElasticsearchTestCase class FailingBulkClient(object): def __init__( self, client, fail_at=(2,), fail_with=TransportError(599, "Error!", {}) ): self.client = client self._called = 0 self._fail_at = fail_at self.transport = client.transport self._fail_with = fail_with def bulk(self, *args, **kwargs): self._called += 1 if self._called in self._fail_at: raise self._fail_with return self.client.bulk(*args, **kwargs) class TestStreamingBulk(ElasticsearchTestCase): def test_actions_remain_unchanged(self): actions = [{"_id": 1}, {"_id": 2}] for ok, item in helpers.streaming_bulk( self.client, actions, index="test-index" ): self.assertTrue(ok) self.assertEqual([{"_id": 1}, {"_id": 2}], actions) def test_all_documents_get_inserted(self): docs = [{"answer": x, "_id": x} for x in range(100)] with warnings.catch_warnings(record=True) as w: for ok, item in helpers.streaming_bulk( self.client, docs, index="test-index", refresh=True ): self.assertTrue(ok) self.assertEqual(w, []) self.assertEqual(100, self.client.count(index="test-index")["count"]) self.assertEqual( {"answer": 42}, self.client.get(index="test-index", id=42)["_source"] ) def test_all_errors_from_chunk_are_raised_on_failure(self): self.client.indices.create( "i", { "mappings": {"properties": {"a": {"type": "integer"}}}, "settings": {"number_of_shards": 1, "number_of_replicas": 0}, }, ) self.client.cluster.health(wait_for_status="yellow") try: for ok, item in helpers.streaming_bulk( self.client, [{"a": "b"}, {"a": "c"}], index="i", raise_on_error=True ): self.assertTrue(ok) except helpers.BulkIndexError as e: self.assertEqual(2, len(e.errors)) else: assert False, "exception should have been raised" def test_different_op_types(self): if self.es_version() < (0, 90, 1): raise SkipTest("update supported since 0.90.1") self.client.index(index="i", id=45, body={}) self.client.index(index="i", id=42, body={}) docs = [ {"_index": "i", "_type": "_doc", "_id": 47, "f": "v"}, {"_op_type": 
"delete", "_index": "i", "_type": "_doc", "_id": 45}, { "_op_type": "update", "_index": "i", "_type": "_doc", "_id": 42, "doc": {"answer": 42}, }, ] for ok, item in helpers.streaming_bulk(self.client, docs): self.assertTrue(ok) self.assertFalse(self.client.exists(index="i", id=45)) self.assertEqual({"answer": 42}, self.client.get(index="i", id=42)["_source"]) self.assertEqual({"f": "v"}, self.client.get(index="i", id=47)["_source"]) def test_transport_error_can_becaught(self): failing_client = FailingBulkClient(self.client) docs = [ {"_index": "i", "_type": "_doc", "_id": 47, "f": "v"}, {"_index": "i", "_type": "_doc", "_id": 45, "f": "v"}, {"_index": "i", "_type": "_doc", "_id": 42, "f": "v"}, ] results = list( helpers.streaming_bulk( failing_client, docs, raise_on_exception=False, raise_on_error=False, chunk_size=1, ) ) self.assertEqual(3, len(results)) self.assertEqual([True, False, True], [r[0] for r in results]) exc = results[1][1]["index"].pop("exception") self.assertIsInstance(exc, TransportError) self.assertEqual(599, exc.status_code) self.assertEqual( { "index": { "_index": "i", "_type": "_doc", "_id": 45, "data": {"f": "v"}, "error": "TransportError(599, 'Error!')", "status": 599, } }, results[1][1], ) def test_rejected_documents_are_retried(self): failing_client = FailingBulkClient( self.client, fail_with=TransportError(429, "Rejected!", {}) ) docs = [ {"_index": "i", "_type": "_doc", "_id": 47, "f": "v"}, {"_index": "i", "_type": "_doc", "_id": 45, "f": "v"}, {"_index": "i", "_type": "_doc", "_id": 42, "f": "v"}, ] results = list( helpers.streaming_bulk( failing_client, docs, raise_on_exception=False, raise_on_error=False, chunk_size=1, max_retries=1, initial_backoff=0, ) ) self.assertEqual(3, len(results)) self.assertEqual([True, True, True], [r[0] for r in results]) self.client.indices.refresh(index="i") res = self.client.search(index="i") self.assertEqual({"value": 3, "relation": "eq"}, res["hits"]["total"]) self.assertEqual(4, failing_client._called) def test_rejected_documents_are_retried_at_most_max_retries_times(self): failing_client = FailingBulkClient( self.client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!", {}) ) docs = [ {"_index": "i", "_type": "_doc", "_id": 47, "f": "v"}, {"_index": "i", "_type": "_doc", "_id": 45, "f": "v"}, {"_index": "i", "_type": "_doc", "_id": 42, "f": "v"}, ] results = list( helpers.streaming_bulk( failing_client, docs, raise_on_exception=False, raise_on_error=False, chunk_size=1, max_retries=1, initial_backoff=0, ) ) self.assertEqual(3, len(results)) self.assertEqual([False, True, True], [r[0] for r in results]) self.client.indices.refresh(index="i") res = self.client.search(index="i") self.assertEqual({"value": 2, "relation": "eq"}, res["hits"]["total"]) self.assertEqual(4, failing_client._called) def test_transport_error_is_raised_with_max_retries(self): failing_client = FailingBulkClient( self.client, fail_at=(1, 2, 3, 4), fail_with=TransportError(429, "Rejected!", {}), ) def streaming_bulk(): results = list( helpers.streaming_bulk( failing_client, [{"a": 42}, {"a": 39}], raise_on_exception=True, max_retries=3, initial_backoff=0, ) ) return results self.assertRaises(TransportError, streaming_bulk) self.assertEqual(4, failing_client._called) class TestBulk(ElasticsearchTestCase): def test_bulk_works_with_single_item(self): docs = [{"answer": 42, "_id": 1}] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True ) self.assertEqual(1, success) self.assertFalse(failed) self.assertEqual(1, 
self.client.count(index="test-index")["count"]) self.assertEqual( {"answer": 42}, self.client.get(index="test-index", id=1)["_source"] ) def test_all_documents_get_inserted(self): docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True ) self.assertEqual(100, success) self.assertFalse(failed) self.assertEqual(100, self.client.count(index="test-index")["count"]) self.assertEqual( {"answer": 42}, self.client.get(index="test-index", id=42)["_source"] ) def test_stats_only_reports_numbers(self): docs = [{"answer": x} for x in range(100)] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True, stats_only=True ) self.assertEqual(100, success) self.assertEqual(0, failed) self.assertEqual(100, self.client.count(index="test-index")["count"]) def test_errors_are_reported_correctly(self): self.client.indices.create( "i", { "mappings": {"properties": {"a": {"type": "integer"}}}, "settings": {"number_of_shards": 1, "number_of_replicas": 0}, }, ) self.client.cluster.health(wait_for_status="yellow") success, failed = helpers.bulk( self.client, [{"a": 42}, {"a": "c", "_id": 42}], index="i", raise_on_error=False, ) self.assertEqual(1, success) self.assertEqual(1, len(failed)) error = failed[0] self.assertEqual("42", error["index"]["_id"]) self.assertEqual("_doc", error["index"]["_type"]) self.assertEqual("i", error["index"]["_index"]) print(error["index"]["error"]) self.assertTrue( "MapperParsingException" in repr(error["index"]["error"]) or "mapper_parsing_exception" in repr(error["index"]["error"]) ) def test_error_is_raised(self): self.client.indices.create( "i", { "mappings": {"properties": {"a": {"type": "integer"}}}, "settings": {"number_of_shards": 1, "number_of_replicas": 0}, }, ) self.client.cluster.health(wait_for_status="yellow") self.assertRaises( helpers.BulkIndexError, helpers.bulk, self.client, [{"a": 42}, {"a": "c"}], index="i", ) def test_ignore_error_if_raised(self): # ignore the status code 400 in tuple helpers.bulk( self.client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=(400,) ) # ignore the status code 400 in list helpers.bulk( self.client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=[ 400, ], ) # ignore the status code 400 helpers.bulk(self.client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=400) # ignore only the status code in the `ignore_status` argument self.assertRaises( helpers.BulkIndexError, helpers.bulk, self.client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=(444,), ) # ignore transport error exception failing_client = FailingBulkClient(self.client) helpers.bulk(failing_client, [{"a": 42}], index="i", ignore_status=(599,)) def test_errors_are_collected_properly(self): self.client.indices.create( "i", { "mappings": {"properties": {"a": {"type": "integer"}}}, "settings": {"number_of_shards": 1, "number_of_replicas": 0}, }, ) self.client.cluster.health(wait_for_status="yellow") success, failed = helpers.bulk( self.client, [{"a": 42}, {"a": "c"}], index="i", stats_only=True, raise_on_error=False, ) self.assertEqual(1, success) self.assertEqual(1, failed) class TestScan(ElasticsearchTestCase): mock_scroll_responses = [ { "_scroll_id": "dummy_id", "_shards": {"successful": 4, "total": 5, "skipped": 0}, "hits": {"hits": [{"scroll_data": 42}]}, }, { "_scroll_id": "dummy_id", "_shards": {"successful": 4, "total": 5, "skipped": 0}, "hits": {"hits": []}, }, ] def teardown_method(self, m): self.client.transport.perform_request("DELETE", 
"/_search/scroll/_all") super(TestScan, self).teardown_method(m) def test_order_can_be_preserved(self): bulk = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_type": "_doc", "_id": x}}) bulk.append({"answer": x, "correct": x == 42}) self.client.bulk(bulk, refresh=True) with warnings.catch_warnings(record=True) as w: docs = list( helpers.scan( self.client, index="test_index", query={"sort": "answer"}, preserve_order=True, ) ) # Asserts that no warnings are raised. self.assertEqual(w, []) self.assertEqual(100, len(docs)) self.assertEqual(list(map(str, range(100))), list(d["_id"] for d in docs)) self.assertEqual(list(range(100)), list(d["_source"]["answer"] for d in docs)) def test_all_documents_are_read(self): bulk = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_type": "_doc", "_id": x}}) bulk.append({"answer": x, "correct": x == 42}) self.client.bulk(bulk, refresh=True) docs = list(helpers.scan(self.client, index="test_index", size=2)) self.assertEqual(100, len(docs)) self.assertEqual(set(map(str, range(100))), set(d["_id"] for d in docs)) self.assertEqual(set(range(100)), set(d["_source"]["answer"] for d in docs)) def test_scroll_error(self): bulk = [] for x in range(4): bulk.append({"index": {"_index": "test_index", "_type": "_doc"}}) bulk.append({"value": x}) self.client.bulk(bulk, refresh=True) with patch.object(self.client, "scroll") as scroll_mock: scroll_mock.side_effect = self.mock_scroll_responses data = list( helpers.scan( self.client, index="test_index", size=2, raise_on_error=False, clear_scroll=False, ) ) self.assertEqual(len(data), 3) self.assertEqual(data[-1], {"scroll_data": 42}) scroll_mock.side_effect = self.mock_scroll_responses with self.assertRaises(ScanError): data = list( helpers.scan( self.client, index="test_index", size=2, raise_on_error=True, clear_scroll=False, ) ) self.assertEqual(len(data), 3) self.assertEqual(data[-1], {"scroll_data": 42}) def test_initial_search_error(self): with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "dummy_id", "_shards": {"successful": 4, "total": 5, "skipped": 0}, "hits": {"hits": [{"search_data": 1}]}, } client_mock.scroll.side_effect = self.mock_scroll_responses data = list( helpers.scan( self.client, index="test_index", size=2, raise_on_error=False ) ) self.assertEqual(data, [{"search_data": 1}, {"scroll_data": 42}]) client_mock.scroll.side_effect = self.mock_scroll_responses with self.assertRaises(ScanError): data = list( helpers.scan( self.client, index="test_index", size=2, raise_on_error=True ) ) self.assertEqual(data, [{"search_data": 1}]) client_mock.scroll.assert_not_called() def test_no_scroll_id_fast_route(self): with patch.object(self, "client") as client_mock: client_mock.search.return_value = {"no": "_scroll_id"} data = list(helpers.scan(self.client, index="test_index")) self.assertEqual(data, []) client_mock.scroll.assert_not_called() client_mock.clear_scroll.assert_not_called() def test_scan_auth_kwargs_forwarded(self): for key, val in { "api_key": ("name", "value"), "http_auth": ("username", "password"), "headers": {"custom": "header"}, }.items(): with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": [{"search_data": 1}]}, } client_mock.scroll.return_value = { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": []}, } 
client_mock.clear_scroll.return_value = {} data = list(helpers.scan(self.client, index="test_index", **{key: val})) self.assertEqual(data, [{"search_data": 1}]) # Assert that 'search', 'scroll' and 'clear_scroll' all # received the extra kwarg related to authentication. for api_mock in ( client_mock.search, client_mock.scroll, client_mock.clear_scroll, ): self.assertEqual(api_mock.call_args[1][key], val) def test_scan_auth_kwargs_favor_scroll_kwargs_option(self): with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": [{"search_data": 1}]}, } client_mock.scroll.return_value = { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": []}, } client_mock.clear_scroll.return_value = {} data = list( helpers.scan( self.client, index="test_index", scroll_kwargs={"headers": {"scroll": "kwargs"}, "sort": "asc"}, headers={"not scroll": "kwargs"}, ) ) self.assertEqual(data, [{"search_data": 1}]) # Assert that we see 'scroll_kwargs' options used instead of 'kwargs' self.assertEqual( client_mock.scroll.call_args[1]["headers"], {"scroll": "kwargs"} ) self.assertEqual(client_mock.scroll.call_args[1]["sort"], "asc") def test_scan_duplicate_parameters(self): with patch.object(self.client, "search") as search_mock, patch.object( self.client, "scroll" ) as scroll_mock, patch.object( self.client, "clear_scroll" ) as clear_scroll_mock: search_mock.return_value = { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": [{"field": "value"}]}, } scroll_mock.return_value = { "_scroll_id": "scroll_id", "_shards": {"successful": 5, "total": 5, "skipped": 0}, "hits": {"hits": []}, } clear_scroll_mock.return_value = {"acknowledged": True} data = [ x for x in helpers.scan( self.client, index="test_index", size=10, query={"size": 1}, scroll_kwargs={"scroll": "10m", "rest_total_hits_as_int": True}, ) ] assert data == [{"field": "value"}] search_mock.assert_called_with( index="test_index", size=10, sort="_doc", scroll="5m", request_timeout=None, params={"__elastic_client_meta": (("h", "s"),)}, ) scroll_mock.assert_called_with( scroll="5m", rest_total_hits_as_int=True, params={"__elastic_client_meta": (("h", "s"),)}, scroll_id="scroll_id", ) clear_scroll_mock.assert_called_with( scroll_id="scroll_id", ignore=(404,), params={"__elastic_client_meta": (("h", "s"),)}, ) @patch("elasticsearch.helpers.actions.logger") def test_logger(self, logger_mock): bulk = [] for x in range(4): bulk.append({"index": {"_index": "test_index", "_type": "_doc"}}) bulk.append({"value": x}) self.client.bulk(bulk, refresh=True) with patch.object(self.client, "scroll") as scroll_mock: scroll_mock.side_effect = self.mock_scroll_responses list( helpers.scan( self.client, index="test_index", size=2, raise_on_error=False, clear_scroll=False, ) ) logger_mock.warning.assert_called() scroll_mock.side_effect = self.mock_scroll_responses try: list( helpers.scan( self.client, index="test_index", size=2, raise_on_error=True, clear_scroll=False, ) ) except ScanError: pass logger_mock.warning.assert_called() def test_clear_scroll(self): bulk = [] for x in range(4): bulk.append({"index": {"_index": "test_index", "_type": "_doc"}}) bulk.append({"value": x}) self.client.bulk(bulk, refresh=True) with patch.object( self.client, "clear_scroll", wraps=self.client.clear_scroll ) as spy: list(helpers.scan(self.client, index="test_index", size=2)) 
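# By default scan() clears the scroll context exactly once after the iterator is exhausted.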
spy.assert_called_once() spy.reset_mock() list( helpers.scan(self.client, index="test_index", size=2, clear_scroll=True) ) spy.assert_called_once() spy.reset_mock() list( helpers.scan( self.client, index="test_index", size=2, clear_scroll=False ) ) spy.assert_not_called() def test_shards_no_skipped_field(self): with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "dummy_id", "_shards": {"successful": 5, "total": 5}, "hits": {"hits": [{"search_data": 1}]}, } client_mock.scroll.side_effect = [ { "_scroll_id": "dummy_id", "_shards": {"successful": 5, "total": 5}, "hits": {"hits": [{"scroll_data": 42}]}, }, { "_scroll_id": "dummy_id", "_shards": {"successful": 5, "total": 5}, "hits": {"hits": []}, }, ] data = list( helpers.scan( self.client, index="test_index", size=2, raise_on_error=True ) ) self.assertEqual(data, [{"search_data": 1}, {"scroll_data": 42}]) class TestReindex(ElasticsearchTestCase): def setup_method(self, _): bulk = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_type": "_doc", "_id": x}}) bulk.append( { "answer": x, "correct": x == 42, "type": "answers" if x % 2 == 0 else "questions", } ) self.client.bulk(bulk, refresh=True) def test_reindex_passes_kwargs_to_scan_and_bulk(self): helpers.reindex( self.client, "test_index", "prod_index", scan_kwargs={"q": "type:answers"}, bulk_kwargs={"refresh": True}, ) self.assertTrue(self.client.indices.exists("prod_index")) self.assertEqual( 50, self.client.count(index="prod_index", q="type:answers")["count"] ) self.assertEqual( {"answer": 42, "correct": True, "type": "answers"}, self.client.get(index="prod_index", id=42)["_source"], ) def test_reindex_accepts_a_query(self): helpers.reindex( self.client, "test_index", "prod_index", query={"query": {"bool": {"filter": {"term": {"type": "answers"}}}}}, ) self.client.indices.refresh() self.assertTrue(self.client.indices.exists("prod_index")) self.assertEqual( 50, self.client.count(index="prod_index", q="type:answers")["count"] ) self.assertEqual( {"answer": 42, "correct": True, "type": "answers"}, self.client.get(index="prod_index", id=42)["_source"], ) def test_all_documents_get_moved(self): helpers.reindex(self.client, "test_index", "prod_index") self.client.indices.refresh() self.assertTrue(self.client.indices.exists("prod_index")) self.assertEqual( 50, self.client.count(index="prod_index", q="type:questions")["count"] ) self.assertEqual( 50, self.client.count(index="prod_index", q="type:answers")["count"] ) self.assertEqual( {"answer": 42, "correct": True, "type": "answers"}, self.client.get(index="prod_index", id=42)["_source"], ) class TestParentChildReindex(ElasticsearchTestCase): def setup_method(self, _): body = { "settings": {"number_of_shards": 1, "number_of_replicas": 0}, "mappings": { "properties": { "question_answer": { "type": "join", "relations": {"question": "answer"}, } } }, } self.client.indices.create(index="test-index", body=body) self.client.indices.create(index="real-index", body=body) self.client.index( index="test-index", id=42, body={"question_answer": "question"} ) self.client.index( index="test-index", id=47, routing=42, body={"some": "data", "question_answer": {"name": "answer", "parent": 42}}, ) self.client.indices.refresh(index="test-index") def test_children_are_reindexed_correctly(self): helpers.reindex(self.client, "test-index", "real-index") q = self.client.get(index="real-index", id=42) self.assertEqual( { "_id": "42", "_index": "real-index", "_primary_term": 1, "_seq_no": 0, "_source": 
{"question_answer": "question"}, "_type": "_doc", "_version": 1, "found": True, }, q, ) q = self.client.get(index="test-index", id=47, routing=42) self.assertEqual( { "_routing": "42", "_id": "47", "_index": "test-index", "_primary_term": 1, "_seq_no": 1, "_source": { "some": "data", "question_answer": {"name": "answer", "parent": 42}, }, "_type": "_doc", "_version": 1, "found": True, }, q, ) @pytest.fixture(scope="function") def reindex_data_stream_setup(sync_client): dt = datetime.now(tz=tz.UTC) bulk = [] for x in range(100): bulk.append({"index": {"_index": "test_index_stream", "_id": x}}) bulk.append( { "answer": x, "correct": x == 42, "type": "answers" if x % 2 == 0 else "questions", "@timestamp": (dt - timedelta(days=x)).isoformat(), } ) sync_client.bulk(bulk, refresh=True) sync_client.indices.put_index_template( name="my-index-template", body={ "index_patterns": ["py-*-*"], "data_stream": {}, }, ) sync_client.indices.create_data_stream(name="py-test-stream") sync_client.indices.refresh() class TestDataStreamReindex(object): @pytest.mark.usefixtures("reindex_data_stream_setup") @pytest.mark.parametrize("op_type", [None, "create"]) def test_reindex_index_datastream(self, op_type, sync_client): helpers.reindex( sync_client, source_index="test_index_stream", target_index="py-test-stream", query={"query": {"bool": {"filter": {"term": {"type": "answers"}}}}}, op_type=op_type, ) sync_client.indices.refresh() assert sync_client.indices.exists(index="py-test-stream") assert ( 50 == sync_client.count(index="py-test-stream", q="type:answers")["count"] ) @pytest.mark.usefixtures("reindex_data_stream_setup") def test_reindex_index_datastream_op_type_index(self, sync_client): with pytest.raises( ValueError, match="Data streams must have 'op_type' set to 'create'" ): helpers.reindex( sync_client, source_index="test_index_stream", target_index="py-test-stream", query={"query": {"bool": {"filter": {"term": {"type": "answers"}}}}}, op_type="_index", ) @pytest.mark.parametrize( "scan_kwargs", [ {"from": 1}, {"from_": 1}, {"query": {"from": 1}}, {"query": {"from_": 1}}, {"query": {"query": {"match_all": {}}}, "from": 1}, {"query": {"query": {"match_all": {}}}, "from_": 1}, ], ) def test_scan_from_keyword_is_aliased(sync_client, scan_kwargs): with patch.object( sync_client, "search", return_value={ "_scroll_id": "dummy_id", "_shards": {"successful": 5, "total": 5}, "hits": {"hits": []}, }, ) as search_mock, patch.object(sync_client, "clear_scroll"): list(helpers.scan(sync_client, index="test_index", **scan_kwargs)) assert search_mock.call_args[1]["from_"] == 1 assert "from" not in search_mock.call_args[1] elasticsearch-py-7.17.6/test_elasticsearch/test_server/test_mapbox_vector_tile.py000066400000000000000000000142271426163262700306070ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. import re from unittest import SkipTest import pytest from mock import patch from elasticsearch import ( Elasticsearch, RequestError, RequestsHttpConnection, Urllib3HttpConnection, ) from elasticsearch.helpers.test import CA_CERTS, ELASTICSEARCH_URL @pytest.fixture(scope="function") def mvt_setup(sync_client): sync_client.indices.create( index="museums", body={ "mappings": { "properties": { "location": {"type": "geo_point"}, "name": {"type": "keyword"}, "price": {"type": "long"}, "included": {"type": "boolean"}, } } }, ) sync_client.bulk( index="museums", body=[ {"index": {"_id": "1"}}, { "location": "52.374081,4.912350", "name": "NEMO Science Museum", "price": 1750, "included": True, }, {"index": {"_id": "2"}}, { "location": "52.369219,4.901618", "name": "Museum Het Rembrandthuis", "price": 1500, "included": False, }, {"index": {"_id": "3"}}, { "location": "52.371667,4.914722", "name": "Nederlands Scheepvaartmuseum", "price": 1650, "included": True, }, {"index": {"_id": "4"}}, { "location": "52.371667,4.914722", "name": "Amsterdam Centre for Architecture", "price": 0, "included": True, }, ], refresh=True, ) @pytest.mark.parametrize( "connection_class", [Urllib3HttpConnection, RequestsHttpConnection] ) def test_mapbox_vector_tile_logging(mvt_setup, connection_class): client = Elasticsearch( ELASTICSEARCH_URL, connection_class=connection_class, ca_certs=CA_CERTS ) client.info() with patch("elasticsearch.connection.base.logger") as logger: client.search_mvt( index="museums", zoom=13, x=4207, y=2692, field="location", ) assert logger.info.call_count == 1 assert re.search( r"^POST https?://[^/]+/museums/_mvt/location/13/4207/2692 \[status:200 request:0\.[0-9]{3}s\]$", logger.info.call_args_list[0][0][0] % logger.info.call_args_list[0][0][1:], ) assert logger.debug.call_count == 2 req, resp = logger.debug.call_args_list assert req[0] == ("> %s", None) assert re.search( r"< b'.+'$", resp[0][0] % (resp[0][1:]), flags=re.DOTALL, ) # Errors should still be JSON with patch("elasticsearch.connection.base.logger") as logger, pytest.raises( RequestError ) as e: client.search_mvt( index="museums", zoom=-100, x=4207, y=2692, field="location", ) assert e.value.info == { "error": { "root_cause": [ { "type": "illegal_argument_exception", "reason": "Invalid geotile_grid precision of -100. Must be between 0 and 29.", } ], "type": "illegal_argument_exception", "reason": "Invalid geotile_grid precision of -100. Must be between 0 and 29.", }, "status": 400, } assert e.value.status_code == 400 assert logger.warning.call_count == 1 assert re.search( r"^POST https?://[^/]+/museums/_mvt/location/-100/4207/2692 \[status:400 request:0\.[0-9]{3}s\]$", logger.warning.call_args_list[0][0][0] % logger.warning.call_args_list[0][0][1:], ) assert logger.debug.call_count == 2 req, resp = logger.debug.call_args_list assert req[0] == ("> %s", None) # The JSON error body is still logged properly. assert resp[0][0] % (resp[0][1:]) == ( '< {"error":{"root_cause":[{"type":"illegal_argument_exception","reason":"Invalid ' 'geotile_grid precision of -100. Must be between 0 and 29."}],"type":"illegal_argument_exception",' '"reason":"Invalid geotile_grid precision of -100. 
Must be between 0 and 29."},"status":400}' ) @pytest.mark.parametrize( "connection_class", [Urllib3HttpConnection, RequestsHttpConnection] ) def test_mapbox_vector_tile_response(mvt_setup, connection_class): client = Elasticsearch( ELASTICSEARCH_URL, connection_class=connection_class, ca_certs=CA_CERTS ) resp = client.search_mvt( index="museums", zoom=13, x=4207, y=2692, field="location", grid_precision=2, fields=["name", "price"], query={"term": {"included": True}}, aggs={ "min_price": {"min": {"field": "price"}}, "max_price": {"max": {"field": "price"}}, "avg_price": {"avg": {"field": "price"}}, }, ) assert isinstance(resp, bytes) try: import mapbox_vector_tile except ImportError: raise SkipTest("Requires the 'mapbox-vector-tile' package") # Decode the binary as MVT tile = mapbox_vector_tile.decode(resp) # Assert some general things about the structure, mostly we want # to know that we got back a valid MVT. assert set(tile.keys()) == {"hits", "aggs", "meta"} elasticsearch-py-7.17.6/test_elasticsearch/test_server/test_rest_api_spec.py000066400000000000000000000567071426163262700275530ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ Dynamically generated set of TestCases based on set of yaml files describing some integration tests. These files are shared among all official Elasticsearch clients. """ import io import json import os import re import sys import warnings import zipfile import pytest import urllib3 import yaml from elasticsearch import ElasticsearchWarning, RequestError, TransportError from elasticsearch.client.utils import _COMPATIBILITY_MIMETYPE, _base64_auth_header from elasticsearch.compat import string_types from elasticsearch.helpers.test import _get_version from . 
import get_client # some params had to be changed in python, keep track of them so we can rename # those in the tests accordingly PARAMS_RENAMES = {"type": "doc_type", "from": "from_"} APIS_USING_TYPE_INSTEAD_OF_DOC_TYPE = { "nodes.hot_threads", "license.post_start_trial", } # mapping from catch values to http status codes CATCH_CODES = {"missing": 404, "conflict": 409, "unauthorized": 401} # test features we have implemented IMPLEMENTED_FEATURES = { "gtelte", "stash_in_path", "headers", "catch_unauthorized", "default_shards", "warnings", "allowed_warnings", "allowed_warnings_regex", "contains", "arbitrary_key", "transform_and_set", } # broken YAML tests on some releases SKIP_TESTS = { # Uses bad input intentionally "update/90_error[0]", "search/20_default_values[1]", # Warning about date_histogram.interval deprecation is raised randomly "search/aggregation/250_moving_fn[1]", # body: null "indices/simulate_index_template/10_basic[2]", # No ML node with sufficient capacity / random ML failing "ml/start_stop_datafeed", "ml/post_data", "ml/jobs_crud", "ml/datafeeds_crud", "ml/set_upgrade_mode", "ml/reset_job[2]", "ml/jobs_get_stats", "ml/get_datafeed_stats", "ml/get_trained_model_stats", "ml/delete_job_force", "ml/jobs_get_result_overall_buckets", "ml/bucket_correlation_agg[0]", "ml/job_groups", "transform/transforms_stats_continuous[0]", # Fails bad request instead of 404? "ml/inference_crud", # rollup/security_tests time out? "rollup/security_tests", # Our TLS certs are custom "ssl/10_basic[0]", # Our user is custom "users/10_basic[3]", # Shards/snapshots aren't right? "searchable_snapshots/10_usage[1]", # flaky data streams? "data_stream/10_basic[1]", "data_stream/80_resolve_index_data_streams[1]", # bad formatting? "cat/allocation/10_basic", # service account number not right? "service_accounts/10_basic[1]", # doesn't use 'contains' properly? "xpack/10_basic[0]", "privileges/40_get_user_privs[0]", "privileges/40_get_user_privs[1]", # bad use of 'is_false'? "indices/get_alias/10_basic[22]", # unique usage of 'set' "indices/stats/50_disk_usage[0]", "indices/stats/60_field_usage[0]", } APIS_WITH_BODY_FIELDS = { "search", "search_mvt", "scroll", "clear_scroll", "update", "indices.create", } XPACK_FEATURES = None ES_VERSION = None RUN_ASYNC_REST_API_TESTS = ( sys.version_info >= (3, 6) and os.environ.get("PYTHON_CONNECTION_CLASS") == "RequestsHttpConnection" ) FALSEY_VALUES = ("", None, False, 0, 0.0) # Means the client will be emitting 'application/vnd.elasticsearch;compatible-with=X' headers COMPATIBILITY_MODE_ENABLED = os.environ.get("ELASTIC_CLIENT_APIVERSIONING") in ( "1", "true", ) COMPATIBILITY_MIMETYPE = _COMPATIBILITY_MIMETYPE class YamlRunner: def __init__(self, client): self.client = client self.last_response = None self._run_code = None self._setup_code = None self._teardown_code = None self._state = {} def use_spec(self, test_spec): self._setup_code = test_spec.pop("setup", None) self._run_code = test_spec.pop("run", None) self._teardown_code = test_spec.pop("teardown", None) def setup(self): # Pull skips from individual tests to not do unnecessary setup. 
skip_code = [] for action in self._run_code: assert len(action) == 1 action_type, _ = list(action.items())[0] if action_type == "skip": skip_code.append(action) else: break if self._setup_code or skip_code: self.section("setup") if skip_code: self.run_code(skip_code) if self._setup_code: self.run_code(self._setup_code) def teardown(self): if self._teardown_code: self.section("teardown") self.run_code(self._teardown_code) def es_version(self): global ES_VERSION if ES_VERSION is None: version_string = (self.client.info())["version"]["number"] if "." not in version_string: return () version = version_string.strip().split(".") ES_VERSION = tuple(int(v) if v.isdigit() else 999 for v in version) return ES_VERSION def section(self, name): print(("=" * 10) + " " + name + " " + ("=" * 10)) def run(self): try: self.setup() self.section("test") self.run_code(self._run_code) finally: try: self.teardown() except Exception: pass def run_code(self, test): """Execute an instruction based on its type.""" for action in test: assert len(action) == 1 action_type, action = list(action.items())[0] print(action_type, action) if hasattr(self, "run_" + action_type): getattr(self, "run_" + action_type)(action) else: raise RuntimeError("Invalid action type %r" % (action_type,)) def run_do(self, action): api = self.client headers = action.pop("headers", None) catch = action.pop("catch", None) warn = action.pop("warnings", ()) allowed_warnings = action.pop("allowed_warnings", ()) if isinstance(allowed_warnings, str): allowed_warnings = (allowed_warnings,) allowed_warnings_regex = action.pop("allowed_warnings_regex", ()) if isinstance(allowed_warnings_regex, str): allowed_warnings_regex = (allowed_warnings_regex,) assert len(action) == 1 # Remove the x_pack_rest_user authentication # if it's given via headers. We're already authenticated # via the 'elastic' user. if ( headers and headers.get("Authorization", None) == "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" ): headers.pop("Authorization") if headers and "Content-Type" in headers and COMPATIBILITY_MODE_ENABLED: headers["Content-Type"] = COMPATIBILITY_MIMETYPE method, args = list(action.items())[0] args["headers"] = headers # locate api endpoint for m in method.split("."): # Some deprecated APIs are prefixed with 'xpack-*' if m.startswith("xpack-"): m = m.replace("xpack-", "") assert hasattr(api, m) api = getattr(api, m) # some parameters had to be renamed to not clash with python builtins, # compensate for k in PARAMS_RENAMES: # Don't do the 'doc_type' rename for APIs that actually want 'type' if k == "type" and method in APIS_USING_TYPE_INSTEAD_OF_DOC_TYPE: continue if k in args: args[PARAMS_RENAMES[k]] = args.pop(k) # resolve vars for k in args: args[k] = self._resolve(args[k]) # If there's a body parameter given to an API with # body fields enabled we expand the body to parameters. if ( "body" in args and isinstance(args["body"], dict) and method in APIS_WITH_BODY_FIELDS ): args.update( {PARAMS_RENAMES.get(k, k): v for k, v in args.pop("body").items()} ) warnings.simplefilter("always", category=ElasticsearchWarning) with warnings.catch_warnings(record=True) as caught_warnings: try: self.last_response = api(**args) except Exception as e: if not catch: raise self.run_catch(catch, e) else: if catch: raise AssertionError( "Failed to catch %r in %r." % (catch, self.last_response) ) # Filter out warnings raised by other components. 
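# Only ElasticsearchWarning instances are considered; a warning is dropped when it matches an 'allowed_warnings' entry verbatim or any 'allowed_warnings_regex' pattern.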
caught_warnings = [ str(w.message) for w in caught_warnings if w.category == ElasticsearchWarning and (not allowed_warnings or str(w.message) not in allowed_warnings) and ( not allowed_warnings_regex or all( re.search(pattern, str(w.message)) is None for pattern in allowed_warnings_regex ) ) ] # This warning can show up in many places but isn't accounted for # in tests, so we remove it to make sure things pass. include_type_name_warning = ( "[types removal] Using include_type_name in create index requests is deprecated. " "The parameter will be removed in the next major version." ) if ( include_type_name_warning in caught_warnings and include_type_name_warning not in warn ): caught_warnings.remove(include_type_name_warning) # Sorting removes the issue with order raised. We only care about # if all warnings are raised in the single API call. if warn and sorted(warn) != sorted(caught_warnings): raise AssertionError( "Expected warnings not equal to actual warnings: expected=%r actual=%r" % (warn, caught_warnings) ) def run_catch(self, catch, exception): if catch == "param": assert isinstance(exception, TypeError) return assert isinstance(exception, TransportError) if catch in CATCH_CODES: assert CATCH_CODES[catch] == exception.status_code elif catch[0] == "/" and catch[-1] == "/": assert ( re.search(catch[1:-1], exception.error + " " + repr(exception.info)), "%s not in %r" % (catch, exception.info), ) is not None self.last_response = exception.info def run_skip(self, skip): global IMPLEMENTED_FEATURES if "features" in skip: features = skip["features"] if not isinstance(features, (tuple, list)): features = [features] for feature in features: if feature in IMPLEMENTED_FEATURES: continue pytest.skip("feature '%s' is not supported" % feature) if "version" in skip: version, reason = skip["version"], skip["reason"] if version == "all": pytest.skip(reason) min_version, max_version = version.split("-") min_version = _get_version(min_version) or (0,) max_version = _get_version(max_version) or (999,) if min_version <= (self.es_version()) <= max_version: pytest.skip(reason) def run_gt(self, action): for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) > value def run_gte(self, action): for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) >= value def run_lt(self, action): for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) < value def run_lte(self, action): for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) <= value def run_set(self, action): for key, value in action.items(): value = self._resolve(value) self._state[value] = self._lookup(key) def run_is_false(self, action): try: value = self._lookup(action) except AssertionError: pass else: assert value in FALSEY_VALUES def run_is_true(self, action): value = self._lookup(action) assert value not in FALSEY_VALUES def run_length(self, action): for path, expected in action.items(): value = self._lookup(path) expected = self._resolve(expected) assert expected == len(value) def run_match(self, action): for path, expected in action.items(): value = self._lookup(path) expected = self._resolve(expected) if ( isinstance(expected, string_types) and expected.startswith("/") and expected.endswith("/") ): expected = re.compile(expected[1:-1], re.VERBOSE | re.MULTILINE) assert expected.search(value), "%r does not match %r" % ( value, expected, ) elif isinstance(value, list) and isinstance(expected, list): assert len(value) == 
len( expected ), "Length between %r and %r wasn't equal" % (value, expected) [self._assert_match_equals(a, b) for a, b in zip(value, expected)] else: self._assert_match_equals(value, expected) def run_contains(self, action): for path, expected in action.items(): value = self._lookup(path) # list[dict[str,str]] is returned expected = self._resolve(expected) # dict[str, str] if expected not in value: raise AssertionError("%s is not contained by %s" % (expected, value)) def run_transform_and_set(self, action): for key, value in action.items(): # Convert #base64EncodeCredentials(id,api_key) to ["id", "api_key"] if "#base64EncodeCredentials" in value: value = value.replace("#base64EncodeCredentials", "") value = value.replace("(", "").replace(")", "").split(",") self._state[key] = _base64_auth_header( (self._lookup(value[0]), self._lookup(value[1])) ) def _resolve(self, value): # resolve variables if isinstance(value, string_types) and "$" in value: for k, v in self._state.items(): for key_replace in ("${" + k + "}", "$" + k): if value == key_replace: value = v break # We only do the in-string replacement if using ${...} elif ( key_replace.startswith("${") and isinstance(value, string_types) and key_replace in value ): value = value.replace(key_replace, str(v)) break # We only do the in-string replacement if value is JSON string # E.g. '{\n "password_hash" : "$hash"\n}\n' elif ( key_replace.startswith("$") and isinstance(value, string_types) and key_replace in value and not value.startswith("$") ): value = value.replace(key_replace, str(v)) break if isinstance(value, string_types): value = value.strip() elif isinstance(value, dict): value = dict((k, self._resolve(v)) for (k, v) in value.items()) elif isinstance(value, list): value = list(map(self._resolve, value)) return value def _lookup(self, path): # fetch the possibly nested value from last_response value = self.last_response if path == "$body": return value path = path.replace(r"\.", "\1") for step in path.split("."): if not step: continue step = step.replace("\1", ".") step = self._resolve(step) if ( isinstance(step, string_types) and step.isdigit() and isinstance(value, list) ): step = int(step) assert isinstance(value, list) assert len(value) > step elif step == "_arbitrary_key_": return list(value.keys())[0] else: assert step in value value = value[step] return value def _feature_enabled(self, name): global XPACK_FEATURES, IMPLEMENTED_FEATURES if XPACK_FEATURES is None: try: xinfo = self.client.xpack.info() XPACK_FEATURES = set( f for f in xinfo["features"] if xinfo["features"][f]["enabled"] ) IMPLEMENTED_FEATURES.add("xpack") except RequestError: XPACK_FEATURES = set() IMPLEMENTED_FEATURES.add("no_xpack") return name in XPACK_FEATURES def _assert_match_equals(self, a, b): # Handle for large floating points with 'E' if isinstance(b, string_types) and isinstance(a, float) and "e" in repr(a): a = repr(a).replace("e+", "E") assert a == b, "%r does not match %r" % (a, b) @pytest.fixture(scope="function") def sync_runner(sync_client): return YamlRunner(sync_client) YAML_TEST_SPECS = [] # Try loading the REST API test specs from the Elastic Artifacts API try: # Construct the HTTP and Elasticsearch client http = urllib3.PoolManager( retries=10, headers=urllib3.util.make_headers(accept_encoding=True) ) client = get_client() # If we're running in compatibility mode the server won't have the previous versions' # test suite so we use the overridden 'STACK_VERSION' in run-repository.sh instead. 
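    # Illustrative walk-through of the two paths below (values hypothetical):
    # with STACK_VERSION="8.3.0-SNAPSHOT" and compatibility mode enabled we
    # would take:
    #     version_number = "8.3.0-SNAPSHOT"; build_hash = ""  # latest build wins
    # otherwise we ask the live cluster:
    #     info = client.info()
    #     version_number = info["version"]["number"]      # e.g. "7.17.6"
    #     build_hash = info["version"]["build_hash"]      # a commit hash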
if os.environ.get("STACK_VERSION") and COMPATIBILITY_MODE_ENABLED: version_number = os.environ["STACK_VERSION"] # Setting 'build_hash' to empty means we'll always get the latest build. build_hash = "" else: # Make a request to Elasticsearch for the build hash, we'll be looking for # an artifact with this same hash to download test specs for. client_info = client.info() version_number = client_info["version"]["number"] build_hash = client_info["version"]["build_hash"] # Now talk to the artifacts API with the 'STACK_VERSION' environment variable resp = http.request( "GET", "https://artifacts-api.elastic.co/v1/versions/%s" % (version_number,), ) resp = json.loads(resp.data.decode("utf-8")) # Look through every build and see if one matches the commit hash # we're looking for. If not it's okay, we'll just use the latest and # hope for the best! builds = resp["version"]["builds"] for build in builds: if build["projects"]["elasticsearch"]["commit_hash"] == build_hash: break else: build = builds[0] # Use the latest # Now we're looking for the 'rest-api-spec--sources.jar' file # to download and extract in-memory. packages = build["projects"]["elasticsearch"]["packages"] for package in packages: if re.match(r"rest-resources-zip-.*\.zip", package): package_url = packages[package]["url"] break else: raise RuntimeError( "Could not find the package 'rest-resources-zip-*.zip' in build %r" % build ) # Download the zip and start reading YAML from the files in memory package_zip = zipfile.ZipFile(io.BytesIO(http.request("GET", package_url).data)) for yaml_file in package_zip.namelist(): yaml_filter_pattern = r"^rest-api-spec/%s/.*\.ya?ml$" % ( "compatTest" if COMPATIBILITY_MODE_ENABLED else "test" ) if not re.match(yaml_filter_pattern, yaml_file): continue yaml_tests = list(yaml.safe_load_all(package_zip.read(yaml_file))) # Each file may have a "test" named 'setup' or 'teardown', # these sets of steps should be run at the beginning and end # of every other test within the file so we do one pass to capture those. setup_steps = teardown_steps = None test_numbers_and_steps = [] test_number = 0 for yaml_test in yaml_tests: test_name, test_step = yaml_test.popitem() if test_name == "setup": setup_steps = test_step elif test_name == "teardown": teardown_steps = test_step else: test_numbers_and_steps.append((test_number, test_step)) test_number += 1 # Now we combine setup, teardown, and test_steps into # a set of pytest.param() instances for test_number, test_step in test_numbers_and_steps: # Build the id from the name of the YAML file and # the number within that file. Most important step # is to remove most of the file path prefixes and # the .yml suffix. pytest_test_name = yaml_file.rpartition(".")[0].replace(".", "/") for prefix in ( "rest-api-spec/", "compatTest/", "test/", "free/", "platinum/", ): if pytest_test_name.startswith(prefix): pytest_test_name = pytest_test_name[len(prefix) :] pytest_param_id = "%s[%d]" % (pytest_test_name, test_number) pytest_param = { "setup": setup_steps, "run": test_step, "teardown": teardown_steps, } # Skip either 'test_name' or 'test_name[x]' if pytest_test_name in SKIP_TESTS or pytest_param_id in SKIP_TESTS: pytest_param["skip"] = True YAML_TEST_SPECS.append(pytest.param(pytest_param, id=pytest_param_id)) except Exception as e: warnings.warn("Could not load REST API tests: %s" % (str(e),)) # Sort the tests by ID so they're grouped together nicely. 
YAML_TEST_SPECS = sorted(YAML_TEST_SPECS, key=lambda param: param.id) if not RUN_ASYNC_REST_API_TESTS: @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) def test_rest_api_spec(test_spec, sync_runner): if test_spec.get("skip", False): pytest.skip("Manually skipped in 'SKIP_TESTS'") sync_runner.use_spec(test_spec) sync_runner.run() elasticsearch-py-7.17.6/test_elasticsearch/test_transport.py000066400000000000000000001002371426163262700244060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import unicode_literals import json import time import warnings import pytest from mock import patch from elasticsearch.connection import Connection from elasticsearch.connection_pool import DummyConnectionPool from elasticsearch.exceptions import ( AuthenticationException, AuthorizationException, ConnectionError, ElasticsearchWarning, NotFoundError, TransportError, UnsupportedProductError, ) from elasticsearch.transport import Transport, _ProductChecker, get_host_info from .test_cases import TestCase class DummyConnection(Connection): def __init__(self, **kwargs): self.exception = kwargs.pop("exception", None) self.status, self.data = kwargs.pop("status", 200), kwargs.pop("data", "{}") self.headers = kwargs.pop("headers", {}) self.delay = kwargs.pop("delay", None) self.calls = [] super(DummyConnection, self).__init__(**kwargs) def perform_request(self, *args, **kwargs): self.calls.append((args, kwargs)) if self.delay is not None: time.sleep(self.delay) if self.exception: raise self.exception return self.status, self.headers, self.data CLUSTER_NODES = """{ "_nodes" : { "total" : 1, "successful" : 1, "failed" : 0 }, "cluster_name" : "elasticsearch", "nodes" : { "SRZpKFZdQguhhvifmN6UVA" : { "name" : "SRZpKFZ", "transport_address" : "127.0.0.1:9300", "host" : "127.0.0.1", "ip" : "127.0.0.1", "version" : "5.0.0", "build_hash" : "253032b", "roles" : [ "master", "data", "ingest" ], "http" : { "bound_address" : [ "[fe80::1]:9200", "[::1]:9200", "127.0.0.1:9200" ], "publish_address" : "1.1.1.1:123", "max_content_length_in_bytes" : 104857600 } } } }""" CLUSTER_NODES_7x_PUBLISH_HOST = """{ "_nodes" : { "total" : 1, "successful" : 1, "failed" : 0 }, "cluster_name" : "elasticsearch", "nodes" : { "SRZpKFZdQguhhvifmN6UVA" : { "name" : "SRZpKFZ", "transport_address" : "127.0.0.1:9300", "host" : "127.0.0.1", "ip" : "127.0.0.1", "version" : "5.0.0", "build_hash" : "253032b", "roles" : [ "master", "data", "ingest" ], "http" : { "bound_address" : [ "[fe80::1]:9200", "[::1]:9200", "127.0.0.1:9200" ], "publish_address" : "somehost.tld/1.1.1.1:123", "max_content_length_in_bytes" : 104857600 } } } }""" class TestHostsInfoCallback(TestCase): def test_master_only_nodes_are_ignored(self): nodes = [ {"roles": ["master"]}, {"roles": ["master", "data", 
"ingest"]}, {"roles": ["data", "ingest"]}, {"roles": []}, {}, ] chosen = [ i for i, node_info in enumerate(nodes) if get_host_info(node_info, i) is not None ] self.assertEqual([1, 2, 3, 4], chosen) class TestTransport(TestCase): def test_single_connection_uses_dummy_connection_pool(self): t = Transport([{}]) t._verified_elasticsearch = True self.assertIsInstance(t.connection_pool, DummyConnectionPool) t = Transport([{"host": "localhost"}]) t._verified_elasticsearch = True self.assertIsInstance(t.connection_pool, DummyConnectionPool) def test_request_timeout_extracted_from_params_and_passed(self): t = Transport([{}], meta_header=False, connection_class=DummyConnection) t._verified_elasticsearch = True t.perform_request("GET", "/", params={"request_timeout": 42}) self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual(("GET", "/", {}, None), t.get_connection().calls[0][0]) self.assertEqual( {"timeout": 42, "ignore": (), "headers": None}, t.get_connection().calls[0][1], ) def test_opaque_id(self): t = Transport( [{}], opaque_id="app-1", meta_header=False, connection_class=DummyConnection ) t._verified_elasticsearch = True t.perform_request("GET", "/") self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual(("GET", "/", None, None), t.get_connection().calls[0][0]) self.assertEqual( {"timeout": None, "ignore": (), "headers": None}, t.get_connection().calls[0][1], ) # Now try with an 'x-opaque-id' set on perform_request(). t.perform_request("GET", "/", headers={"x-opaque-id": "request-1"}) self.assertEqual(2, len(t.get_connection().calls)) self.assertEqual(("GET", "/", None, None), t.get_connection().calls[1][0]) self.assertEqual( {"timeout": None, "ignore": (), "headers": {"x-opaque-id": "request-1"}}, t.get_connection().calls[1][1], ) def test_request_with_custom_user_agent_header(self): t = Transport([{}], meta_header=False, connection_class=DummyConnection) t._verified_elasticsearch = True t.perform_request("GET", "/", headers={"user-agent": "my-custom-value/1.2.3"}) self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual( { "timeout": None, "ignore": (), "headers": {"user-agent": "my-custom-value/1.2.3"}, }, t.get_connection().calls[0][1], ) def test_send_get_body_as_source(self): with warnings.catch_warnings(record=True) as w: t = Transport( [{}], send_get_body_as="source", connection_class=DummyConnection ) assert len(w) == 1 assert str(w[0].message) == ( "The 'send_get_body_as' parameter is no longer necessary and will be removed in 8.0" ) t._verified_elasticsearch = True t.perform_request("GET", "/", body={}) self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual( ("GET", "/", {"source": "{}"}, None), t.get_connection().calls[0][0] ) def test_send_get_body_as_post(self): with warnings.catch_warnings(record=True) as w: t = Transport( [{}], send_get_body_as="POST", connection_class=DummyConnection ) assert len(w) == 1 assert str(w[0].message) == ( "The 'send_get_body_as' parameter is no longer necessary and will be removed in 8.0" ) t._verified_elasticsearch = True t.perform_request("GET", "/", body={}) self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual(("POST", "/", None, b"{}"), t.get_connection().calls[0][0]) def test_client_meta_header(self): t = Transport([{}], connection_class=DummyConnection) t._verified_elasticsearch = True t.perform_request("GET", "/", body={}) self.assertEqual(1, len(t.get_connection().calls)) headers = t.get_connection().calls[0][1]["headers"] self.assertRegexpMatches( 
headers["x-elastic-client-meta"], r"^es=[0-9.]+p?,py=[0-9.]+p?,t=[0-9.]+p?$" ) class DummyConnectionWithMeta(DummyConnection): HTTP_CLIENT_META = ("dm", "1.2.3") t = Transport([{}], connection_class=DummyConnectionWithMeta) t._verified_elasticsearch = True t.perform_request("GET", "/", body={}, headers={"Custom": "header"}) self.assertEqual(1, len(t.get_connection().calls)) headers = t.get_connection().calls[0][1]["headers"] self.assertRegexpMatches( headers["x-elastic-client-meta"], r"^es=[0-9.]+p?,py=[0-9.]+p?,t=[0-9.]+p?,dm=1.2.3$", ) self.assertEqual(headers["Custom"], "header") def test_client_meta_header_not_sent(self): t = Transport([{}], meta_header=False, connection_class=DummyConnection) t._verified_elasticsearch = True t.perform_request("GET", "/", body={}) self.assertEqual(1, len(t.get_connection().calls)) headers = t.get_connection().calls[0][1]["headers"] self.assertIs(headers, None) def test_meta_header_type_error(self): with pytest.raises(TypeError) as e: Transport([{}], meta_header=1) assert str(e.value) == "meta_header must be of type bool" def test_body_gets_encoded_into_bytes(self): t = Transport([{}], connection_class=DummyConnection) t._verified_elasticsearch = True t.perform_request("GET", "/", body="你好") self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual( ("GET", "/", None, b"\xe4\xbd\xa0\xe5\xa5\xbd"), t.get_connection().calls[0][0], ) def test_body_bytes_get_passed_untouched(self): t = Transport([{}], connection_class=DummyConnection) t._verified_elasticsearch = True body = b"\xe4\xbd\xa0\xe5\xa5\xbd" t.perform_request("GET", "/", body=body) self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual(("GET", "/", None, body), t.get_connection().calls[0][0]) def test_body_surrogates_replaced_encoded_into_bytes(self): t = Transport([{}], connection_class=DummyConnection) t._verified_elasticsearch = True t.perform_request("GET", "/", body="你好\uda6a") self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual( ("GET", "/", None, b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa"), t.get_connection().calls[0][0], ) def test_kwargs_passed_on_to_connections(self): t = Transport([{"host": "google.com"}], port=123) t._verified_elasticsearch = True self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://google.com:123", t.connection_pool.connections[0].host) def test_kwargs_passed_on_to_connection_pool(self): dt = object() t = Transport([{}, {}], dead_timeout=dt) t._verified_elasticsearch = True self.assertIs(dt, t.connection_pool.dead_timeout) def test_custom_connection_class(self): class MyConnection(object): def __init__(self, **kwargs): self.kwargs = kwargs t = Transport([{}], connection_class=MyConnection) t._verified_elasticsearch = True self.assertEqual(1, len(t.connection_pool.connections)) self.assertIsInstance(t.connection_pool.connections[0], MyConnection) def test_add_connection(self): t = Transport([{}], randomize_hosts=False) t._verified_elasticsearch = True t.add_connection({"host": "google.com", "port": 1234}) self.assertEqual(2, len(t.connection_pool.connections)) self.assertEqual( "http://google.com:1234", t.connection_pool.connections[1].host ) def test_request_will_fail_after_X_retries(self): t = Transport( [{"exception": ConnectionError("abandon ship")}], connection_class=DummyConnection, ) t._verified_elasticsearch = True self.assertRaises(ConnectionError, t.perform_request, "GET", "/") self.assertEqual(4, len(t.get_connection().calls)) def test_failed_connection_will_be_marked_as_dead(self): t = 
Transport( [{"exception": ConnectionError("abandon ship")}] * 2, connection_class=DummyConnection, ) t._verified_elasticsearch = True self.assertRaises(ConnectionError, t.perform_request, "GET", "/") self.assertEqual(0, len(t.connection_pool.connections)) def test_resurrected_connection_will_be_marked_as_live_on_success(self): for method in ("GET", "HEAD"): t = Transport([{}, {}], connection_class=DummyConnection) t._verified_elasticsearch = True con1 = t.connection_pool.get_connection() con2 = t.connection_pool.get_connection() t.connection_pool.mark_dead(con1) t.connection_pool.mark_dead(con2) t.perform_request(method, "/") self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual(1, len(t.connection_pool.dead_count)) def test_sniff_will_use_seed_connections(self): t = Transport([{"data": CLUSTER_NODES}], connection_class=DummyConnection) t._verified_elasticsearch = True t.set_connections([{"data": "invalid"}]) t.sniff_hosts() self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://1.1.1.1:123", t.get_connection().host) def test_sniff_on_start_fetches_and_uses_nodes_list(self): t = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_start=True, ) t._verified_elasticsearch = True self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://1.1.1.1:123", t.get_connection().host) def test_sniff_on_start_ignores_sniff_timeout(self): t = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_start=True, sniff_timeout=12, ) t._verified_elasticsearch = True self.assertEqual( (("GET", "/_nodes/_all/http"), {"timeout": None}), t.seed_connections[0].calls[0], ) def test_sniff_uses_sniff_timeout(self): t = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_timeout=42, ) t._verified_elasticsearch = True t.sniff_hosts() self.assertEqual( (("GET", "/_nodes/_all/http"), {"timeout": 42}), t.seed_connections[0].calls[0], ) def test_sniff_reuses_connection_instances_if_possible(self): t = Transport( [{"data": CLUSTER_NODES}, {"host": "1.1.1.1", "port": 123}], connection_class=DummyConnection, randomize_hosts=False, ) t._verified_elasticsearch = True connection = t.connection_pool.connections[1] t.sniff_hosts() self.assertEqual(1, len(t.connection_pool.connections)) self.assertIs(connection, t.get_connection()) def test_sniff_on_fail_triggers_sniffing_on_fail(self): t = Transport( [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_connection_fail=True, max_retries=0, randomize_hosts=False, ) t._verified_elasticsearch = True self.assertRaises(ConnectionError, t.perform_request, "GET", "/") self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://1.1.1.1:123", t.get_connection().host) @patch("elasticsearch.transport.Transport.sniff_hosts") def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts): sniff_hosts.side_effect = [TransportError("sniff failed")] t = Transport( [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_connection_fail=True, max_retries=3, randomize_hosts=False, ) t._verified_elasticsearch = True conn_err, conn_data = t.connection_pool.connections response = t.perform_request("GET", "/") self.assertEqual(json.loads(CLUSTER_NODES), response) self.assertEqual(1, sniff_hosts.call_count) self.assertEqual(1, len(conn_err.calls)) self.assertEqual(1, len(conn_data.calls)) def 
test_sniff_after_n_seconds(self):
        t = Transport(
            [{"data": CLUSTER_NODES}],
            connection_class=DummyConnection,
            sniffer_timeout=5,
        )
        t._verified_elasticsearch = True

        for _ in range(4):
            t.perform_request("GET", "/")
        self.assertEqual(1, len(t.connection_pool.connections))
        self.assertIsInstance(t.get_connection(), DummyConnection)
        t.last_sniff = time.time() - 5.1

        t.perform_request("GET", "/")
        self.assertEqual(1, len(t.connection_pool.connections))
        self.assertEqual("http://1.1.1.1:123", t.get_connection().host)
        self.assertTrue(time.time() - 1 < t.last_sniff < time.time() + 0.01)

    def test_sniff_7x_publish_host(self):
        # Test the response shape when a 7.x node has publish_host set
        # and the returned data is in the fqdn/ip:port format.
        t = Transport(
            [{"data": CLUSTER_NODES_7x_PUBLISH_HOST}],
            connection_class=DummyConnection,
            sniff_timeout=42,
        )
        t._verified_elasticsearch = True
        t.sniff_hosts()
        # Ensure we parsed out the fqdn and port from the fqdn/ip:port string.
        self.assertEqual(
            t.connection_pool.connection_opts[0][1],
            {"host": "somehost.tld", "port": 123},
        )

    @patch("elasticsearch.transport.Transport.sniff_hosts")
    def test_sniffing_disabled_on_cloud_instances(self, sniff_hosts):
        t = Transport(
            [{}],
            sniff_on_start=True,
            sniff_on_connection_fail=True,
            cloud_id="cluster:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5NyQ0ZmE4ODIxZTc1NjM0MDMyYmVkMWNmMjIxMTBlMmY5Ng==",
        )
        t._verified_elasticsearch = True
        self.assertFalse(t.sniff_on_connection_fail)
        self.assertIs(sniff_hosts.call_args, None)  # Assert not called.


TAGLINE = "You Know, for Search"


@pytest.mark.parametrize(
    ["headers", "response", "product_error"],
    [
        # All empty.
        ({}, {}, _ProductChecker.UNSUPPORTED_PRODUCT),
        # Don't check the product header immediately, need to check version first.
        (
            {"x-elastic-product": "Elasticsearch"},
            {},
            _ProductChecker.UNSUPPORTED_PRODUCT,
        ),
        # Version not there.
({}, {"tagline": TAGLINE}, _ProductChecker.UNSUPPORTED_PRODUCT), # Version is nonsense ( {}, {"version": "1.0.0", "tagline": TAGLINE}, _ProductChecker.UNSUPPORTED_PRODUCT, ), # Version number not there ({}, {"version": {}, "tagline": TAGLINE}, _ProductChecker.UNSUPPORTED_PRODUCT), # Version number is nonsense ( {}, {"version": {"number": "nonsense"}, "tagline": TAGLINE}, _ProductChecker.UNSUPPORTED_PRODUCT, ), # Version number way in the past ( {}, {"version": {"number": "1.0.0"}, "tagline": TAGLINE}, _ProductChecker.UNSUPPORTED_PRODUCT, ), # Version number way in the future ( {}, {"version": {"number": "999.0.0"}, "tagline": TAGLINE}, _ProductChecker.UNSUPPORTED_PRODUCT, ), # Build flavor not supposed to be missing ( {}, {"version": {"number": "7.13.0"}, "tagline": TAGLINE}, _ProductChecker.UNSUPPORTED_DISTRIBUTION, ), # Build flavor is 'oss' ( {}, { "version": {"number": "7.10.0", "build_flavor": "oss"}, "tagline": TAGLINE, }, _ProductChecker.UNSUPPORTED_DISTRIBUTION, ), # Build flavor is nonsense ( {}, { "version": {"number": "7.13.0", "build_flavor": "nonsense"}, "tagline": TAGLINE, }, _ProductChecker.UNSUPPORTED_DISTRIBUTION, ), # Tagline is nonsense ( {}, {"version": {"number": "7.1.0-SNAPSHOT"}, "tagline": "nonsense"}, _ProductChecker.UNSUPPORTED_PRODUCT, ), # Product header is not supposed to be missing ( {}, {"version": {"number": "7.14.0"}, "tagline": "You Know, for Search"}, _ProductChecker.UNSUPPORTED_PRODUCT, ), # Product header is nonsense ( {"x-elastic-product": "nonsense"}, {"version": {"number": "7.15.0"}, "tagline": TAGLINE}, _ProductChecker.UNSUPPORTED_PRODUCT, ), ], ) def test_verify_elasticsearch_errors(headers, response, product_error): assert _ProductChecker.check_product(headers, response) == product_error @pytest.mark.parametrize( ["headers", "response"], [ ({}, {"version": {"number": "6.0.0"}, "tagline": TAGLINE}), ({}, {"version": {"number": "6.99.99"}, "tagline": TAGLINE}), ( {}, { "version": {"number": "7.0.0", "build_flavor": "default"}, "tagline": TAGLINE, }, ), ( {}, { "version": {"number": "7.13.99", "build_flavor": "default"}, "tagline": TAGLINE, }, ), ( {"x-elastic-product": "Elasticsearch"}, { "version": {"number": "7.14.0", "build_flavor": "default"}, "tagline": TAGLINE, }, ), ( {"x-elastic-product": "Elasticsearch"}, { "version": {"number": "7.99.99", "build_flavor": "default"}, "tagline": TAGLINE, }, ), ( {"x-elastic-product": "Elasticsearch"}, { "version": {"number": "8.0.0"}, }, ), ], ) def test_verify_elasticsearch_passes(headers, response): result = _ProductChecker.check_product(headers, response) assert result == _ProductChecker.SUCCESS assert result is True @pytest.mark.parametrize( ["headers", "data"], [ ( {}, '{"version":{"number":"6.99.0"},"tagline":"You Know, for Search"}', ), ( {}, """{ "name" : "io", "cluster_name" : "elasticsearch", "cluster_uuid" : "HaMHUswUSGGnzla8B17Iqw", "version" : { "number" : "7.6.0", "build_flavor" : "default", "build_type" : "tar", "build_hash" : "7f634e9f44834fbc12724506cc1da681b0c3b1e3", "build_date" : "2020-02-06T00:09:00.449973Z", "build_snapshot" : false, "lucene_version" : "8.4.0", "minimum_wire_compatibility_version" : "6.8.0", "minimum_index_compatibility_version" : "6.0.0-beta1" }, "tagline" : "You Know, for Search" }""", ), ( {}, '{"version":{"number":"7.13.0","build_flavor":"default"},"tagline":"You Know, for Search"}', ), ( {"X-elastic-product": "Elasticsearch"}, '{"version":{"number":"7.14.0","build_flavor":"default"},"tagline":"You Know, for Search"}', ), ], ) def test_verify_elasticsearch(headers, 
data): t = Transport( [{"data": data, "headers": headers}], connection_class=DummyConnection ) t.perform_request("GET", "/_search") assert t._verified_elasticsearch is True calls = t.connection_pool.connections[0].calls _ = [call[1]["headers"].pop("x-elastic-client-meta") for call in calls] assert calls == [ ( ("GET", "/"), { "headers": { "accept": "application/json", }, "timeout": None, }, ), ( ("GET", "/_search", None, None), { "headers": {}, "ignore": (), "timeout": None, }, ), ] @pytest.mark.parametrize( "exception_cls", [AuthorizationException, AuthenticationException] ) def test_verify_elasticsearch_skips_on_auth_errors(exception_cls): t = Transport( [{"exception": exception_cls(exception_cls.status_code)}], connection_class=DummyConnection, ) with pytest.warns(ElasticsearchWarning) as warns: with pytest.raises(exception_cls): t.perform_request( "GET", "/_search", headers={"Authorization": "testme"}, params={"request_timeout": 3}, ) # Assert that a warning was raised due to security privileges assert [str(w.message) for w in warns] == [ "The client is unable to verify that the server is " "Elasticsearch due security privileges on the server side" ] # Assert that the cluster is "verified" assert t._verified_elasticsearch is True # See that the headers were passed along to the "info" request made calls = t.connection_pool.connections[0].calls _ = [call[1]["headers"].pop("x-elastic-client-meta") for call in calls] assert calls == [ ( ("GET", "/"), { "headers": { "accept": "application/json", "authorization": "testme", }, "timeout": 3, }, ), ( ("GET", "/_search", {}, None), { "headers": { "Authorization": "testme", }, "ignore": (), "timeout": 3, }, ), ] def test_multiple_requests_verify_elasticsearch_success(): try: import threading except ImportError: return pytest.skip("Requires the 'threading' module") t = Transport( [ { "data": '{"version":{"number":"7.13.0","build_flavor":"default"},"tagline":"You Know, for Search"}', "delay": 1, } ], connection_class=DummyConnection, ) results = [] completed_at = [] class RequestThread(threading.Thread): def run(self): try: results.append(t.perform_request("GET", "/_search")) except Exception as e: results.append(e) completed_at.append(time.time()) # Execute a bunch of requests concurrently. threads = [] start_time = time.time() for _ in range(10): thread = RequestThread() thread.start() threads.append(thread) for thread in threads: thread.join() end_time = time.time() # Exactly 10 results completed assert len(results) == 10 # No errors in the results assert all(isinstance(result, dict) for result in results) # Assert that this took longer than 2 seconds but less than 2.1 seconds duration = end_time - start_time assert 2 <= duration <= 2.1 # Assert that every result came after ~2 seconds, no fast completions. 
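    # (Why ~2 seconds: the first thread holds the verification lock while the
    # 1-second 'GET /' probe runs; each thread's own '/_search' then sleeps
    # another 1 second, concurrently, so all 10 threads finish near the 2s mark.)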
assert all( 2 <= completed_time - start_time <= 2.1 for completed_time in completed_at ) # Assert that the cluster is "verified" assert t._verified_elasticsearch is True # See that the first request is always 'GET /' for ES check calls = t.connection_pool.connections[0].calls assert calls[0][0] == ("GET", "/") # The rest of the requests are 'GET /_search' afterwards assert all(call[0][:2] == ("GET", "/_search") for call in calls[1:]) @pytest.mark.parametrize( ["build_flavor", "tagline", "product_error", "error_message"], [ ( "default", "BAD TAGLINE", _ProductChecker.UNSUPPORTED_PRODUCT, "The client noticed that the server is not Elasticsearch and we do not support this unknown product", ), ( "BAD BUILD FLAVOR", "BAD TAGLINE", _ProductChecker.UNSUPPORTED_PRODUCT, "The client noticed that the server is not Elasticsearch and we do not support this unknown product", ), ( "BAD BUILD FLAVOR", "You Know, for Search", _ProductChecker.UNSUPPORTED_DISTRIBUTION, "The client noticed that the server is not a supported distribution of Elasticsearch", ), ], ) def test_multiple_requests_verify_elasticsearch_product_error( build_flavor, tagline, product_error, error_message ): try: import threading except ImportError: return pytest.skip("Requires the 'threading' module") t = Transport( [ { "data": '{"version":{"number":"7.13.0","build_flavor":"%s"},"tagline":"%s"}' % (build_flavor, tagline), "delay": 1, } ], connection_class=DummyConnection, ) results = [] completed_at = [] class RequestThread(threading.Thread): def run(self): try: results.append(t.perform_request("GET", "/_search")) except Exception as e: results.append(e) completed_at.append(time.time()) # Execute a bunch of requests concurrently. threads = [] start_time = time.time() for _ in range(10): thread = RequestThread() thread.start() threads.append(thread) for thread in threads: thread.join() end_time = time.time() # Exactly 10 results completed assert len(results) == 10 # All results were errors assert all(isinstance(result, UnsupportedProductError) for result in results) assert all(str(result) == error_message for result in results) # Assert that one request was made but not 2 requests. duration = end_time - start_time assert 1 <= duration <= 1.1 # Assert that every result came after ~1 seconds, no fast completions. assert all( 1 <= completed_time - start_time <= 1.1 for completed_time in completed_at ) # Assert that the cluster is definitely not Elasticsearch assert t._verified_elasticsearch == product_error # See that the first request is always 'GET /' for ES check calls = t.connection_pool.connections[0].calls assert calls[0][0] == ("GET", "/") # The rest of the requests are 'GET /_search' afterwards assert all(call[0][:2] == ("GET", "/_search") for call in calls[1:]) @pytest.mark.parametrize("error_cls", [ConnectionError, NotFoundError]) def test_multiple_requests_verify_elasticsearch_retry_on_errors(error_cls): try: import threading except ImportError: return pytest.skip("Requires the 'threading' module") t = Transport( [ { "exception": error_cls(), "delay": 0.1, } ], connection_class=DummyConnection, ) results = [] completed_at = [] class RequestThread(threading.Thread): def run(self): try: results.append(t.perform_request("GET", "/_search")) except Exception as e: results.append(e) completed_at.append(time.time()) # Execute a bunch of requests concurrently. 
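    # (Unlike the success case above, every thread here should surface the raw
    # ConnectionError/NotFoundError: a failed 'GET /' probe must leave
    # 't._verified_elasticsearch' as None so that later requests retry it.)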
threads = [] start_time = time.time() for _ in range(5): thread = RequestThread() thread.start() threads.append(thread) for thread in threads: thread.join() end_time = time.time() # Exactly 5 results completed assert len(results) == 5 # All results were errors and not wrapped in 'UnsupportedProductError' assert all(isinstance(result, error_cls) for result in results) # Assert that 5 requests were made in total (5 transport requests per x 0.1s/conn request) duration = end_time - start_time assert 0.5 <= duration <= 0.6 # Assert that the cluster is still in the unknown/unverified stage. assert t._verified_elasticsearch is None # See that the API isn't hit, instead it's the index requests that are failing. calls = t.connection_pool.connections[0].calls assert len(calls) == 5 assert all(call[0] == ("GET", "/") for call in calls) elasticsearch-py-7.17.6/test_elasticsearch/test_types/000077500000000000000000000000001426163262700231415ustar00rootroot00000000000000elasticsearch-py-7.17.6/test_elasticsearch/test_types/README.md000066400000000000000000000003561426163262700244240ustar00rootroot00000000000000# Type Hints All of these scripts are used to test the type hinting distributed with the `elasticsearch` package. These scripts simulate normal usage of the client and are run through `mypy --strict` as a part of continuous integration. elasticsearch-py-7.17.6/test_elasticsearch/test_types/aliased_types.py000066400000000000000000000116631426163262700263500ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
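# Nothing in this module is executed as a test; it only has to satisfy
# 'mypy --strict'. utils/build-dists.py runs roughly the equivalent of:
#     python -m mypy --strict test_elasticsearch/test_types/aliased_types.py
# inside a venv where the suffixed ('elasticsearch7') dist is installed.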
from typing import Any, AsyncGenerator, Dict, Generator from elasticsearch7 import ( AIOHttpConnection, AsyncElasticsearch, AsyncTransport, ConnectionPool, Elasticsearch, RequestsHttpConnection, Transport, ) from elasticsearch7.helpers import ( async_bulk, async_reindex, async_scan, async_streaming_bulk, bulk, reindex, scan, streaming_bulk, ) es = Elasticsearch( [{"host": "localhost", "port": 9443}], transport_class=Transport, ) t = Transport( [{}], connection_class=RequestsHttpConnection, connection_pool_class=ConnectionPool, sniff_on_start=True, sniffer_timeout=0.1, sniff_timeout=1, sniff_on_connection_fail=False, max_retries=1, retry_on_status={100, 400, 503}, retry_on_timeout=True, send_get_body_as="source", ) def sync_gen() -> Generator[Dict[Any, Any], None, None]: yield {} def scan_types() -> None: for _ in scan( es, query={"query": {"match_all": {}}}, request_timeout=10, clear_scroll=True, scroll_kwargs={"request_timeout": 10}, ): pass for _ in scan( es, raise_on_error=False, preserve_order=False, scroll="10m", size=10, request_timeout=10.0, ): pass def streaming_bulk_types() -> None: for _ in streaming_bulk(es, sync_gen()): pass for _ in streaming_bulk(es, sync_gen().__iter__()): pass for _ in streaming_bulk(es, [{}]): pass for _ in streaming_bulk(es, ({},)): pass def bulk_types() -> None: _, _ = bulk(es, sync_gen()) _, _ = bulk(es, sync_gen().__iter__()) _, _ = bulk(es, [{}]) _, _ = bulk(es, ({},)) def reindex_types() -> None: _, _ = reindex( es, "src-index", "target-index", query={"query": {"match": {"key": "val"}}} ) _, _ = reindex( es, source_index="src-index", target_index="target-index", target_client=es ) _, _ = reindex( es, "src-index", "target-index", chunk_size=1, scroll="10m", scan_kwargs={"request_timeout": 10}, bulk_kwargs={"request_timeout": 10}, ) es2 = AsyncElasticsearch( [{"host": "localhost", "port": 9443}], transport_class=AsyncTransport, ) t2 = AsyncTransport( [{}], connection_class=AIOHttpConnection, connection_pool_class=ConnectionPool, sniff_on_start=True, sniffer_timeout=0.1, sniff_timeout=1, sniff_on_connection_fail=False, max_retries=1, retry_on_status={100, 400, 503}, retry_on_timeout=True, send_get_body_as="source", ) async def async_gen() -> AsyncGenerator[Dict[Any, Any], None]: yield {} async def async_scan_types() -> None: async for _ in async_scan( es2, query={"query": {"match_all": {}}}, request_timeout=10, clear_scroll=True, scroll_kwargs={"request_timeout": 10}, ): pass async for _ in async_scan( es2, raise_on_error=False, preserve_order=False, scroll="10m", size=10, request_timeout=10.0, ): pass async def async_streaming_bulk_types() -> None: async for _ in async_streaming_bulk(es2, async_gen()): pass async for _ in async_streaming_bulk(es2, async_gen().__aiter__()): pass async for _ in async_streaming_bulk(es2, [{}]): pass async for _ in async_streaming_bulk(es2, ({},)): pass async def async_bulk_types() -> None: _, _ = await async_bulk(es2, async_gen()) _, _ = await async_bulk(es2, async_gen().__aiter__()) _, _ = await async_bulk(es2, [{}]) _, _ = await async_bulk(es2, ({},)) async def async_reindex_types() -> None: _, _ = await async_reindex( es2, "src-index", "target-index", query={"query": {"match": {"key": "val"}}} ) _, _ = await async_reindex( es2, source_index="src-index", target_index="target-index", target_client=es2 ) _, _ = await async_reindex( es2, "src-index", "target-index", chunk_size=1, scroll="10m", scan_kwargs={"request_timeout": 10}, bulk_kwargs={"request_timeout": 10}, ) 
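# Note: this module deliberately mirrors sync_types.py and async_types.py
# against the 'elasticsearch7' alias; for the suffixed dists it is the single
# file that covers both the sync and the async typed APIs.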
elasticsearch-py-7.17.6/test_elasticsearch/test_types/async_types.py000066400000000000000000000060271426163262700260610ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, AsyncGenerator, Dict from elasticsearch import ( AIOHttpConnection, AsyncElasticsearch, AsyncTransport, ConnectionPool, ) from elasticsearch.helpers import ( async_bulk, async_reindex, async_scan, async_streaming_bulk, ) es = AsyncElasticsearch( [{"host": "localhost", "port": 9443}], transport_class=AsyncTransport, ) t = AsyncTransport( [{}], connection_class=AIOHttpConnection, connection_pool_class=ConnectionPool, sniff_on_start=True, sniffer_timeout=0.1, sniff_timeout=1, sniff_on_connection_fail=False, max_retries=1, retry_on_status={100, 400, 503}, retry_on_timeout=True, send_get_body_as="source", ) async def async_gen() -> AsyncGenerator[Dict[Any, Any], None]: yield {} async def async_scan_types() -> None: async for _ in async_scan( es, query={"query": {"match_all": {}}}, request_timeout=10, clear_scroll=True, scroll_kwargs={"request_timeout": 10}, ): pass async for _ in async_scan( es, raise_on_error=False, preserve_order=False, scroll="10m", size=10, request_timeout=10.0, ): pass async def async_streaming_bulk_types() -> None: async for _ in async_streaming_bulk(es, async_gen()): pass async for _ in async_streaming_bulk(es, async_gen().__aiter__()): pass async for _ in async_streaming_bulk(es, [{}]): pass async for _ in async_streaming_bulk(es, ({},)): pass async def async_bulk_types() -> None: _, _ = await async_bulk(es, async_gen()) _, _ = await async_bulk(es, async_gen().__aiter__()) _, _ = await async_bulk(es, [{}]) _, _ = await async_bulk(es, ({},)) async def async_reindex_types() -> None: _, _ = await async_reindex( es, "src-index", "target-index", query={"query": {"match": {"key": "val"}}} ) _, _ = await async_reindex( es, source_index="src-index", target_index="target-index", target_client=es ) _, _ = await async_reindex( es, "src-index", "target-index", chunk_size=1, scroll="10m", scan_kwargs={"request_timeout": 10}, bulk_kwargs={"request_timeout": 10}, ) elasticsearch-py-7.17.6/test_elasticsearch/test_types/sync_types.py000066400000000000000000000053761426163262700257260ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from typing import Any, Dict, Generator from elasticsearch import ( ConnectionPool, Elasticsearch, RequestsHttpConnection, Transport, ) from elasticsearch.helpers import bulk, reindex, scan, streaming_bulk es = Elasticsearch( [{"host": "localhost", "port": 9443}], transport_class=Transport, ) t = Transport( [{}], connection_class=RequestsHttpConnection, connection_pool_class=ConnectionPool, sniff_on_start=True, sniffer_timeout=0.1, sniff_timeout=1, sniff_on_connection_fail=False, max_retries=1, retry_on_status={100, 400, 503}, retry_on_timeout=True, send_get_body_as="source", ) def sync_gen() -> Generator[Dict[Any, Any], None, None]: yield {} def scan_types() -> None: for _ in scan( es, query={"query": {"match_all": {}}}, request_timeout=10, clear_scroll=True, scroll_kwargs={"request_timeout": 10}, ): pass for _ in scan( es, raise_on_error=False, preserve_order=False, scroll="10m", size=10, request_timeout=10.0, ): pass def streaming_bulk_types() -> None: for _ in streaming_bulk(es, sync_gen()): pass for _ in streaming_bulk(es, sync_gen().__iter__()): pass for _ in streaming_bulk(es, [{}]): pass for _ in streaming_bulk(es, ({},)): pass def bulk_types() -> None: _, _ = bulk(es, sync_gen()) _, _ = bulk(es, sync_gen().__iter__()) _, _ = bulk(es, [{}]) _, _ = bulk(es, ({},)) def reindex_types() -> None: _, _ = reindex( es, "src-index", "target-index", query={"query": {"match": {"key": "val"}}} ) _, _ = reindex( es, source_index="src-index", target_index="target-index", target_client=es ) _, _ = reindex( es, "src-index", "target-index", chunk_size=1, scroll="10m", scan_kwargs={"request_timeout": 10}, bulk_kwargs={"request_timeout": 10}, ) elasticsearch-py-7.17.6/test_elasticsearch/test_utils.py000066400000000000000000000021211426163262700235030ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import pytest from elasticsearch.utils import _client_meta_version @pytest.mark.parametrize( ["version", "meta_version"], [("1.26.3", "1.26.3"), ("7.10.1a1", "7.10.1p"), ("7.10.pre", "7.10p")], ) def test_client_meta_version(version, meta_version): assert _client_meta_version(version) == meta_version elasticsearch-py-7.17.6/test_elasticsearch/utils.py000066400000000000000000000217651426163262700224630ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. 
See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import time from elasticsearch import Elasticsearch, NotFoundError, RequestError from elasticsearch.helpers.test import es_version def wipe_cluster(client): """Wipes a cluster clean between test cases""" close_after_wipe = False try: # If client is async we need to replace the client # with a synchronous one. from elasticsearch import AsyncElasticsearch if isinstance(client, AsyncElasticsearch): client = Elasticsearch(client.transport.hosts, verify_certs=False) close_after_wipe = True except ImportError: pass is_xpack = True if is_xpack: wipe_rollup_jobs(client) wait_for_pending_tasks(client, filter="xpack/rollup/job") wipe_slm_policies(client) # Searchable snapshot indices start in 7.8+ if es_version(client) >= (7, 8): wipe_searchable_snapshot_indices(client) wipe_snapshots(client) if is_xpack: wipe_data_streams(client) wipe_indices(client) if is_xpack: wipe_xpack_templates(client) else: client.indices.delete_template(name="*") client.indices.delete_index_template(name="*") client.cluster.delete_component_template(name="*") wipe_cluster_settings(client) if is_xpack: wipe_ilm_policies(client) wipe_auto_follow_patterns(client) wipe_tasks(client) wipe_node_shutdown_metadata(client) wait_for_pending_datafeeds_and_jobs(client) wait_for_cluster_state_updates_to_finish(client) if close_after_wipe: client.close() def wipe_cluster_settings(client): settings = client.cluster.get_settings() new_settings = {} for name, value in settings.items(): if value: new_settings.setdefault(name, {}) for key in value.keys(): new_settings[name][key + ".*"] = None if new_settings: client.cluster.put_settings(body=new_settings) def wipe_rollup_jobs(client): rollup_jobs = client.rollup.get_jobs(id="_all").get("jobs", ()) for job in rollup_jobs: job_id = job["config"]["id"] client.rollup.stop_job(id=job_id, wait_for_completion=True, ignore=404) client.rollup.delete_job(id=job_id, ignore=404) def wipe_snapshots(client): """Deletes all the snapshots and repositories from the cluster""" in_progress_snapshots = [] repos = client.snapshot.get_repository(repository="_all") for repo_name, repo in repos.items(): if repo["type"] == "fs": snapshots = client.snapshot.get( repository=repo_name, snapshot="_all", ignore_unavailable=True ) for snapshot in snapshots["snapshots"]: if snapshot["state"] == "IN_PROGRESS": in_progress_snapshots.append(snapshot) else: client.snapshot.delete( repository=repo_name, snapshot=snapshot["snapshot"], ignore=404, ) client.snapshot.delete_repository(repository=repo_name, ignore=404) assert in_progress_snapshots == [] def wipe_data_streams(client): try: client.indices.delete_data_stream(name="*", expand_wildcards="all") except Exception: client.indices.delete_data_stream(name="*") def wipe_indices(client): client.indices.delete( index="*,-.ds-ilm-history-*", expand_wildcards="all", ignore=404, ) def 
wipe_searchable_snapshot_indices(client): cluster_metadata = client.cluster.state( metric="metadata", filter_path="metadata.indices.*.settings.index.store.snapshot", ) if cluster_metadata: for index in cluster_metadata["metadata"]["indices"].keys(): client.indices.delete(index=index) def wipe_xpack_templates(client): templates = [ x.strip() for x in client.cat.templates(h="name", headers={"accept": "text/plain"}).split( "\n" ) if x.strip() ] for template in templates: if is_xpack_template(template): continue try: client.indices.delete_template(name=template) except NotFoundError as e: if "index_template [%s] missing" % template in str(e.info): client.indices.delete_index_template(name=template) # Delete component templates, need to retry because sometimes # indices aren't cleaned up in time before we issue the delete. templates = client.cluster.get_component_template()["component_templates"] templates_to_delete = [ template for template in templates if not is_xpack_template(template["name"]) ] for _ in range(3): for template in list(templates_to_delete): try: client.cluster.delete_component_template( name=template["name"], ) except RequestError: pass else: templates_to_delete.remove(template) if not templates_to_delete: break time.sleep(0.01) def wipe_ilm_policies(client): for policy in client.ilm.get_lifecycle(): if policy not in { "ilm-history-ilm-policy", "slm-history-ilm-policy", "watch-history-ilm-policy", "ml-size-based-ilm-policy", "logs", "metrics", }: client.ilm.delete_lifecycle(policy=policy) def wipe_slm_policies(client): for policy in client.slm.get_lifecycle(): client.slm.delete_lifecycle(policy_id=policy["name"]) def wipe_auto_follow_patterns(client): for pattern in client.ccr.get_auto_follow_pattern()["patterns"]: client.ccr.delete_auto_follow_pattern(name=pattern["name"]) def wipe_node_shutdown_metadata(client): shutdown_status = client.shutdown.get_node() # If response contains these two keys the feature flag isn't enabled # on this cluster so skip this step now. 
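    # Illustrative response shapes (hypothetical values): with the feature
    # available the API answers like {"nodes": [{"node_id": "abc123", ...}]},
    # while a cluster without it falls back to a generic nodes payload that
    # carries "_nodes" and "cluster_name", which is what we detect below.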
if "_nodes" in shutdown_status and "cluster_name" in shutdown_status: return for shutdown_node in shutdown_status.get("nodes", []): node_id = shutdown_node["node_id"] client.shutdown.delete_node(node_id=node_id) def wipe_tasks(client): tasks = client.tasks.list() for node_name, node in tasks.get("node", {}).items(): for task_id in node.get("tasks", ()): client.tasks.cancel(task_id=task_id, wait_for_completion=True) def wait_for_pending_tasks(client, filter, timeout=30): end_time = time.time() + timeout while time.time() < end_time: tasks = client.cat.tasks(detailed=True, headers={"accept": "text/plain"}).split( "\n" ) if not any(filter in str(task) for task in tasks): break def wait_for_pending_datafeeds_and_jobs(client, timeout=30): end_time = time.time() + timeout while time.time() < end_time: if ( client.ml.get_datafeeds(datafeed_id="*", allow_no_datafeeds=True)["count"] == 0 ): break while time.time() < end_time: if client.ml.get_jobs(job_id="*", allow_no_jobs=True)["count"] == 0: break def wait_for_cluster_state_updates_to_finish(client, timeout=30): end_time = time.time() + timeout while time.time() < end_time: if not client.cluster.pending_tasks().get("tasks", ()): break def is_xpack_template(name): if ".monitoring-" in name: return True if ".watch" in name or ".triggered_watches" in name: return True if ".data-frame-" in name: return True if ".ml-" in name: return True if ".transform-" in name: return True if name in { ".watches", "logstash-index-template", ".logstash-management", "security_audit_log", ".slm-history", ".async-search", ".geoip_databases", "saml-service-provider", "ilm-history", "logs", "logs-settings", "logs-mappings", "metrics", "metrics-settings", "metrics-mappings", "synthetics", "synthetics-settings", "synthetics-mappings", ".snapshot-blob-cache", "data-streams-mappings", }: return True return False elasticsearch-py-7.17.6/utils/000077500000000000000000000000001426163262700162255ustar00rootroot00000000000000elasticsearch-py-7.17.6/utils/build-dists.py000066400000000000000000000231251426163262700210250ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """A command line tool for building and verifying releases Can be used for building both 'elasticsearch' and 'elasticsearchX' dists. Only requires 'name' in 'setup.py' and the directory to be changed. 
""" import contextlib import os import re import shlex import shutil import sys import tempfile base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) tmp_dir = None @contextlib.contextmanager def set_tmp_dir(): global tmp_dir tmp_dir = tempfile.mkdtemp() yield tmp_dir shutil.rmtree(tmp_dir) tmp_dir = None def run(*argv, expect_exit_code=0): global tmp_dir if tmp_dir is None: os.chdir(base_dir) else: os.chdir(tmp_dir) cmd = " ".join(shlex.quote(x) for x in argv) print("$ " + cmd) exit_code = os.system(cmd) if exit_code != expect_exit_code: print( "Command exited incorrectly: should have been %d was %d" % (expect_exit_code, exit_code) ) exit(exit_code or 1) def test_dist(dist): with set_tmp_dir() as tmp_dir: dist_name = re.match(r"^(elasticsearch\d*)-", os.path.basename(dist)).group(1) # Build the venv and install the dist run("python", "-m", "venv", os.path.join(tmp_dir, "venv")) venv_python = os.path.join(tmp_dir, "venv/bin/python") run(venv_python, "-m", "pip", "install", "-U", "pip", "mypy") run(venv_python, "-m", "pip", "install", dist) # Test the sync namespaces run(venv_python, "-c", f"from {dist_name} import Elasticsearch") run( venv_python, "-c", f"from {dist_name}.helpers import scan, bulk, streaming_bulk, reindex", ) run(venv_python, "-c", f"from {dist_name} import Elasticsearch") run( venv_python, "-c", f"from {dist_name}.helpers import scan, bulk, streaming_bulk, reindex", ) # Ensure that async is not available yet run( venv_python, "-c", f"from {dist_name} import AsyncElasticsearch", expect_exit_code=256, ) run( venv_python, "-c", f"from {dist_name}.helpers import async_scan, async_bulk, async_streaming_bulk, async_reindex", expect_exit_code=256, ) # Install aiohttp and see that async is now available run(venv_python, "-m", "pip", "install", "aiohttp") run(venv_python, "-c", f"from {dist_name} import AsyncElasticsearch") run( venv_python, "-c", f"from {dist_name}.helpers import async_scan, async_bulk, async_streaming_bulk, async_reindex", ) # Only need to test 'async_types' for non-aliased package # since 'aliased_types' tests both async and sync. if dist_name == "elasticsearch": run( venv_python, "-m", "mypy", "--strict", os.path.join(base_dir, "test_elasticsearch/test_types/async_types.py"), ) # Ensure that the namespaces are correct for the dist for suffix in ("", "1", "2", "5", "6", "7", "8", "9", "10"): distx_name = f"elasticsearch{suffix}" run( venv_python, "-c", f"import {distx_name}", expect_exit_code=256 if distx_name != dist_name else 0, ) # Check that sync types work for 'elasticsearch' and # that aliased types work for 'elasticsearchX' if dist_name == "elasticsearch": run( venv_python, "-m", "mypy", "--strict", os.path.join(base_dir, "test_elasticsearch/test_types/sync_types.py"), ) else: run( venv_python, "-m", "mypy", "--strict", os.path.join( base_dir, "test_elasticsearch/test_types/aliased_types.py" ), ) # Uninstall the dist, see that we can't import things anymore run(venv_python, "-m", "pip", "uninstall", "--yes", dist_name) run( venv_python, "-c", f"from {dist_name} import Elasticsearch", expect_exit_code=256, ) def main(): run("git", "checkout", "--", "setup.py", "elasticsearch/") run("rm", "-rf", "build/", "dist/*", "*.egg-info", ".eggs") # Grab the major version to be used as a suffix. 
    version_path = os.path.join(base_dir, "elasticsearch/_version.py")
    with open(version_path) as f:
        version = re.search(
            r"^__versionstr__\s+=\s+[\"\']([^\"\']+)[\"\']", f.read(), re.M
        ).group(1)
    major_version = version.split(".")[0]

    # If we're handed a version from the build manager we
    # should check that the version is correct or write
    # a new one.
    if len(sys.argv) >= 2:
        # 'build_version' is what the release manager wants,
        # 'expect_version' is what we're expecting to compare
        # the package version to before building the dists.
        build_version = expect_version = sys.argv[1]

        # Any suffixes in the version specifier mean we're making
        # a pre-release which will modify __versionstr__ locally
        # and not produce a git tag.
        if any(x in build_version for x in ("-SNAPSHOT", "-rc", "-alpha", "-beta")):
            # If a snapshot, then we add '+dev'
            if "-SNAPSHOT" in build_version:
                version = version + "+dev"
            # alpha/beta/rc -> aN/bN/rcN
            else:
                pre_number = re.search(r"-(a|b|rc)(?:lpha|eta|)(\d+)$", expect_version)
                version = version + pre_number.group(1) + pre_number.group(2)

            expect_version = re.sub(
                r"(?:-(?:SNAPSHOT|alpha\d+|beta\d+|rc\d+))+$", "", expect_version
            )
            if expect_version.endswith(".x"):
                expect_version = expect_version[:-1]

            # For snapshots we ensure that the version in the package
            # at least *starts* with the version. This is to support
            # build_version='7.x-SNAPSHOT'.
            if not version.startswith(expect_version):
                print(
                    "Version of package (%s) didn't match the "
                    "expected release version (%s)" % (version, build_version)
                )
                exit(1)

        # A release that will be tagged, we want
        # there to be no '+dev', etc.
        elif expect_version != version:
            print(
                "Version of package (%s) didn't match the "
                "expected release version (%s)" % (version, build_version)
            )
            exit(1)

    for suffix in ("", major_version):
        run("rm", "-rf", "build/", "*.egg-info", ".eggs")

        # Rename the module to fit the suffix.
        shutil.move(
            os.path.join(base_dir, "elasticsearch"),
            os.path.join(base_dir, "elasticsearch%s" % suffix),
        )

        # Ensure that the version within 'elasticsearch/_version.py' is correct.
        version_path = os.path.join(base_dir, f"elasticsearch{suffix}/_version.py")
        with open(version_path) as f:
            version_data = f.read()
        version_data = re.sub(
            r"__versionstr__ = \"[^\"]+\"",
            '__versionstr__ = "%s"' % version,
            version_data,
        )
        with open(version_path, "w") as f:
            f.truncate()
            f.write(version_data)

        # Rewrite setup.py with the new name.
        setup_py_path = os.path.join(base_dir, "setup.py")
        with open(setup_py_path) as f:
            setup_py = f.read()
        with open(setup_py_path, "w") as f:
            f.truncate()
            assert 'package_name = "elasticsearch"' in setup_py
            f.write(
                setup_py.replace(
                    'package_name = "elasticsearch"',
                    'package_name = "elasticsearch%s"' % suffix,
                )
            )

        # Build the sdist/wheels
        run("python", "setup.py", "sdist", "bdist_wheel")

        # Clean up everything.
        run("git", "checkout", "--", "setup.py", "elasticsearch/")
        if suffix:
            run("rm", "-rf", "elasticsearch%s/" % suffix)

    # Test everything that got created
    dists = os.listdir(os.path.join(base_dir, "dist"))
    assert len(dists) == 4
    for dist in dists:
        test_dist(os.path.join(base_dir, "dist", dist))
    os.system("chmod a+w dist/*")

    # After this run 'python -m twine upload dist/*'
    print(
        "\n\n"
        "===============================\n\n"
        "    * Releases are ready! *\n\n"
        "$ python -m twine upload dist/*\n\n"
        "==============================="
    )


if __name__ == "__main__":
    main()
elasticsearch-py-7.17.6/utils/bump-version.py000066400000000000000000000056011426163262700212270ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""Command line tool which changes the branch to be
ready to build and test the given Elastic stack version.
"""

import re
import sys
from pathlib import Path

SOURCE_DIR = Path(__file__).absolute().parent.parent


def find_and_replace(path, pattern, replace):
    # Does a find-and-replace within the file at 'path' and complains
    # if the given pattern isn't found in the file.
    with open(path, "r") as f:
        old_data = f.read()

    if re.search(pattern, old_data, flags=re.MULTILINE) is None:
        print(f"Didn't find the pattern {pattern!r} in {path!s}")
        exit(1)

    new_data = re.sub(pattern, replace, old_data, flags=re.MULTILINE)
    with open(path, "w") as f:
        f.truncate()
        f.write(new_data)


def main():
    if len(sys.argv) != 2:
        print("usage: utils/bump-version.py [stack version]")
        exit(1)

    stack_version = sys.argv[1]
    try:
        python_version = re.search(r"^([0-9][0-9\.]*[0-9]+)", stack_version).group(1)
    except AttributeError:
        print(f"Couldn't match the given stack version {stack_version!r}")
        exit(1)

    # Pad the version value with .0 until
    # we have the major, minor, and patch.
    for _ in range(3):
        if len(python_version.split(".")) >= 3:
            break
        python_version += ".0"

    find_and_replace(
        path=SOURCE_DIR / "elasticsearch/_version.py",
        pattern=r"__versionstr__ = \"[0-9]+[0-9\.]*[0-9](?:\+dev)?\"",
        replace=f'__versionstr__ = "{python_version}"',
    )

    # These values should always be the 'major.minor-SNAPSHOT'
    major_minor_version = ".".join(python_version.split(".")[:2])
    find_and_replace(
        path=SOURCE_DIR / ".ci/test-matrix.yml",
        pattern=r'STACK_VERSION:\s+\- "[0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?"',
        replace=f'STACK_VERSION:\n - "{major_minor_version}.0-SNAPSHOT"',
    )
    find_and_replace(
        path=SOURCE_DIR / ".github/workflows/unified-release.yml",
        pattern=r'STACK_VERSION:\s+"[0-9]+[0-9\.]*[0-9](?:\-SNAPSHOT)?"',
        replace=f'STACK_VERSION: "{major_minor_version}-SNAPSHOT"',
    )


if __name__ == "__main__":
    main()
elasticsearch-py-7.17.6/utils/license-headers.py000066400000000000000000000105141426163262700216330ustar00rootroot00000000000000# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
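# Example usage (the paths are illustrative; any mix of files and
# directories works):
#   python utils/license-headers.py check elasticsearch/ utils/
#   python utils/license-headers.py fix elasticsearch/ utils/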
"""Script which verifies that all source files have a license header. Has two modes: 'fix' and 'check'. 'fix' fixes problems, 'check' will error out if 'fix' would have changed the file. """ import os import re import sys from itertools import chain from typing import Iterator, List lines_to_keep = ["# -*- coding: utf-8 -*-\n", "#!/usr/bin/env python\n"] license_header_lines = [ "# Licensed to Elasticsearch B.V. under one or more contributor\n", "# license agreements. See the NOTICE file distributed with\n", "# this work for additional information regarding copyright\n", "# ownership. Elasticsearch B.V. licenses this file to you under\n", '# the Apache License, Version 2.0 (the "License"); you may\n', "# not use this file except in compliance with the License.\n", "# You may obtain a copy of the License at\n", "#\n", "# http://www.apache.org/licenses/LICENSE-2.0\n", "#\n", "# Unless required by applicable law or agreed to in writing,\n", "# software distributed under the License is distributed on an\n", '# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n', "# KIND, either express or implied. See the License for the\n", "# specific language governing permissions and limitations\n", "# under the License.\n", "\n", ] def find_files_to_fix(sources: List[str]) -> Iterator[str]: """Iterates over all files and dirs in 'sources' and returns only the filepaths that need fixing. """ for source in sources: if os.path.isfile(source) and does_file_need_fix(source): yield source elif os.path.isdir(source): for root, _, filenames in os.walk(source): for filename in filenames: filepath = os.path.join(root, filename) if does_file_need_fix(filepath): yield filepath def does_file_need_fix(filepath: str) -> bool: if not re.search(r"\.pyi?$", filepath): return False with open(filepath, mode="r") as f: first_license_line = None for line in f: if line == license_header_lines[0]: first_license_line = line break elif line not in lines_to_keep: return True for header_line, line in zip( license_header_lines, chain((first_license_line,), f) ): if line != header_line: return True return False def add_header_to_file(filepath: str) -> None: with open(filepath, mode="r") as f: lines = list(f) i = 0 for i, line in enumerate(lines): if line not in lines_to_keep: break lines = lines[:i] + license_header_lines + lines[i:] with open(filepath, mode="w") as f: f.truncate() f.write("".join(lines)) print(f"Fixed {os.path.relpath(filepath, os.getcwd())}") def main(): mode = sys.argv[1] assert mode in ("fix", "check") sources = [os.path.abspath(x) for x in sys.argv[2:]] files_to_fix = find_files_to_fix(sources) if mode == "fix": for filepath in files_to_fix: add_header_to_file(filepath) else: no_license_headers = list(files_to_fix) if no_license_headers: print("No license header found in:") cwd = os.getcwd() [ print(f" - {os.path.relpath(filepath, cwd)}") for filepath in no_license_headers ] sys.exit(1) else: print("All files had license header") if __name__ == "__main__": main()