././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1642014456.4892735 awscli-1.22.34/0000755000000000000000000000000000000000000013104 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/LICENSE.txt0000644000000000000000000000104500000000000014727 0ustar00rootroot00000000000000Copyright 2012-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/MANIFEST.in0000644000000000000000000000025200000000000014641 0ustar00rootroot00000000000000include README.rst include LICENSE.txt include requirements.txt include UPGRADE_PY3.md recursive-include awscli/examples *.rst *.txt recursive-include awscli/data *.json ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1642014456.4892735 awscli-1.22.34/PKG-INFO0000644000000000000000000002610600000000000014206 0ustar00rootroot00000000000000Metadata-Version: 2.1 Name: awscli Version: 1.22.34 Summary: Universal Command Line Environment for AWS. 
Home-page: http://aws.amazon.com/cli/ Author: Amazon Web Services License: Apache License 2.0 Project-URL: Source, https://github.com/aws/aws-cli Project-URL: Reference, https://docs.aws.amazon.com/cli/latest/reference/ Project-URL: Changelog, https://github.com/aws/aws-cli/blob/develop/CHANGELOG.rst Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers Classifier: Intended Audience :: System Administrators Classifier: Natural Language :: English Classifier: License :: OSI Approved :: Apache Software License Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Requires-Python: >= 3.6 License-File: LICENSE.txt aws-cli ======= .. image:: https://github.com/aws/aws-cli/actions/workflows/run-tests.yml/badge.svg :target: https://github.com/aws/aws-cli/actions/workflows/run-tests.yml :alt: Build Status This package provides a unified command line interface to Amazon Web Services. Jump to: - `Getting Started <#getting-started>`__ - `Getting Help <#getting-help>`__ - `More Resources <#more-resources>`__ Getting Started --------------- This README is for the AWS CLI version 1. If you are looking for information about the AWS CLI version 2, please visit the `v2 branch `__. Requirements ~~~~~~~~~~~~ The aws-cli package works on Python versions: - 3.6.x and greater - 3.7.x and greater - 3.8.x and greater - 3.9.x and greater - 3.10.x and greater Notices ~~~~~~~ On 01/15/2021, deprecation for Python 2.7 was announced and support was dropped on 07/15/2021. To avoid disruption, customers using the AWS CLI on Python 2.7 may need to upgrade their version of Python or pin the version of the AWS CLI. 
For more information, see this `blog post `__. On 10/29/2020, support for Python 3.4 and Python 3.5 was deprecated and support was dropped on 02/01/2021. Customers using the AWS CLI on Python 3.4 or 3.5 will need to upgrade their version of Python to continue receiving feature and security updates. For more information, see this `blog post `__. *Attention!* *We recommend that all customers regularly monitor the* `Amazon Web Services Security Bulletins website `__ *for any important security bulletins related to aws-cli.* Maintenance and Support for CLI Major Versions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The AWS CLI version 1 was made generally available on 09/02/2013 and is currently in the full support phase of the availability life cycle. For information about maintenance and support for SDK major versions and their underlying dependencies, see the `Maintenance Policy `__ section in the *AWS SDKs and Tools Shared Configuration and Credentials Reference Guide*. Installation ~~~~~~~~~~~~ Installation of the AWS CLI and its dependencies use a range of packaging features provided by ``pip`` and ``setuptools``. To ensure smooth installation, it's recommended to use: - ``pip``: 9.0.2 or greater - ``setuptools``: 36.2.0 or greater The safest way to install the AWS CLI is to use `pip `__ in a ``virtualenv``: :: $ python -m pip install awscli or, if you are not installing in a ``virtualenv``, to install globally: :: $ sudo python -m pip install awscli or for your user: :: $ python -m pip install --user awscli If you have the aws-cli package installed and want to upgrade to the latest version, you can run: :: $ python -m pip install --upgrade awscli This will install the aws-cli package as well as all dependencies. .. 
note:: On macOS, if you see an error regarding the version of ``six`` that came with ``distutils`` in El Capitan, use the ``--ignore-installed`` option: :: $ sudo python -m pip install awscli --ignore-installed six On Linux and Mac OS, the AWS CLI can be installed using a `bundled installer `__. The AWS CLI can also be installed on Windows via an `MSI Installer `__. If you want to run the ``develop`` branch of the AWS CLI, see the `Development Version `__ section of the contributing guide. See the `installation `__ section of the AWS CLI User Guide for more information. Configuration ~~~~~~~~~~~~~ Before using the AWS CLI, you need to configure your AWS credentials. You can do this in several ways: - Configuration command - Environment variables - Shared credentials file - Config file - IAM Role The quickest way to get started is to run the ``aws configure`` command: :: $ aws configure AWS Access Key ID: MYACCESSKEY AWS Secret Access Key: MYSECRETKEY Default region name [us-west-2]: us-west-2 Default output format [None]: json To use environment variables, do the following: :: $ export AWS_ACCESS_KEY_ID= $ export AWS_SECRET_ACCESS_KEY= To use the shared credentials file, create an INI formatted file like this: :: [default] aws_access_key_id=MYACCESSKEY aws_secret_access_key=MYSECRETKEY [testing] aws_access_key_id=MYACCESKEY aws_secret_access_key=MYSECRETKEY and place it in ``~/.aws/credentials`` (or in ``%UserProfile%\.aws/credentials`` on Windows). If you wish to place the shared credentials file in a different location than the one specified above, you need to tell aws-cli where to find it. Do this by setting the appropriate environment variable: :: $ export AWS_SHARED_CREDENTIALS_FILE=/path/to/shared_credentials_file To use a config file, create an INI formatted file like this: :: [default] aws_access_key_id= aws_secret_access_key= # Optional, to define default region for this profile. 
region=us-west-1 [profile testing] aws_access_key_id= aws_secret_access_key= region=us-west-2 and place it in ``~/.aws/config`` (or in ``%UserProfile%\.aws\config`` on Windows). If you wish to place the config file in a different location than the one specified above, you need to tell the AWS CLI where to find it. Do this by setting the appropriate environment variable: :: $ export AWS_CONFIG_FILE=/path/to/config_file As you can see, you can have multiple ``profiles`` defined in both the shared credentials file and the configuration file. You can then specify which profile to use by using the ``--profile`` option. If no profile is specified the ``default`` profile is used. In the config file, except for the default profile, you **must** prefix each config section of a profile group with ``profile``. For example, if you have a profile named "testing" the section header would be ``[profile testing]``. The final option for credentials is highly recommended if you are using the AWS CLI on an EC2 instance. `IAM Roles `__ are a great way to have credentials installed automatically on your instance. If you are using IAM Roles, the AWS CLI will find and use them automatically. In addition to credentials, a number of other variables can be configured either with environment variables, configuration file entries, or both. See the `AWS Tools and SDKs Shared Configuration and Credentials Reference Guide `__ for more information. For more information about configuration options, please refer to the `AWS CLI Configuration Variables topic `__. You can access this topic from the AWS CLI as well by running ``aws help config-vars``. 
Basic Commands ~~~~~~~~~~~~~~ An AWS CLI command has the following structure: :: $ aws [options and parameters] For example, to list S3 buckets, the command would be: :: $ aws s3 ls To view help documentation, use one of the following: :: $ aws help $ aws help $ aws help To get the version of the AWS CLI: :: $ aws --version To turn on debugging output: :: $ aws --debug You can read more information on the `Using the AWS CLI `__ chapter of the AWS CLI User Guide. Command Completion ~~~~~~~~~~~~~~~~~~ The aws-cli package includes a command completion feature for Unix-like systems. This feature is not automatically installed so you need to configure it manually. To learn more, read the `AWS CLI Command completion topic `__. Getting Help ------------ The best way to interact with our team is through GitHub. You can `open an issue `__ and choose from one of our templates for guidance, bug reports, or feature requests. You may find help from the community on `Stack Overflow `__ with the tag `aws-cli `__ or on the `AWS Discussion Forum for CLI `__. If you have a support plan with `AWS Support `__, you can also create a new support case. Please check for open similar `issues `__ before opening another one. The AWS CLI implements AWS service APIs. For general issues regarding the services or their limitations, you may find the `Amazon Web Services Discussion Forums `__ helpful. More Resources -------------- - `Changelog `__ - `AWS CLI Documentation `__ - `AWS CLI User Guide `__ - `AWS CLI Command Reference `__ - `Amazon Web Services Discussion Forums `__ - `AWS Support `__ .. |Build Status| image:: https://travis-ci.org/aws/aws-cli.svg?branch=develop :target: https://travis-ci.org/aws/aws-cli .. 
|Gitter| image:: https://badges.gitter.im/aws/aws-cli.svg :target: https://gitter.im/aws/aws-cli ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014272.0 awscli-1.22.34/README.rst0000644000000000000000000002402000000000000014571 0ustar00rootroot00000000000000aws-cli ======= .. image:: https://github.com/aws/aws-cli/actions/workflows/run-tests.yml/badge.svg :target: https://github.com/aws/aws-cli/actions/workflows/run-tests.yml :alt: Build Status This package provides a unified command line interface to Amazon Web Services. Jump to: - `Getting Started <#getting-started>`__ - `Getting Help <#getting-help>`__ - `More Resources <#more-resources>`__ Getting Started --------------- This README is for the AWS CLI version 1. If you are looking for information about the AWS CLI version 2, please visit the `v2 branch `__. Requirements ~~~~~~~~~~~~ The aws-cli package works on Python versions: - 3.6.x and greater - 3.7.x and greater - 3.8.x and greater - 3.9.x and greater - 3.10.x and greater Notices ~~~~~~~ On 01/15/2021, deprecation for Python 2.7 was announced and support was dropped on 07/15/2021. To avoid disruption, customers using the AWS CLI on Python 2.7 may need to upgrade their version of Python or pin the version of the AWS CLI. For more information, see this `blog post `__. On 10/29/2020, support for Python 3.4 and Python 3.5 was deprecated and support was dropped on 02/01/2021. Customers using the AWS CLI on Python 3.4 or 3.5 will need to upgrade their version of Python to continue receiving feature and security updates. For more information, see this `blog post `__. 
*Attention!* *We recommend that all customers regularly monitor the* `Amazon Web Services Security Bulletins website `__ *for any important security bulletins related to aws-cli.* Maintenance and Support for CLI Major Versions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The AWS CLI version 1 was made generally available on 09/02/2013 and is currently in the full support phase of the availability life cycle. For information about maintenance and support for SDK major versions and their underlying dependencies, see the `Maintenance Policy `__ section in the *AWS SDKs and Tools Shared Configuration and Credentials Reference Guide*. Installation ~~~~~~~~~~~~ Installation of the AWS CLI and its dependencies use a range of packaging features provided by ``pip`` and ``setuptools``. To ensure smooth installation, it's recommended to use: - ``pip``: 9.0.2 or greater - ``setuptools``: 36.2.0 or greater The safest way to install the AWS CLI is to use `pip `__ in a ``virtualenv``: :: $ python -m pip install awscli or, if you are not installing in a ``virtualenv``, to install globally: :: $ sudo python -m pip install awscli or for your user: :: $ python -m pip install --user awscli If you have the aws-cli package installed and want to upgrade to the latest version, you can run: :: $ python -m pip install --upgrade awscli This will install the aws-cli package as well as all dependencies. .. note:: On macOS, if you see an error regarding the version of ``six`` that came with ``distutils`` in El Capitan, use the ``--ignore-installed`` option: :: $ sudo python -m pip install awscli --ignore-installed six On Linux and Mac OS, the AWS CLI can be installed using a `bundled installer `__. The AWS CLI can also be installed on Windows via an `MSI Installer `__. If you want to run the ``develop`` branch of the AWS CLI, see the `Development Version `__ section of the contributing guide. See the `installation `__ section of the AWS CLI User Guide for more information. 
Configuration ~~~~~~~~~~~~~ Before using the AWS CLI, you need to configure your AWS credentials. You can do this in several ways: - Configuration command - Environment variables - Shared credentials file - Config file - IAM Role The quickest way to get started is to run the ``aws configure`` command: :: $ aws configure AWS Access Key ID: MYACCESSKEY AWS Secret Access Key: MYSECRETKEY Default region name [us-west-2]: us-west-2 Default output format [None]: json To use environment variables, do the following: :: $ export AWS_ACCESS_KEY_ID= $ export AWS_SECRET_ACCESS_KEY= To use the shared credentials file, create an INI formatted file like this: :: [default] aws_access_key_id=MYACCESSKEY aws_secret_access_key=MYSECRETKEY [testing] aws_access_key_id=MYACCESKEY aws_secret_access_key=MYSECRETKEY and place it in ``~/.aws/credentials`` (or in ``%UserProfile%\.aws/credentials`` on Windows). If you wish to place the shared credentials file in a different location than the one specified above, you need to tell aws-cli where to find it. Do this by setting the appropriate environment variable: :: $ export AWS_SHARED_CREDENTIALS_FILE=/path/to/shared_credentials_file To use a config file, create an INI formatted file like this: :: [default] aws_access_key_id= aws_secret_access_key= # Optional, to define default region for this profile. region=us-west-1 [profile testing] aws_access_key_id= aws_secret_access_key= region=us-west-2 and place it in ``~/.aws/config`` (or in ``%UserProfile%\.aws\config`` on Windows). If you wish to place the config file in a different location than the one specified above, you need to tell the AWS CLI where to find it. Do this by setting the appropriate environment variable: :: $ export AWS_CONFIG_FILE=/path/to/config_file As you can see, you can have multiple ``profiles`` defined in both the shared credentials file and the configuration file. You can then specify which profile to use by using the ``--profile`` option. 
If no profile is specified the ``default`` profile is used. In the config file, except for the default profile, you **must** prefix each config section of a profile group with ``profile``. For example, if you have a profile named "testing" the section header would be ``[profile testing]``. The final option for credentials is highly recommended if you are using the AWS CLI on an EC2 instance. `IAM Roles `__ are a great way to have credentials installed automatically on your instance. If you are using IAM Roles, the AWS CLI will find and use them automatically. In addition to credentials, a number of other variables can be configured either with environment variables, configuration file entries, or both. See the `AWS Tools and SDKs Shared Configuration and Credentials Reference Guide `__ for more information. For more information about configuration options, please refer to the `AWS CLI Configuration Variables topic `__. You can access this topic from the AWS CLI as well by running ``aws help config-vars``. Basic Commands ~~~~~~~~~~~~~~ An AWS CLI command has the following structure: :: $ aws [options and parameters] For example, to list S3 buckets, the command would be: :: $ aws s3 ls To view help documentation, use one of the following: :: $ aws help $ aws help $ aws help To get the version of the AWS CLI: :: $ aws --version To turn on debugging output: :: $ aws --debug You can read more information on the `Using the AWS CLI `__ chapter of the AWS CLI User Guide. Command Completion ~~~~~~~~~~~~~~~~~~ The aws-cli package includes a command completion feature for Unix-like systems. This feature is not automatically installed so you need to configure it manually. To learn more, read the `AWS CLI Command completion topic `__. Getting Help ------------ The best way to interact with our team is through GitHub. You can `open an issue `__ and choose from one of our templates for guidance, bug reports, or feature requests. 
You may find help from the community on `Stack Overflow `__ with the tag `aws-cli `__ or on the `AWS Discussion Forum for CLI `__. If you have a support plan with `AWS Support `__, you can also create a new support case. Please check for open similar `issues `__ before opening another one. The AWS CLI implements AWS service APIs. For general issues regarding the services or their limitations, you may find the `Amazon Web Services Discussion Forums `__ helpful. More Resources -------------- - `Changelog `__ - `AWS CLI Documentation `__ - `AWS CLI User Guide `__ - `AWS CLI Command Reference `__ - `Amazon Web Services Discussion Forums `__ - `AWS Support `__ .. |Build Status| image:: https://travis-ci.org/aws/aws-cli.svg?branch=develop :target: https://travis-ci.org/aws/aws-cli .. |Gitter| image:: https://badges.gitter.im/aws/aws-cli.svg :target: https://gitter.im/aws/aws-cli ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/UPGRADE_PY3.md0000644000000000000000000001463600000000000015222 0ustar00rootroot00000000000000# CLI Python 3 Migration Guide Python 2.7 was deprecated by the [Python Software Foundation](https://www.python.org/psf-landing/) back on January 1, 2020 following a multi-year process of phasing it out. Because of this, AWS has deprecated support for Python 2.7, meaning versions the AWS CLI v1 released after the deprecation date no longer work with Python 2.7. ----- **Note** Since the AWS CLI v2 bundles its own copy of Python, this transition only impacts users of the CLI v1. You can upgrade to the AWS CLI v2 to avoid these deprecations in the future. ---- ## Timeline Going forward, customers using the CLI v1 should transition to using Python 3, with Python 3.6 becoming the minimum by the end of the transition. 
The deprecation dates for the affected versions of Python are: |Python version|Deprecation date| |--------------|----------------| | Python 2.7| 7/15/2021| | Python 3.4 and 3.5| 2/1/2021| ## Impact on the AWS CLI The AWS Command Line Interface is built using the Python SDK, so it's affected by this transition. AWS CLI v2 isn't affected by this transition, since it bundles its own copy of Python 3. However, if you still use the AWS CLI v1, you need to decide whether to [upgrade to Python 3](#upgrading-to-python-3) or transition to the [AWS CLI v2](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html). ## Upgrading to Python 3 Before starting this process, we highly recommend [upgrading to AWS CLI v2](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html). This will avoid the requirement for future upgrades and isolate your CLI usage from conflicts with other packages like boto3 and botocore. ### Do I need to upgrade? First, let’s check if you need to upgrade to Python 3. If you have the AWS CLI installed, you can quickly check which version of Python it’s using with this command. ```bash $ aws --version aws-cli/1.18.191 Python/2.7.18 Darwin/19.6.0 botocore/1.19.31 ``` If the second portion of the version string, starting with **Python/** isn’t Python/3.6.x or higher, you should review the options below. ### Installing CLI with Python 3 If you’re using the **MSI installer**, you can simply start using these Python 3 based installers [[32 bit](https://s3.amazonaws.com/aws-cli/AWSCLI32PY3.msi)] [[64 bit](https://s3.amazonaws.com/aws-cli/AWSCLI64PY3.msi)]. Otherwise, upgrading Python versions isn’t difficult. 1. To begin, uninstall your existing copy of the AWS CLI. You can find instructions in the [CLI v1 installation guide](https://docs.aws.amazon.com/cli/latest/userguide/install-linux.html). 2. Now we’ll install Python 3.6 or later. You can get Python from [Python.org](https://www.python.org/downloads) or using your local package manager. 
In this example, we’ll use a recent version, Python 3.8.7, to ensure the longest support window. 3. Next, depending on your installation method, the new Python installation should be available at one of these locations. Use these commands to verify: ```bash $ python --version Python 3.8.7 $ python3 --version Python 3.8.7 $ python3.8 --version Python 3.8.7 ``` 5. Here, we're using the **python** command from above to make sure we're installing with the right version. Use whichever alias provided the desired Python version. ```bash $ python -m pip install awscli ``` Alternatively, if you're using the bundled installer you can use: ```bash $ python awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws ``` 7. If you wish, you may verify that the newly installed copy of the AWS CLI tool, **aws**, is using the correct version of Python. The **aws --version** command reports the **aws** tool's version number, followed by the version of Python it's running under, then the operating system version and the version of botocore. As long as the Python version is at least 3.6, you're ready to go: ```bash $ aws --version aws-cli/1.18.191 Python/3.8.7 Darwin/19.6.0 botocore/1.19.31 ``` ## If you're unable to upgrade to Python 3 It may be possible that you're unable to upgrade to Python 3. Under these circumstances, you should be prepared for the deprecation date, in order to not be inconvenienced when the time arrives. If you're using a version of the AWS CLI v1 released prior to the deprecation date, it will continue to function after end of support. These versions however will no longer be receiving security or feature updates. If those are required, you will need to migrate to Python 3 to start receiving updates again. ### Upgrade a pip-based install If you install the AWS CLI using pip, as long as you use pip 10.0 and later, you will automatically install the last available version compatible with Python 2.7. 
### Windows MSI Installer If you installed the AWS CLI v1 using the Windows MSI Installer for Python 3 [[32 bit](https://s3.amazonaws.com/aws-cli/AWSCLI32PY3.msi)] [[64 bit](https://s3.amazonaws.com/aws-cli/AWSCLI64PY3.msi)], you're not impacted by this transition. These installers stay up-to-date with each release. If you're still using the AWS CLI v1 as installed using the Windows MSI Installer for Python 2, be aware that after the deprecation date, the download links for the latest version of the CLI v1 Windows MSI Installer will point to the Python 3 MSIs. Previous releases, including those for Python 2, will remain available at their version-specific URLs: * `https://s3.amazonaws.com/aws-cli/AWSCLI32-{VERSION}.msi` * `https://s3.amazonaws.com/aws-cli/AWSCLI64-{VERSION}.msi` ### Upgrade with the AWS CLI bundled installer If you use the AWS CLI bundled installer to install the AWS CLI v1 and cannot upgrade, you will need to ensure you’re downloading a Python 2 compatible version. All versions released prior to the deprecation date should be compatible. You can download a specific installer using the URL `https://s3.amazonaws.com/aws-cli/awscli-bundle-{VERSION}.zip`, where "`{VERSION}`" is the AWS CLI version you wish to install. 
For example, you could choose version 1.18.200 using the following command: ```bash curl https://s3.amazonaws.com/aws-cli/awscli-bundle-1.18.200.zip -o awscli-bundle.zip ``` Once you've downloaded the bundle, proceed with step 2 of the bundle-based installation instructions for your platform: * [Linux](https://docs.aws.amazon.com/cli/latest/userguide/install-linux.html#install-linux-bundled) * [macOS](https://docs.aws.amazon.com/cli/latest/userguide/install-macos.html#install-macosos-bundled-sudo) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1642014456.0812721 awscli-1.22.34/awscli/0000755000000000000000000000000000000000000014366 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014455.0 awscli-1.22.34/awscli/__init__.py0000644000000000000000000000267200000000000016506 0ustar00rootroot00000000000000# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. """ AWSCLI ---- A Universal Command Line Environment for Amazon Web Services. 
""" import os __version__ = '1.22.34' # # Get our data path to be added to botocore's search path # _awscli_data_path = [] if 'AWS_DATA_PATH' in os.environ: for path in os.environ['AWS_DATA_PATH'].split(os.pathsep): path = os.path.expandvars(path) path = os.path.expanduser(path) _awscli_data_path.append(path) _awscli_data_path.append( os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data') ) os.environ['AWS_DATA_PATH'] = os.pathsep.join(_awscli_data_path) EnvironmentVariables = { 'ca_bundle': ('ca_bundle', 'AWS_CA_BUNDLE', None, None), 'output': ('output', 'AWS_DEFAULT_OUTPUT', 'json', None), } SCALAR_TYPES = set([ 'string', 'float', 'integer', 'long', 'boolean', 'double', 'blob', 'timestamp' ]) COMPLEX_TYPES = set(['structure', 'map', 'list']) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/__main__.py0000644000000000000000000000122700000000000016462 0ustar00rootroot00000000000000# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import sys from awscli.clidriver import main if __name__ == "__main__": sys.exit(main()) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014272.0 awscli-1.22.34/awscli/alias.py0000644000000000000000000002571700000000000016045 0ustar00rootroot00000000000000# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). 
You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import logging import os import shlex import subprocess from botocore.configloader import raw_config_parse from awscli.compat import compat_shell_quote from awscli.commands import CLICommand from awscli.utils import emit_top_level_args_parsed_event LOG = logging.getLogger(__name__) class InvalidAliasException(Exception): pass class AliasLoader(object): def __init__(self, alias_filename=os.path.expanduser( os.path.join('~', '.aws', 'cli', 'alias'))): """Interface for loading and interacting with alias file :param alias_filename: The name of the file to load aliases from. This file must be an INI file. """ self._filename = alias_filename self._aliases = None def _build_aliases(self): self._aliases = self._load_aliases() self._cleanup_alias_values(self._aliases.get('toplevel', {})) def _load_aliases(self): if os.path.exists(self._filename): return raw_config_parse( self._filename, parse_subsections=False) return {'toplevel': {}} def _cleanup_alias_values(self, aliases): for alias in aliases: # Beginning and end line separators should not be included # in the internal representation of the alias value. 
aliases[alias] = aliases[alias].strip() def get_aliases(self): if self._aliases is None: self._build_aliases() return self._aliases.get('toplevel', {}) class AliasCommandInjector(object): def __init__(self, session, alias_loader): """Injects alias commands for a command table :type session: botocore.session.Session :param session: The botocore session :type alias_loader: awscli.alias.AliasLoader :param alias_loader: The alias loader to use """ self._session = session self._alias_loader = alias_loader def inject_aliases(self, command_table, parser): for alias_name, alias_value in \ self._alias_loader.get_aliases().items(): if alias_value.startswith('!'): alias_cmd = ExternalAliasCommand(alias_name, alias_value) else: service_alias_cmd_args = [ alias_name, alias_value, self._session, command_table, parser ] # If the alias name matches something already in the # command table provide the command it is about # to clobber as a possible reference that it will # need to proxy to. if alias_name in command_table: service_alias_cmd_args.append( command_table[alias_name]) alias_cmd = ServiceAliasCommand(*service_alias_cmd_args) command_table[alias_name] = alias_cmd class BaseAliasCommand(CLICommand): _UNDOCUMENTED = True def __init__(self, alias_name, alias_value): """Base class for alias command :type alias_name: string :param alias_name: The name of the alias :type alias_value: string :param alias_value: The parsed value of the alias. 
This can be retrieved from `AliasLoader.get_aliases()[alias_name]` """ self._alias_name = alias_name self._alias_value = alias_value def __call__(self, args, parsed_args): raise NotImplementedError('__call__') @property def name(self): return self._alias_name @name.setter def name(self, value): self._alias_name = value class ServiceAliasCommand(BaseAliasCommand): UNSUPPORTED_GLOBAL_PARAMETERS = [ 'debug', 'profile' ] def __init__(self, alias_name, alias_value, session, command_table, parser, shadow_proxy_command=None): """Command for a `toplevel` subcommand alias :type alias_name: string :param alias_name: The name of the alias :type alias_value: string :param alias_value: The parsed value of the alias. This can be retrieved from `AliasLoader.get_aliases()[alias_name]` :type session: botocore.session.Session :param session: The botocore session :type command_table: dict :param command_table: The command table containing all of the possible service command objects that a particular alias could redirect to. :type parser: awscli.argparser.MainArgParser :param parser: The parser to parse commands provided at the top level of a CLI command which includes service commands and global parameters. This is used to parse the service command and any global parameters from the alias's value. :type shadow_proxy_command: CLICommand :param shadow_proxy_command: A built-in command that potentially shadows the alias in name. 
class ServiceAliasCommand(BaseAliasCommand):
    # Global parameters that cannot be overridden from inside an alias
    # value; detecting one of these raises InvalidAliasException.
    UNSUPPORTED_GLOBAL_PARAMETERS = [
        'debug',
        'profile'
    ]

    def __init__(self, alias_name, alias_value, session, command_table,
                 parser, shadow_proxy_command=None):
        """Command for a `toplevel` subcommand alias

        :type alias_name: string
        :param alias_name: The name of the alias

        :type alias_value: string
        :param alias_value: The parsed value of the alias. This can be
            retrieved from `AliasLoader.get_aliases()[alias_name]`

        :type session: botocore.session.Session
        :param session: The botocore session

        :type command_table: dict
        :param command_table: The command table containing all of the
            possible service command objects that a particular alias could
            redirect to.

        :type parser: awscli.argparser.MainArgParser
        :param parser: The parser to parse commands provided at the top level
            of a CLI command which includes service commands and global
            parameters. This is used to parse the service command and any
            global parameters from the alias's value.

        :type shadow_proxy_command: CLICommand
        :param shadow_proxy_command: A built-in command that
            potentially shadows the alias in name. If the alias
            references this command in its value, the alias should proxy
            to this command as opposed to proxy to itself in the command
            table.
        """
        super(ServiceAliasCommand, self).__init__(alias_name, alias_value)
        self._session = session
        self._command_table = command_table
        self._parser = parser
        self._shadow_proxy_command = shadow_proxy_command

    def __call__(self, args, parsed_globals):
        alias_args = self._get_alias_args()
        parsed_alias_args, remaining = self._parser.parse_known_args(
            alias_args)
        self._update_parsed_globals(parsed_alias_args, parsed_globals)
        # Take any of the remaining arguments that were not parsed out and
        # prepend them to the remaining args provided to the alias.
        remaining.extend(args)
        LOG.debug(
            'Alias %r passing on arguments: %r to %r command',
            self._alias_name, remaining, parsed_alias_args.command)
        # Pass the updated remaining args and global args to the service
        # command the alias proxied to.
        command = self._command_table[parsed_alias_args.command]
        if self._shadow_proxy_command:
            shadow_name = self._shadow_proxy_command.name
            # Use the shadow command only if the alias's value
            # uses that command indicating it needs to proxy over to
            # a built-in command.
            if shadow_name == parsed_alias_args.command:
                LOG.debug(
                    'Using shadowed command object: %s '
                    'for alias: %s', self._shadow_proxy_command,
                    self._alias_name
                )
                command = self._shadow_proxy_command
        return command(remaining, parsed_globals)

    def _get_alias_args(self):
        """Split the alias value into argv-style tokens.

        :raises InvalidAliasException: If the alias value cannot be
            shell-tokenized (e.g. unbalanced quotes).
        """
        try:
            alias_args = shlex.split(self._alias_value)
        except ValueError as e:
            raise InvalidAliasException(
                'Value of alias "%s" could not be parsed. '
                'Received error: %s when parsing:\n%s' % (
                    self._alias_name, e, self._alias_value)
            )

        # Multi-line alias values may leave trailing newlines on tokens.
        alias_args = [arg.strip(os.linesep) for arg in alias_args]
        LOG.debug(
            'Expanded subcommand alias %r with value: %r to: %r',
            self._alias_name, self._alias_value, alias_args
        )
        return alias_args

    def _update_parsed_globals(self, parsed_alias_args, parsed_globals):
        global_params_to_update = self._get_global_parameters_to_update(
            parsed_alias_args)
        # Emit the top level args parsed event to ensure all possible
        # customizations that typically get applied are applied to the
        # global parameters provided in the alias before updating
        # the original provided global parameter values
        # and passing those onto subsequent commands.
        emit_top_level_args_parsed_event(self._session, parsed_alias_args)
        for param_name in global_params_to_update:
            updated_param_value = getattr(parsed_alias_args, param_name)
            setattr(parsed_globals, param_name, updated_param_value)

    def _get_global_parameters_to_update(self, parsed_alias_args):
        # Retrieve a list of global parameters that the newly parsed args
        # from the alias will have to clobber from the originally provided
        # parsed globals.
        global_params_to_update = []
        for parsed_param, value in vars(parsed_alias_args).items():
            # To determine which parameters in the alias were global values
            # compare the parsed alias parameters to the default as
            # specified by the parser. If the parsed values from the alias
            # differs from the default value in the parser,
            # that global parameter must have been provided in the alias.
            if self._parser.get_default(parsed_param) != value:
                if parsed_param in self.UNSUPPORTED_GLOBAL_PARAMETERS:
                    # Fixed grammar in this user-facing error message
                    # ("not support" -> "not supported").
                    raise InvalidAliasException(
                        'Global parameter "--%s" detected in alias "%s" '
                        'which is not supported in subcommand aliases.' % (
                            parsed_param, self._alias_name))
                else:
                    global_params_to_update.append(parsed_param)
        return global_params_to_update
class ExternalAliasCommand(BaseAliasCommand):
    def __init__(self, alias_name, alias_value, invoker=subprocess.call):
        """Command for external aliases

        Executes command external of CLI as opposed to being a proxy
        to another command.

        :type alias_name: string
        :param alias_name: The name of the alias

        :type alias_value: string
        :param alias_value: The parsed value of the alias. This can be
            retrieved from `AliasLoader.get_aliases()[alias_name]`

        :type invoker: callable
        :param invoker: Callable to run arguments of external alias.
            The signature should match that of ``subprocess.call``
        """
        self._alias_name = alias_name
        self._alias_value = alias_value
        self._invoker = invoker

    def __call__(self, args, parsed_globals):
        # Strip the leading '!' marker and append each provided CLI
        # argument, shell-quoted, to build the command line to run.
        pieces = [self._alias_value[1:]]
        pieces.extend(compat_shell_quote(arg) for arg in args)
        joined = ' '.join(pieces)
        LOG.debug(
            'Using external alias %r with value: %r to run: %r',
            self._alias_name, self._alias_value, joined)
        return self._invoker(joined, shell=True)
class CommandAction(argparse.Action):
    """Custom action for CLI command arguments

    Allows the choices for the argument to be mutable. The choices
    are dynamically retrieved from the keys of the referenced command
    table
    """

    def __init__(self, option_strings, dest, command_table, **kwargs):
        self.command_table = command_table
        super(CommandAction, self).__init__(
            option_strings, dest, choices=self.choices, **kwargs
        )

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)

    @property
    def choices(self):
        # Recomputed on every access so commands added to the table
        # after parser construction are still valid choices.
        return list(self.command_table.keys())

    @choices.setter
    def choices(self, val):
        # argparse.Action unconditionally assigns ``choices`` during
        # __init__, but the value must stay dynamic (derived from the
        # command table), so the assignment is deliberately ignored.
        pass
""" # converted value must be one of the choices (if specified) if action.choices is not None and value not in action.choices: msg = ['Invalid choice, valid choices are:\n'] for i in range(len(action.choices))[::self.ChoicesPerLine]: current = [] for choice in action.choices[i:i+self.ChoicesPerLine]: current.append('%-40s' % choice) msg.append(' | '.join(current)) possible = get_close_matches(value, action.choices, cutoff=0.8) if possible: extra = ['\n\nInvalid choice: %r, maybe you meant:\n' % value] for word in possible: extra.append(' * %s' % word) msg.extend(extra) raise argparse.ArgumentError(action, '\n'.join(msg)) def parse_known_args(self, args, namespace=None): parsed, remaining = super(CLIArgParser, self).parse_known_args(args, namespace) terminal_encoding = getattr(sys.stdin, 'encoding', 'utf-8') if terminal_encoding is None: # In some cases, sys.stdin won't have an encoding set, # (e.g if it's set to a StringIO). In this case we just # default to utf-8. terminal_encoding = 'utf-8' for arg, value in vars(parsed).items(): if isinstance(value, six.binary_type): setattr(parsed, arg, value.decode(terminal_encoding)) elif isinstance(value, list): encoded = [] for v in value: if isinstance(v, six.binary_type): encoded.append(v.decode(terminal_encoding)) else: encoded.append(v) setattr(parsed, arg, encoded) return parsed, remaining class MainArgParser(CLIArgParser): Formatter = argparse.RawTextHelpFormatter def __init__(self, command_table, version_string, description, argument_table, prog=None): super(MainArgParser, self).__init__( formatter_class=self.Formatter, add_help=False, conflict_handler='resolve', description=description, usage=USAGE, prog=prog) self._build(command_table, version_string, argument_table) def _create_choice_help(self, choices): help_str = '' for choice in sorted(choices): help_str += '* %s\n' % choice return help_str def _build(self, command_table, version_string, argument_table): for argument_name in argument_table: argument = 
class ArgTableArgParser(CLIArgParser):
    """CLI arg parser based on an argument table."""

    def __init__(self, argument_table, command_table=None):
        # command_table is an optional subcommand_table. If it's passed
        # in, then we'll update the argparse to parse a 'subcommand'
        # argument and populate the choices field with the command
        # table keys.
        super(ArgTableArgParser, self).__init__(
            formatter_class=self.Formatter,
            add_help=False,
            usage=USAGE,
            conflict_handler='resolve')
        if command_table is None:
            command_table = {}
        self._build(argument_table, command_table)

    def _build(self, argument_table, command_table):
        for arg_name in argument_table:
            argument_table[arg_name].add_to_parser(self)
        if command_table:
            self.add_argument('subcommand', action=CommandAction,
                              command_table=command_table, nargs='?')

    def parse_known_args(self, args, namespace=None):
        # A bare 'help' argument short-circuits normal parsing.
        if len(args) == 1 and args[0] == 'help':
            namespace = argparse.Namespace()
            namespace.help = 'help'
            return namespace, []
        return super(ArgTableArgParser, self).parse_known_args(
            args, namespace)
class ParamError(Exception):
    """Raised when a CLI parameter's value cannot be processed."""

    def __init__(self, cli_name, message):
        """
        :type cli_name: string
        :param cli_name: The complete cli argument name,
            e.g. "--foo-bar". It should include the leading
            hyphens if that's how a user would specify the name.

        :type message: string
        :param message: The error message to display to the user.
        """
        # Keep the raw pieces available on the exception so callers can
        # re-wrap the error with more context if needed.
        self.cli_name = cli_name
        self.message = message
        full_message = ("Error parsing parameter '%s': %s" %
                        (cli_name, message))
        super(ParamError, self).__init__(full_message)
def detect_shape_structure(param):
    """Return a compact string describing ``param``'s shape structure,
    e.g. ``'scalar'``, ``'list-scalar'``, ``'structure(scalars)'``.
    """
    return _detect_shape_structure(param, [])


def _detect_shape_structure(param, stack):
    # ``stack`` tracks the shape names along the current path so that
    # recursive shapes terminate with 'recursive' instead of looping.
    if param.name in stack:
        return 'recursive'
    stack.append(param.name)
    try:
        type_name = param.type_name
        if type_name in SCALAR_TYPES:
            return 'scalar'
        if type_name == 'structure':
            sub_types = [_detect_shape_structure(p, stack)
                         for p in param.members.values()]
            # We're distinguishing between structure(scalar) and
            # structure(scalars), because for the case of a single
            # scalar in a structure we can simplify more than a
            # structure(scalars).
            all_scalar = all(s == 'scalar' for s in sub_types)
            if len(sub_types) == 1 and all_scalar:
                return 'structure(scalar)'
            if len(sub_types) > 1 and all_scalar:
                return 'structure(scalars)'
            return 'structure(%s)' % ', '.join(sorted(set(sub_types)))
        if type_name == 'list':
            return 'list-%s' % _detect_shape_structure(param.member, stack)
        if type_name == 'map':
            if param.value.type_name in SCALAR_TYPES:
                return 'map-scalar'
            return 'map-%s' % _detect_shape_structure(param.value, stack)
    finally:
        stack.pop()
def _unpack_json_cli_arg(argument_model, value, cli_name):
    """Parse ``value`` as JSON, preserving key order.

    :raises ParamError: (tagged with ``cli_name``) when the value is
        not valid JSON.
    """
    try:
        parsed = json.loads(value, object_pairs_hook=OrderedDict)
    except ValueError as e:
        raise ParamError(
            cli_name, "Invalid JSON: %s\nJSON received: %s"
            % (e, value))
    return parsed
def unpack_scalar_cli_arg(argument_model, value, cli_name=''):
    """Convert a raw command line string to the python value the
    scalar shape expects.

    :param argument_model: Shape whose ``type_name`` selects the
        conversion (integer/long, float/double, streaming blob,
        boolean; anything else is passed through unchanged).
    :param value: The raw value from the command line.
    :param cli_name: Used strictly for error reporting; it's not
        required to use unpack_scalar_cli_arg.
    :raises ParamError: If a streaming blob value is not a path to an
        existing file.
    """
    # This package is Python 3 only, so the ``six.string_types`` shim
    # is replaced with ``str``.
    type_name = argument_model.type_name
    if type_name in ('integer', 'long'):
        return int(value)
    if type_name in ('float', 'double'):
        # TODO: losing precision on double types
        return float(value)
    if type_name == 'blob' and \
            argument_model.serialization.get('streaming'):
        file_path = os.path.expanduser(os.path.expandvars(value))
        if not os.path.isfile(file_path):
            msg = 'Blob values must be a path to a file.'
            raise ParamError(cli_name, msg)
        # Caller is responsible for closing the returned handle.
        return open(file_path, 'rb')
    if type_name == 'boolean':
        if isinstance(value, str) and value.lower() == 'false':
            return False
        return bool(value)
    return value
class ParamShorthand(object):
    """Shared logic for shorthand parsing and doc-generation classes."""

    # Operations that must keep the deprecated shorthand parsing
    # behavior for lists of structures with a single member, keyed by
    # service id, then by operation name.
    _OLD_STYLE_LIST_CASES = {
        'firehose': {
            'put-record-batch': ['records'],
        },
        'workspaces': {
            'reboot-workspaces': ['reboot-workspace-requests'],
            'rebuild-workspaces': ['rebuild-workspace-requests'],
            'terminate-workspaces': ['terminate-workspace-requests'],
        },
        'elastic-load-balancing': {
            'remove-tags': ['tags'],
            'describe-instance-health': ['instances'],
            'deregister-instances-from-load-balancer': ['instances'],
            'register-instances-with-load-balancer': ['instances'],
        },
    }

    def _uses_old_list_case(self, service_id, operation_name,
                            argument_name):
        """
        Determines whether a given operation for a service needs to use the
        deprecated shorthand parsing case for lists of structures that only
        have a single member.
        """
        arguments = self._OLD_STYLE_LIST_CASES.get(
            service_id, {}).get(operation_name, [])
        return argument_name in arguments
:type value: str :param value: The value for the parameter type on the command line, e.g ``--foo this_value``, value would be ``"this_value"``. :returns: If we can parse the value we return the parsed value. If it looks like JSON, we return None (which tells the event emitter to use the default ``unpack_cli_arg`` provided that no other event handlers can parsed the value). If we run into an error parsing the value, a ``ParamError`` will be raised. """ if not self._should_parse_as_shorthand(cli_argument, value): return else: service_id, operation_name = \ find_service_and_method_in_event_name(event_name) return self._parse_as_shorthand( cli_argument, value, service_id, operation_name) def _parse_as_shorthand(self, cli_argument, value, service_id, operation_name): try: LOG.debug("Parsing param %s as shorthand", cli_argument.cli_name) handled_value = self._handle_special_cases( cli_argument, value, service_id, operation_name) if handled_value is not None: return handled_value if isinstance(value, list): # Because of how we're using argparse, list shapes # are configured with nargs='+' which means the ``value`` # is given to us "conveniently" as a list. When # this happens we need to parse each list element # individually. parsed = [self._parser.parse(v) for v in value] self._visitor.visit(parsed, cli_argument.argument_model) else: # Otherwise value is just a string. parsed = self._parser.parse(value) self._visitor.visit(parsed, cli_argument.argument_model) except shorthand.ShorthandParseError as e: raise ParamError(cli_argument.cli_name, str(e)) except (ParamError, ParamUnknownKeyError) as e: # The shorthand parse methods don't have the cli_name, # so any ParamError won't have this value. To accommodate # this, ParamErrors are caught and reraised with the cli_name # injected. 
raise ParamError(cli_argument.cli_name, str(e)) return parsed def _handle_special_cases(self, cli_argument, value, service_id, operation_name): # We need to handle a few special cases that the previous # parser handled in order to stay backwards compatible. model = cli_argument.argument_model if model.type_name == 'list' and \ model.member.type_name == 'structure' and \ len(model.member.members) == 1 and \ self._uses_old_list_case(service_id, operation_name, cli_argument.name): # First special case is handling a list of structures # of a single element such as: # # --instance-ids id-1 id-2 id-3 # # gets parsed as: # # [{"InstanceId": "id-1"}, {"InstanceId": "id-2"}, # {"InstanceId": "id-3"}] key_name = list(model.member.members.keys())[0] new_values = [{key_name: v} for v in value] return new_values elif model.type_name == 'structure' and \ len(model.members) == 1 and \ 'Value' in model.members and \ model.members['Value'].type_name == 'string' and \ '=' not in value: # Second special case is where a structure of a single # value whose member name is "Value" can be specified # as: # --instance-terminate-behavior shutdown # # gets parsed as: # {"Value": "shutdown"} return {'Value': value} def _should_parse_as_shorthand(self, cli_argument, value): # We first need to make sure this is a parameter that qualifies # for simplification. The first short-circuit case is if it looks # like json we immediately return. 
def supports_shorthand(self, argument_model):
    """Checks if a CLI argument supports shorthand syntax."""
    # No model means there is nothing to generate shorthand for.
    if argument_model is None:
        return False
    return _supports_shorthand_syntax(argument_model)
else: return self._shorthand_docs(cli_argument.argument_model, stack) except TooComplexError: return '' def _handle_special_cases(self, cli_argument, service_id, operation_name): model = cli_argument.argument_model if model.type_name == 'list' and \ model.member.type_name == 'structure' and \ len(model.member.members) == 1 and \ self._uses_old_list_case( service_id, operation_name, cli_argument.name): member_name = list(model.member.members)[0] # Handle special case where the min/max is exactly one. metadata = model.metadata if metadata.get('min') == 1 and metadata.get('max') == 1: return '%s %s1' % (cli_argument.cli_name, member_name) return '%s %s1 %s2 %s3' % (cli_argument.cli_name, member_name, member_name, member_name) elif model.type_name == 'structure' and \ len(model.members) == 1 and \ 'Value' in model.members and \ model.members['Value'].type_name == 'string': return self._DONT_DOC return '' def _shorthand_docs(self, argument_model, stack): if len(stack) > self._MAX_STACK: raise TooComplexError() if argument_model.type_name == 'structure': return self._structure_docs(argument_model, stack) elif argument_model.type_name == 'list': return self._list_docs(argument_model, stack) elif argument_model.type_name == 'map': return self._map_docs(argument_model, stack) else: return argument_model.type_name def _list_docs(self, argument_model, stack): list_member = argument_model.member stack.append(list_member.name) try: element_docs = self._shorthand_docs(argument_model.member, stack) finally: stack.pop() if list_member.type_name in COMPLEX_TYPES or len(stack) > 1: return '[%s,%s]' % (element_docs, element_docs) else: return '%s,%s' % (element_docs, element_docs) def _map_docs(self, argument_model, stack): k = argument_model.key value_docs = self._shorthand_docs(argument_model.value, stack) start = 'KeyName1=%s,KeyName2=%s' % (value_docs, value_docs) if k.enum and not stack: start += '\n\nWhere valid key names are:\n' for enum in k.enum: start += ' %s\n' % enum elif 
stack: start = '{%s}' % start return start def _structure_docs(self, argument_model, stack): parts = [] for name, member_shape in argument_model.members.items(): if is_document_type_container(member_shape): continue parts.append(self._member_docs(name, member_shape, stack)) inner_part = ','.join(parts) if not stack: return inner_part return '{%s}' % inner_part def _member_docs(self, name, shape, stack): if stack.count(shape.name) > 0: return '( ... recursive ... )' stack.append(shape.name) try: value_doc = self._shorthand_docs(shape, stack) finally: stack.pop() return '%s=%s' % (name, value_doc) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014272.0 awscli-1.22.34/awscli/arguments.py0000644000000000000000000004460300000000000016754 0ustar00rootroot00000000000000# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. """Abstractions for CLI arguments. This module contains abstractions for representing CLI arguments. This includes how the CLI argument parser is created, how arguments are serialized, and how arguments are bound (if at all) to operation arguments. The BaseCLIArgument is the interface for all arguments. This is the interface expected by objects that work with arguments. If you want to implement your own argument subclass, make sure it implements everything in BaseCLIArgument. Arguments generally fall into one of several categories: * global argument. 
def create_argument_model_from_schema(schema):
    """Convert a JSON schema (described in schema.py) to a shape object
    from ``botocore.model.Shape`` that can be used as the
    ``argument_model`` for the Argument classes below.
    """
    transformer = SchemaTransformer()
    shapes = transformer.transform(schema)
    resolver = model.ShapeResolver(shapes)
    # The SchemaTransformer guarantees that the top level shape
    # will always be named 'InputShape'.
    return resolver.get_shape_by_name('InputShape')
:type argument_table: dict :param argument_table: The argument table. The key is the argument name, and the value is an object implementing this interface. """ argument_table[self.name] = self def add_to_parser(self, parser): """Add this object to the parser instance. This method is called by the associated ``ArgumentParser`` instance. This method should make the relevant calls to ``add_argument`` to add itself to the argparser. :type parser: ``argparse.ArgumentParser``. :param parser: The argument parser associated with the operation. """ pass def add_to_params(self, parameters, value): """Add this object to the parameters dict. This method is responsible for taking the value specified on the command line, and deciding how that corresponds to parameters used by the service/operation. :type parameters: dict :param parameters: The parameters dictionary that will be given to ``botocore``. This should match up to the parameters associated with the particular operation. :param value: The value associated with the CLI option. """ pass @property def name(self): return self._name @property def cli_name(self): return '--' + self._name @property def cli_type_name(self): raise NotImplementedError("cli_type_name") @property def required(self): raise NotImplementedError("required") @property def documentation(self): raise NotImplementedError("documentation") @property def cli_type(self): raise NotImplementedError("cli_type") @property def py_name(self): return self._name.replace('-', '_') @property def choices(self): """List valid choices for argument value. If this value is not None then this should return a list of valid values for the argument. """ return None @property def synopsis(self): return '' @property def positional_arg(self): return False @property def nargs(self): return None @name.setter def name(self, value): self._name = value @property def group_name(self): """Get the group name associated with the argument. An argument can be part of a group. 
        This property will return the name of that group.

        This base class has no default behavior for groups, code
        that consumes argument objects can use them for whatever
        purposes they like (documentation, mutually exclusive group
        validation, etc.).

        """
        return None


class CustomArgument(BaseCLIArgument):
    """
    Represents a CLI argument that is configured from a dictionary.

    For example, the "top level" arguments used for the CLI
    (--region, --output) can use a CustomArgument argument,
    as these are described in the cli.json file as dictionaries.

    This class is also useful for plugins/customizations that want to
    add additional args.

    """

    def __init__(self, name, help_text='', dest=None, default=None,
                 action=None, required=None, choices=None, nargs=None,
                 cli_type_name=None, group_name=None, positional_arg=False,
                 no_paramfile=False, argument_model=None, synopsis='',
                 const=None):
        self._name = name
        self._help = help_text
        self._dest = dest
        self._default = default
        self._action = action
        self._required = required
        self._nargs = nargs
        self._const = const
        self._cli_type_name = cli_type_name
        self._group_name = group_name
        self._positional_arg = positional_arg
        if choices is None:
            choices = []
        self._choices = choices
        self._synopsis = synopsis
        # These are public attributes that are ok to access from external
        # objects.
        self.no_paramfile = no_paramfile
        self.argument_model = None
        if argument_model is None:
            argument_model = self._create_scalar_argument_model()
        self.argument_model = argument_model
        # If the top level element is a list then set nargs to
        # accept multiple values separated by a space.
        if self.argument_model is not None and \
                self.argument_model.type_name == 'list':
            self._nargs = '+'

    def _create_scalar_argument_model(self):
        if self._nargs is not None:
            # If nargs is not None then argparse will parse the value
            # as a list, so we don't create an argument_object so we don't
            # go through param validation.
            return None
        # If no argument model is provided, we create a basic
        # shape argument.
        type_name = self.cli_type_name
        return create_argument_model_from_schema({'type': type_name})

    @property
    def cli_name(self):
        # Positional args are referenced by bare name; optional args get
        # the ``--`` prefix.
        if self._positional_arg:
            return self._name
        else:
            return '--' + self._name

    def add_to_parser(self, parser):
        """
        See the ``BaseCLIArgument.add_to_parser`` docs for more information.

        """
        cli_name = self.cli_name
        # Only forward the argparse kwargs that were explicitly configured,
        # so argparse's own defaults apply otherwise.
        kwargs = {}
        if self._dest is not None:
            kwargs['dest'] = self._dest
        if self._action is not None:
            kwargs['action'] = self._action
        if self._default is not None:
            kwargs['default'] = self._default
        if self._choices:
            kwargs['choices'] = self._choices
        if self._required is not None:
            kwargs['required'] = self._required
        if self._nargs is not None:
            kwargs['nargs'] = self._nargs
        if self._const is not None:
            kwargs['const'] = self._const
        parser.add_argument(cli_name, **kwargs)

    @property
    def required(self):
        if self._required is None:
            return False
        return self._required

    @required.setter
    def required(self, value):
        self._required = value

    @property
    def documentation(self):
        return self._help

    @property
    def cli_type_name(self):
        if self._cli_type_name is not None:
            return self._cli_type_name
        elif self._action in ['store_true', 'store_false']:
            return 'boolean'
        elif self.argument_model is not None:
            return self.argument_model.type_name
        else:
            # Default to 'string' type if we don't have any
            # other info.
            return 'string'

    @property
    def cli_type(self):
        cli_type = str
        if self._action in ['store_true', 'store_false']:
            cli_type = bool
        return cli_type

    @property
    def choices(self):
        return self._choices

    @property
    def group_name(self):
        return self._group_name

    @property
    def synopsis(self):
        return self._synopsis

    @property
    def positional_arg(self):
        return self._positional_arg

    @property
    def nargs(self):
        return self._nargs


class CLIArgument(BaseCLIArgument):
    """Represents a CLI argument that maps to a service parameter.
""" TYPE_MAP = { 'structure': str, 'map': str, 'timestamp': str, 'list': str, 'string': str, 'float': float, 'integer': str, 'long': int, 'boolean': bool, 'double': float, 'blob': str } def __init__(self, name, argument_model, operation_model, event_emitter, is_required=False, serialized_name=None): """ :type name: str :param name: The name of the argument in "cli" form (e.g. ``min-instances``). :type argument_model: ``botocore.model.Shape`` :param argument_model: The shape object that models the argument. :type argument_model: ``botocore.model.OperationModel`` :param argument_model: The object that models the associated operation. :type event_emitter: ``botocore.hooks.BaseEventHooks`` :param event_emitter: The event emitter to use when emitting events. This class will emit events during parts of the argument parsing process. This event emitter is what is used to emit such events. :type is_required: boolean :param is_required: Indicates if this parameter is required or not. """ self._name = name # This is the name we need to use when constructing the parameters # dict we send to botocore. While we can change the .name attribute # which is the name exposed in the CLI, the serialized name we use # for botocore is invariant and should not be changed. 
if serialized_name is None: serialized_name = name self._serialized_name = serialized_name self.argument_model = argument_model self._required = is_required self._operation_model = operation_model self._event_emitter = event_emitter self._documentation = argument_model.documentation @property def py_name(self): return self._name.replace('-', '_') @property def required(self): return self._required @required.setter def required(self, value): self._required = value @property def documentation(self): return self._documentation @documentation.setter def documentation(self, value): self._documentation = value @property def cli_type_name(self): return self.argument_model.type_name @property def cli_type(self): return self.TYPE_MAP.get(self.argument_model.type_name, str) def add_to_parser(self, parser): """ See the ``BaseCLIArgument.add_to_parser`` docs for more information. """ cli_name = self.cli_name parser.add_argument( cli_name, help=self.documentation, type=self.cli_type, required=self.required) def add_to_params(self, parameters, value): if value is None: return else: # This is a two step process. First is the process of converting # the command line value into a python value. Normally this is # handled by argparse directly, but there are cases where extra # processing is needed. For example, "--foo name=value" the value # can be converted from "name=value" to {"name": "value"}. This is # referred to as the "unpacking" process. Once we've unpacked the # argument value, we have to decide how this is converted into # something that can be consumed by botocore. Many times this is # just associating the key and value in the params dict as down # below. Sometimes this can be more complicated, and subclasses # can customize as they need. 
            unpacked = self._unpack_argument(value)
            LOG.debug('Unpacked value of %r for parameter "%s": %r', value,
                      self.py_name, unpacked)
            parameters[self._serialized_name] = unpacked

    def _unpack_argument(self, value):
        # Give plugins a chance to convert the raw CLI value first via the
        # ``process-cli-arg.<service>.<operation>`` event; otherwise use the
        # default unpack_cli_arg conversion.
        service_name = self._operation_model.service_model.service_name
        operation_name = xform_name(self._operation_model.name, '-')
        override = self._emit_first_response('process-cli-arg.%s.%s' % (
            service_name, operation_name), param=self.argument_model,
            cli_argument=self, value=value)
        if override is not None:
            # A plugin supplied an alternate conversion,
            # use it instead.
            return override
        else:
            # Fall back to the default arg processing.
            return unpack_cli_arg(self, value)

    def _emit(self, name, **kwargs):
        return self._event_emitter.emit(name, **kwargs)

    def _emit_first_response(self, name, **kwargs):
        # Returns the first handler response that is not None.
        responses = self._emit(name, **kwargs)
        return first_non_none_response(responses)


class ListArgument(CLIArgument):
    # A CLIArgument whose value is a list of space separated values
    # (``nargs='*'``).

    def add_to_parser(self, parser):
        cli_name = self.cli_name
        parser.add_argument(cli_name,
                            nargs='*',
                            type=self.cli_type,
                            required=self.required)


class BooleanArgument(CLIArgument):
    """Represent a boolean CLI argument.

    A boolean parameter is specified without a value::

        aws foo bar --enabled

    For cases where the boolean parameter is required we need to add
    two parameters::

        aws foo bar --enabled
        aws foo bar --no-enabled

    We use the capabilities of the CLIArgument to help achieve this.
""" def __init__(self, name, argument_model, operation_model, event_emitter, is_required=False, action='store_true', dest=None, group_name=None, default=None, serialized_name=None): super(BooleanArgument, self).__init__(name, argument_model, operation_model, event_emitter, is_required, serialized_name=serialized_name) self._mutex_group = None self._action = action if dest is None: self._destination = self.py_name else: self._destination = dest if group_name is None: self._group_name = self.name else: self._group_name = group_name self._default = default def add_to_params(self, parameters, value): # If a value was explicitly specified (so value is True/False # but *not* None) then we add it to the params dict. # If the value was not explicitly set (value is None) # we don't add it to the params dict. if value is not None: parameters[self._serialized_name] = value def add_to_arg_table(self, argument_table): # Boolean parameters are a bit tricky. For a single boolean parameter # we actually want two CLI params, a --foo, and a --no-foo. To do this # we need to add two entries to the argument table. So we can add # ourself as the positive option (--no), and then create a clone of # ourselves for the negative service. We then insert both into the # arg table. 
        argument_table[self.name] = self
        negative_name = 'no-%s' % self.name
        # The negative variant shares the destination and serialized name so
        # whichever flag the user passes wins; it uses 'store_false' so it
        # records False for the same parameter.
        negative_version = self.__class__(
            negative_name, self.argument_model,
            self._operation_model, self._event_emitter,
            action='store_false', dest=self._destination,
            group_name=self.group_name,
            serialized_name=self._serialized_name)
        argument_table[negative_name] = negative_version

    def add_to_parser(self, parser):
        parser.add_argument(self.cli_name,
                            help=self.documentation,
                            action=self._action,
                            default=self._default,
                            dest=self._destination)

    @property
    def group_name(self):
        return self._group_name
././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1642014456.0852723 awscli-1.22.34/awscli/bcdoc/0000755000000000000000000000000000000000000015440 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/bcdoc/__init__.py0000644000000000000000000000111400000000000017546 0ustar00rootroot00000000000000# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
__version__ = '0.16.0'
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/bcdoc/docevents.py0000644000000000000000000001117500000000000020011 0ustar00rootroot00000000000000# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

# Registry of the documentation events that can be emitted and the
# ``%``-format suffix appended to each base event name (event class, and
# optionally an arg/command/related-item name).
DOC_EVENTS = {
    'doc-breadcrumbs': '.%s',
    'doc-title': '.%s',
    'doc-description': '.%s',
    'doc-synopsis-start': '.%s',
    'doc-synopsis-option': '.%s.%s',
    'doc-synopsis-end': '.%s',
    'doc-options-start': '.%s',
    'doc-option': '.%s.%s',
    'doc-option-example': '.%s.%s',
    'doc-options-end': '.%s',
    'doc-examples': '.%s',
    'doc-output': '.%s',
    'doc-subitems-start': '.%s',
    'doc-subitem': '.%s.%s',
    'doc-subitems-end': '.%s',
    'doc-relateditems-start': '.%s',
    'doc-relateditem': '.%s.%s',
    'doc-relateditems-end': '.%s'
}


def _is_documented(obj):
    # An argument or command can set an '_UNDOCUMENTED' attribute to True
    # to indicate something that exists but shouldn't be documented.  This
    # can be used for backwards compatibility of deprecated arguments.
    return not getattr(obj, '_UNDOCUMENTED', False)


def generate_events(session, help_command):
    """Emit the full sequence of documentation events for a help command.

    :param session: The session whose ``emit`` method is used to fire each
        ``doc-*`` event (see ``DOC_EVENTS`` for the full set).
    :param help_command: The help command being documented.  Its
        ``event_class``, ``arg_table``, ``command_table`` and
        ``related_items`` attributes drive which events are emitted.
    """
    # Now generate the documentation events
    session.emit('doc-breadcrumbs.%s' % help_command.event_class,
                 help_command=help_command)
    session.emit('doc-title.%s' % help_command.event_class,
                 help_command=help_command)
    session.emit('doc-description.%s' % help_command.event_class,
                 help_command=help_command)
    session.emit('doc-synopsis-start.%s' % help_command.event_class,
                 help_command=help_command)
    if help_command.arg_table:
        for arg_name in help_command.arg_table:
            if not _is_documented(help_command.arg_table[arg_name]):
                continue
            session.emit(
                'doc-synopsis-option.%s.%s' % (help_command.event_class,
                                               arg_name),
                arg_name=arg_name, help_command=help_command)
    session.emit('doc-synopsis-end.%s' % help_command.event_class,
                 help_command=help_command)
    session.emit('doc-options-start.%s' % help_command.event_class,
                 help_command=help_command)
    if help_command.arg_table:
        for arg_name in help_command.arg_table:
            if not _is_documented(help_command.arg_table[arg_name]):
                continue
            session.emit('doc-option.%s.%s' % (help_command.event_class,
                                               arg_name),
                         arg_name=arg_name, help_command=help_command)
            session.emit('doc-option-example.%s.%s' %
                         (help_command.event_class, arg_name),
                         arg_name=arg_name, help_command=help_command)
    session.emit('doc-options-end.%s' % help_command.event_class,
                 help_command=help_command)
    session.emit('doc-subitems-start.%s' % help_command.event_class,
                 help_command=help_command)
    if help_command.command_table:
        for command_name in sorted(help_command.command_table.keys()):
            # BUGFIX: this loop previously used
            # ``hasattr(command, '_UNDOCUMENTED')``, which hid a subcommand
            # even when _UNDOCUMENTED was explicitly set to False.  Use the
            # same getattr-based check as the argument loops above.
            if not _is_documented(help_command.command_table[command_name]):
                continue
            session.emit('doc-subitem.%s.%s'
                         % (help_command.event_class, command_name),
                         command_name=command_name, help_command=help_command)
    session.emit('doc-subitems-end.%s' % help_command.event_class,
                 help_command=help_command)
    session.emit('doc-examples.%s' % help_command.event_class,
                 help_command=help_command)
    session.emit('doc-output.%s' % help_command.event_class,
                 help_command=help_command)
    session.emit('doc-relateditems-start.%s' % help_command.event_class,
                 help_command=help_command)
    if help_command.related_items:
        for related_item in sorted(help_command.related_items):
            session.emit('doc-relateditem.%s.%s'
                         % (help_command.event_class, related_item),
                         help_command=help_command, related_item=related_item)
    session.emit('doc-relateditems-end.%s' % help_command.event_class,
                 help_command=help_command)
# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

# MODERNIZED: the package requires Python >= 3.6 (see PKG-INFO), so the
# ``six.moves.html_parser`` / ``six.string_types`` indirection is replaced
# with the stdlib ``html.parser.HTMLParser`` and ``str``.  On Python 3 both
# resolve to exactly the same objects, so behavior is unchanged.
from html.parser import HTMLParser


class DocStringParser(HTMLParser):
    """
    A simple HTML parser.  Focused on converting the subset of HTML
    that appears in the documentation strings of the JSON models into
    simple ReST format.
    """

    def __init__(self, doc):
        # ``doc`` must be assigned before HTMLParser.__init__ runs because
        # the base __init__ calls self.reset(), which builds an HTMLTree
        # around self.doc.
        self.tree = None
        self.doc = doc
        HTMLParser.__init__(self)

    def reset(self):
        HTMLParser.reset(self)
        self.tree = HTMLTree(self.doc)

    def feed(self, data):
        HTMLParser.feed(self, data)
        # Flush whatever was parsed so far into the document, then start a
        # fresh tree for the next feed() call.
        self.tree.write()
        self.tree = HTMLTree(self.doc)

    def close(self):
        HTMLParser.close(self)
        # Write if there is anything remaining.
        self.tree.write()
        self.tree = HTMLTree(self.doc)

    def handle_starttag(self, tag, attrs):
        self.tree.add_tag(tag, attrs=attrs)

    def handle_endtag(self, tag):
        self.tree.add_tag(tag, is_start=False)

    def handle_data(self, data):
        self.tree.add_data(data)


class HTMLTree(object):
    """
    A tree which handles HTML nodes. Designed to work with a python HTML
    parser, meaning that the current_node will be the most recently opened
    tag. When a tag is closed, the current_node moves up to the parent node.
    """

    def __init__(self, doc):
        self.doc = doc
        self.head = StemNode()
        self.current_node = self.head
        self.unhandled_tags = []

    def add_tag(self, tag, attrs=None, is_start=True):
        # Tags with no start_*/end_* handler on the doc's style are
        # recorded and otherwise ignored.
        if not self._doc_has_handler(tag, is_start):
            self.unhandled_tags.append(tag)
            return

        if is_start:
            if tag == 'li':
                node = LineItemNode(attrs)
            else:
                node = TagNode(tag, attrs)
            self.current_node.add_child(node)
            self.current_node = node
        else:
            self.current_node = self.current_node.parent

    def _doc_has_handler(self, tag, is_start):
        if is_start:
            handler_name = 'start_%s' % tag
        else:
            handler_name = 'end_%s' % tag

        return hasattr(self.doc.style, handler_name)

    def add_data(self, data):
        self.current_node.add_child(DataNode(data))

    def write(self):
        self.head.write(self.doc)


class Node(object):
    def __init__(self, parent=None):
        self.parent = parent

    def write(self, doc):
        raise NotImplementedError


class StemNode(Node):
    # An interior node: holds children but emits no markup of its own.
    def __init__(self, parent=None):
        super(StemNode, self).__init__(parent)
        self.children = []

    def add_child(self, child):
        child.parent = self
        self.children.append(child)

    def write(self, doc):
        self._write_children(doc)

    def _write_children(self, doc):
        for child in self.children:
            child.write(doc)


class TagNode(StemNode):
    """
    A generic Tag node. It will verify that handlers exist before writing.
    """
    def __init__(self, tag, attrs=None, parent=None):
        super(TagNode, self).__init__(parent)
        self.attrs = attrs
        self.tag = tag

    def write(self, doc):
        self._write_start(doc)
        self._write_children(doc)
        self._write_end(doc)

    def _write_start(self, doc):
        handler_name = 'start_%s' % self.tag
        if hasattr(doc.style, handler_name):
            getattr(doc.style, handler_name)(self.attrs)

    def _write_end(self, doc):
        handler_name = 'end_%s' % self.tag
        if hasattr(doc.style, handler_name):
            getattr(doc.style, handler_name)()


class LineItemNode(TagNode):
    def __init__(self, attrs=None, parent=None):
        super(LineItemNode, self).__init__('li', attrs, parent)

    def write(self, doc):
        self._lstrip(self)
        super(LineItemNode, self).write(doc)

    def _lstrip(self, node):
        """
        Traverses the tree, stripping out whitespace until text data is found

        :param node: The node to strip
        :return: True if non-whitespace data was found, False otherwise
        """
        for child in node.children:
            if isinstance(child, DataNode):
                child.lstrip()
                if child.data:
                    return True
            else:
                found = self._lstrip(child)
                if found:
                    return True
        return False


class DataNode(Node):
    """
    A Node that contains only string data.
    """
    def __init__(self, data, parent=None):
        super(DataNode, self).__init__(parent)
        if not isinstance(data, str):
            raise ValueError("Expecting string type, %s given." % type(data))
        self.data = data

    def lstrip(self):
        self.data = self.data.lstrip()

    def write(self, doc):
        if not self.data:
            return

        if self.data.isspace():
            str_data = ' '
        else:
            # Collapse internal runs of whitespace to single spaces, but
            # remember a trailing space so inline markup stays separated.
            end_space = self.data[-1].isspace()
            words = self.data.split()
            words = doc.translate_words(words)
            str_data = ' '.join(words)
            if end_space:
                str_data += ' '
        doc.handle_data(str_data)
# --- archive boundary: awscli/bcdoc/restdoc.py ---
# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import logging

from botocore.compat import OrderedDict

from awscli.bcdoc.docstringparser import DocStringParser
from awscli.bcdoc.style import ReSTStyle

LOG = logging.getLogger('bcdocs')


class ReSTDocument(object):
    # A document built up as a stack of written string fragments
    # (``_writes``) and rendered with ReST styling.

    def __init__(self, target='man'):
        self.style = ReSTStyle(self)
        self.target = target
        self.parser = DocStringParser(self)
        self.keep_data = True
        self.do_translation = False
        self.translation_map = {}
        self.hrefs = {}
        self._writes = []
        self._last_doc_string = None

    def _write(self, s):
        if self.keep_data and s is not None:
            self._writes.append(s)

    def write(self, content):
        """
        Write content into the document.
        """
        self._write(content)

    def writeln(self, content):
        """
        Write content on a newline.
        """
        self._write('%s%s\n' % (self.style.spaces(), content))

    def peek_write(self):
        """
        Returns the last content written to the document without
        removing it from the stack.
        """
        return self._writes[-1]

    def pop_write(self):
        """
        Removes and returns the last content written to the stack.
        """
        return self._writes.pop()

    def push_write(self, s):
        """
        Places new content on the stack.
        """
        self._writes.append(s)

    def getvalue(self):
        """
        Returns the current content of the document as a string
        (UTF-8 encoded bytes).
        """
        if self.hrefs:
            # Append link target definitions for any hyperlinks collected
            # while writing the document.
            self.style.new_paragraph()
            for refname, link in self.hrefs.items():
                self.style.link_target_definition(refname, link)
        return ''.join(self._writes).encode('utf-8')

    def translate_words(self, words):
        return [self.translation_map.get(w, w) for w in words]

    def handle_data(self, data):
        if data and self.keep_data:
            self._write(data)

    def include_doc_string(self, doc_string):
        # Feed an HTML doc string through the parser; parsing errors are
        # deliberately swallowed (best effort) and logged at debug level.
        if doc_string:
            try:
                start = len(self._writes)
                self.parser.feed(doc_string)
                self.parser.close()
                end = len(self._writes)
                self._last_doc_string = (start, end)
            except Exception:
                LOG.debug('Error parsing doc string', exc_info=True)
                LOG.debug(doc_string)

    def remove_last_doc_string(self):
        # Removes all writes inserted by last doc string
        if self._last_doc_string is not None:
            start, end = self._last_doc_string
            del self._writes[start:end]


class DocumentStructure(ReSTDocument):
    def __init__(self, name, section_names=None, target='man', context=None):
        """Provides a Hierarchical structure to a ReSTDocument

        You can write to it similar to as you can to a ReSTDocument but
        has an innate structure for more organization and abstraction.

        :param name: The name of the document
        :param section_names: A list of sections to be included
            in the document.
        :param target: The target documentation of the Document structure
        :param context: A dictionary of data to store with the structure.
            These are only stored per section not the entire structure.
        """
        super(DocumentStructure, self).__init__(target=target)
        self._name = name
        self._structure = OrderedDict()
        self._path = [self._name]
        self._context = {}
        if context is not None:
            self._context = context
        if section_names is not None:
            self._generate_structure(section_names)

    @property
    def name(self):
        """The name of the document structure"""
        return self._name

    @property
    def path(self):
        """
        A list of where to find a particular document structure in the
        overlying document structure.
        """
        return self._path

    @path.setter
    def path(self, value):
        self._path = value

    @property
    def available_sections(self):
        return list(self._structure)

    @property
    def context(self):
        return self._context

    def _generate_structure(self, section_names):
        for section_name in section_names:
            self.add_new_section(section_name)

    def add_new_section(self, name, context=None):
        """Adds a new section to the current document structure

        This document structure will be considered a section to the
        current document structure but will in itself be an entirely
        new document structure that can be written to and have sections
        as well

        :param name: The name of the section.
        :param context: A dictionary of data to store with the structure.
            These are only stored per section not the entire structure.
        :rtype: DocumentStructure
        :returns: A new document structure to add to but lives as a section
            to the document structure it was instantiated from.
        """
        # Add a new section
        section = self.__class__(name=name, target=self.target,
                                 context=context)
        section.path = self.path + [name]
        # Indent the section appropriately as well
        section.style.indentation = self.style.indentation
        section.translation_map = self.translation_map
        section.hrefs = self.hrefs
        self._structure[name] = section
        return section

    def get_section(self, name):
        """Retrieve a section"""
        return self._structure[name]

    def delete_section(self, name):
        """Delete a section"""
        del self._structure[name]

    def flush_structure(self):
        """Flushes a doc structure to a ReSTructed string

        The document is flushed out in a DFS style where sections and
        their subsections' values are added to the string as they are
        visited.
""" # We are at the root flush the links at the beginning of the # document if len(self.path) == 1: if self.hrefs: self.style.new_paragraph() for refname, link in self.hrefs.items(): self.style.link_target_definition(refname, link) value = self.getvalue() for name, section in self._structure.items(): value += section.flush_structure() return value def getvalue(self): return ''.join(self._writes).encode('utf-8') def remove_all_sections(self): self._structure = OrderedDict() def clear_text(self): self._writes = [] ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/bcdoc/style.py0000644000000000000000000002707100000000000017161 0ustar00rootroot00000000000000# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import logging

logger = logging.getLogger('bcdocs')


class BaseStyle(object):
    # Base styling interface: tracks indentation and provides no-op/pass
    # through implementations of the inline markup helpers.

    def __init__(self, doc, indent_width=2):
        self.doc = doc
        self.indent_width = indent_width
        self._indent = 0
        self.keep_data = True

    @property
    def indentation(self):
        return self._indent

    @indentation.setter
    def indentation(self, value):
        self._indent = value

    def new_paragraph(self):
        return '\n%s' % self.spaces()

    def indent(self):
        self._indent += 1

    def dedent(self):
        if self._indent > 0:
            self._indent -= 1

    def spaces(self):
        return ' ' * (self._indent * self.indent_width)

    def bold(self, s):
        return s

    def ref(self, link, title=None):
        return link

    def h2(self, s):
        return s

    def h3(self, s):
        return s

    def underline(self, s):
        return s

    def italics(self, s):
        return s


class ReSTStyle(BaseStyle):
    # Renders the style operations as reStructuredText by writing to
    # ``self.doc``.

    def __init__(self, doc, indent_width=2):
        BaseStyle.__init__(self, doc, indent_width)
        self.do_p = True
        self.a_href = None
        self.list_depth = 0

    def new_paragraph(self):
        self.doc.write('\n\n%s' % self.spaces())

    def new_line(self):
        self.doc.write('\n%s' % self.spaces())

    def _start_inline(self, markup):
        self.doc.write(markup)

    def _end_inline(self, markup):
        # Sometimes the HTML markup has whitespace between the end
        # of the text inside the inline markup and the closing element
        # (e.g. <b>foobar </b>).  This trailing space will cause
        # problems in the ReST inline markup so we remove it here
        # by popping the last item written off the stack, striping
        # the whitespace and then pushing it back on the stack.
        last_write = self.doc.pop_write().rstrip(' ')

        # Sometimes, for whatever reason, a tag like <b/> is present. This
        # is problematic because if we simply translate that directly then
        # we end up with something like ****, which rst will assume is a
        # heading instead of an empty bold.
        if last_write == markup:
            return
        self.doc.push_write(last_write)
        self.doc.write(markup + ' ')

    def start_bold(self, attrs=None):
        self._start_inline('**')

    def end_bold(self):
        self._end_inline('**')

    def start_b(self, attrs=None):
        self.doc.do_translation = True
        self.start_bold(attrs)

    def end_b(self):
        self.doc.do_translation = False
        self.end_bold()

    def bold(self, s):
        if s:
            self.start_bold()
            self.doc.write(s)
            self.end_bold()

    def ref(self, title, link=None):
        if link is None:
            link = title
        self.doc.write(':doc:`%s <%s>`' % (title, link))

    def _heading(self, s, border_char):
        # ReST headings are over- and under-lined with a border the same
        # length as the title.
        border = border_char * len(s)
        self.new_paragraph()
        self.doc.write('%s\n%s\n%s' % (border, s, border))
        self.new_paragraph()

    def h1(self, s):
        self._heading(s, '*')

    def h2(self, s):
        self._heading(s, '=')

    def h3(self, s):
        self._heading(s, '-')

    def start_italics(self, attrs=None):
        self._start_inline('*')

    def end_italics(self):
        self._end_inline('*')

    def italics(self, s):
        if s:
            self.start_italics()
            self.doc.write(s)
            self.end_italics()

    def start_p(self, attrs=None):
        if self.do_p:
            self.doc.write('\n\n%s' % self.spaces())

    def end_p(self):
        if self.do_p:
            self.doc.write('\n\n%s' % self.spaces())

    def start_code(self, attrs=None):
        self.doc.do_translation = True
        self._start_inline('``')

    def end_code(self):
        self.doc.do_translation = False
        self._end_inline('``')

    def code(self, s):
        if s:
            self.start_code()
            self.doc.write(s)
            self.end_code()

    def start_note(self, attrs=None):
        self.new_paragraph()
        self.doc.write('.. note::')
        self.indent()
        self.new_paragraph()

    def end_note(self):
        self.dedent()
        self.new_paragraph()

    def start_important(self, attrs=None):
        self.new_paragraph()
        self.doc.write('.. warning::')
        self.indent()
        self.new_paragraph()

    def end_important(self):
        self.dedent()
        self.new_paragraph()

    def start_danger(self, attrs=None):
        self.new_paragraph()
        self.doc.write('.. danger::')
        self.indent()
        self.new_paragraph()

    def end_danger(self):
        self.dedent()
        self.new_paragraph()

    def start_a(self, attrs=None):
        if attrs:
            for attr_key, attr_value in attrs:
                if attr_key == 'href':
                    # Remember the target; end_a() decides how to render
                    # the link once the title text has been written.
                    self.a_href = attr_value
                    self.doc.write('`')
        else:
            # There are some model documentation that
            # looks like this: <a>DescribeInstances</a>.
            # In this case we just write out an empty
            # string.
            self.doc.write(' ')
        self.doc.do_translation = True

    def link_target_definition(self, refname, link):
        self.doc.writeln('.. _%s: %s' % (refname, link))

    def sphinx_reference_label(self, label, text=None):
        if text is None:
            text = label
        if self.doc.target == 'html':
            self.doc.write(':ref:`%s <%s>`' % (text, label))
        else:
            self.doc.write(text)

    def end_a(self):
        self.doc.do_translation = False
        if self.a_href:
            last_write = self.doc.pop_write()
            last_write = last_write.rstrip(' ')
            if last_write and last_write != '`':
                # Normal case: link title text was written; escape any
                # colons so they aren't taken as ReST markup.
                if ':' in last_write:
                    last_write = last_write.replace(':', r'\:')
                self.doc.push_write(last_write)
                self.doc.push_write(' <%s>`__' % self.a_href)
            elif last_write == '`':
                # Look at start_a().  It will do a self.doc.write('`')
                # which is the start of the link title.  If that is the
                # case then there was no link text.  We should just
                # use an inline link.  The syntax of this is
                # `<link>`__
                self.doc.push_write('`<%s>`__' % self.a_href)
            else:
                self.doc.push_write(self.a_href)
                self.doc.hrefs[self.a_href] = self.a_href
                self.doc.write('`__')
            self.a_href = None
        self.doc.write(' ')

    def start_i(self, attrs=None):
        self.doc.do_translation = True
        self.start_italics()

    def end_i(self):
        self.doc.do_translation = False
        self.end_italics()

    def start_li(self, attrs=None):
        self.new_line()
        self.do_p = False
        self.doc.write('* ')

    def end_li(self):
        self.do_p = True
        self.new_line()

    def li(self, s):
        if s:
            self.start_li()
            self.doc.writeln(s)
            self.end_li()

    def start_ul(self, attrs=None):
        # Nested lists get one extra level of indentation.
        if self.list_depth != 0:
            self.indent()
        self.list_depth += 1
        self.new_paragraph()

    def end_ul(self):
        self.list_depth -= 1
        if self.list_depth != 0:
            self.dedent()
        self.new_paragraph()

    def start_ol(self, attrs=None):
        # TODO: Need to control the bullets used for LI items
        if self.list_depth != 0:
            self.indent()
        self.list_depth += 1
        self.new_paragraph()

    def end_ol(self):
        self.list_depth -= 1
        if self.list_depth != 0:
            self.dedent()
        self.new_paragraph()

    def start_examples(self, attrs=None):
        # Suppress document output until the matching end tag.
        self.doc.keep_data = False

    def end_examples(self):
        self.doc.keep_data = True

    def start_fullname(self, attrs=None):
        self.doc.keep_data = False

    def end_fullname(self):
        self.doc.keep_data = True

    def start_codeblock(self, attrs=None):
        self.doc.write('::')
        self.indent()
        self.new_paragraph()

    def end_codeblock(self):
        self.dedent()
        self.new_paragraph()

    def codeblock(self, code):
        """
        Literal code blocks are introduced by ending a paragraph with
        the special marker ::.  The literal block must be indented
        (and, like all paragraphs, separated from the surrounding
        ones by blank lines).
        """
        self.start_codeblock()
        self.doc.writeln(code)
        self.end_codeblock()

    def toctree(self):
        if self.doc.target == 'html':
            self.doc.write('\n.. toctree::\n')
            self.doc.write('  :maxdepth: 1\n')
            self.doc.write('  :titlesonly:\n\n')
        else:
            self.start_ul()

    def tocitem(self, item, file_name=None):
        if self.doc.target == 'man':
            self.li(item)
        else:
            if file_name:
                self.doc.writeln('  %s' % file_name)
            else:
                self.doc.writeln('  %s' % item)

    def hidden_toctree(self):
        if self.doc.target == 'html':
            self.doc.write('\n.. toctree::\n')
            self.doc.write('  :maxdepth: 1\n')
            self.doc.write('  :hidden:\n\n')

    def hidden_tocitem(self, item):
        if self.doc.target == 'html':
            self.tocitem(item)

    def table_of_contents(self, title=None, depth=None):
        self.doc.write('.. contents:: ')
        if title is not None:
            self.doc.writeln(title)
        if depth is not None:
            self.doc.writeln('  :depth: %s' % depth)

    def start_sphinx_py_class(self, class_name):
        self.new_paragraph()
        self.doc.write('.. py:class:: %s' % class_name)
        self.indent()
        self.new_paragraph()

    def end_sphinx_py_class(self):
        self.dedent()
        self.new_paragraph()

    def start_sphinx_py_method(self, method_name, parameters=None):
        self.new_paragraph()
        content = '.. py:method:: %s' % method_name
        if parameters is not None:
            content += '(%s)' % parameters
        self.doc.write(content)
        self.indent()
        self.new_paragraph()

    def end_sphinx_py_method(self):
        self.dedent()
        self.new_paragraph()

    def start_sphinx_py_attr(self, attr_name):
        self.new_paragraph()
        self.doc.write('.. py:attribute:: %s' % attr_name)
        self.indent()
        self.new_paragraph()

    def end_sphinx_py_attr(self):
        self.dedent()
        self.new_paragraph()

    def write_py_doc_string(self, docstring):
        docstring_lines = docstring.splitlines()
        for docstring_line in docstring_lines:
            self.doc.writeln(docstring_line)

    def external_link(self, title, link):
        if self.doc.target == 'html':
            self.doc.write('`%s <%s>`_' % (title, link))
        else:
            self.doc.write(title)

    def internal_link(self, title, page):
        if self.doc.target == 'html':
            self.doc.write(':doc:`%s <%s>`' % (title, page))
        else:
            self.doc.write(title)
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/bcdoc/textwriter.py0000644000000000000000000005020300000000000020233 0ustar00rootroot00000000000000# -*- coding: utf-8 -*-
"""
    Custom docutils writer for plain text.
    Based heavily on the Sphinx text writer. See copyright below.

    :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
    :license: BSD, see LICENSE for details.
""" import os import re import textwrap from docutils import nodes, writers class TextWrapper(textwrap.TextWrapper): """Custom subclass that uses a different word separator regex.""" wordsep_re = re.compile( r'(\s+|' # any whitespace r'(?<=\s)(?::[a-z-]+:)?`\S+|' # interpreted text start r'[^\s\w]*\w+[a-zA-Z]-(?=\w+[a-zA-Z])|' # hyphenated words r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash MAXWIDTH = 70 STDINDENT = 3 def my_wrap(text, width=MAXWIDTH, **kwargs): w = TextWrapper(width=width, **kwargs) return w.wrap(text) class TextWriter(writers.Writer): supported = ('text',) settings_spec = ('No options here.', '', ()) settings_defaults = {} output = None def __init__(self): writers.Writer.__init__(self) def translate(self): visitor = TextTranslator(self.document) self.document.walkabout(visitor) self.output = visitor.body class TextTranslator(nodes.NodeVisitor): sectionchars = '*=-~"+`' def __init__(self, document): nodes.NodeVisitor.__init__(self, document) self.nl = os.linesep self.states = [[]] self.stateindent = [0] self.list_counter = [] self.sectionlevel = 0 self.table = None def add_text(self, text): self.states[-1].append((-1, text)) def new_state(self, indent=STDINDENT): self.states.append([]) self.stateindent.append(indent) def end_state(self, wrap=True, end=[''], first=None): content = self.states.pop() maxindent = sum(self.stateindent) indent = self.stateindent.pop() result = [] toformat = [] def do_format(): if not toformat: return if wrap: res = my_wrap(''.join(toformat), width=MAXWIDTH-maxindent) else: res = ''.join(toformat).splitlines() if end: res += end result.append((indent, res)) for itemindent, item in content: if itemindent == -1: toformat.append(item) else: do_format() result.append((indent + itemindent, item)) toformat = [] do_format() if first is not None and result: itemindent, item = result[0] if item: result.insert(0, (itemindent - indent, [first + item[0]])) result[1] = (itemindent, item[1:]) self.states[-1].extend(result) def 
visit_document(self, node): self.new_state(0) def depart_document(self, node): self.end_state() self.body = self.nl.join(line and (' '*indent + line) for indent, lines in self.states[0] for line in lines) # XXX header/footer? def visit_highlightlang(self, node): raise nodes.SkipNode def visit_section(self, node): self._title_char = self.sectionchars[self.sectionlevel] self.sectionlevel += 1 def depart_section(self, node): self.sectionlevel -= 1 def visit_topic(self, node): self.new_state(0) def depart_topic(self, node): self.end_state() visit_sidebar = visit_topic depart_sidebar = depart_topic def visit_rubric(self, node): self.new_state(0) self.add_text('-[ ') def depart_rubric(self, node): self.add_text(' ]-') self.end_state() def visit_compound(self, node): pass def depart_compound(self, node): pass def visit_glossary(self, node): pass def depart_glossary(self, node): pass def visit_title(self, node): if isinstance(node.parent, nodes.Admonition): self.add_text(node.astext()+': ') raise nodes.SkipNode self.new_state(0) def depart_title(self, node): if isinstance(node.parent, nodes.section): char = self._title_char else: char = '^' text = ''.join(x[1] for x in self.states.pop() if x[0] == -1) self.stateindent.pop() self.states[-1].append((0, ['', text, '%s' % (char * len(text)), ''])) def visit_subtitle(self, node): pass def depart_subtitle(self, node): pass def visit_attribution(self, node): self.add_text('-- ') def depart_attribution(self, node): pass def visit_desc(self, node): pass def depart_desc(self, node): pass def visit_desc_signature(self, node): self.new_state(0) if node.parent['objtype'] in ('class', 'exception'): self.add_text('%s ' % node.parent['objtype']) def depart_desc_signature(self, node): # XXX: wrap signatures in a way that makes sense self.end_state(wrap=False, end=None) def visit_desc_name(self, node): pass def depart_desc_name(self, node): pass def visit_desc_addname(self, node): pass def depart_desc_addname(self, node): pass def 
visit_desc_type(self, node): pass def depart_desc_type(self, node): pass def visit_desc_returns(self, node): self.add_text(' -> ') def depart_desc_returns(self, node): pass def visit_desc_parameterlist(self, node): self.add_text('(') self.first_param = 1 def depart_desc_parameterlist(self, node): self.add_text(')') def visit_desc_parameter(self, node): if not self.first_param: self.add_text(', ') else: self.first_param = 0 self.add_text(node.astext()) raise nodes.SkipNode def visit_desc_optional(self, node): self.add_text('[') def depart_desc_optional(self, node): self.add_text(']') def visit_desc_annotation(self, node): pass def depart_desc_annotation(self, node): pass def visit_refcount(self, node): pass def depart_refcount(self, node): pass def visit_desc_content(self, node): self.new_state() self.add_text(self.nl) def depart_desc_content(self, node): self.end_state() def visit_figure(self, node): self.new_state() def depart_figure(self, node): self.end_state() def visit_caption(self, node): pass def depart_caption(self, node): pass def visit_productionlist(self, node): self.new_state() names = [] for production in node: names.append(production['tokenname']) maxlen = max(len(name) for name in names) for production in node: if production['tokenname']: self.add_text(production['tokenname'].ljust(maxlen) + ' ::=') lastname = production['tokenname'] else: self.add_text('%s ' % (' '*len(lastname))) self.add_text(production.astext() + self.nl) self.end_state(wrap=False) raise nodes.SkipNode def visit_seealso(self, node): self.new_state() def depart_seealso(self, node): self.end_state(first='') def visit_footnote(self, node): self._footnote = node.children[0].astext().strip() self.new_state(len(self._footnote) + 3) def depart_footnote(self, node): self.end_state(first='[%s] ' % self._footnote) def visit_citation(self, node): if len(node) and isinstance(node[0], nodes.label): self._citlabel = node[0].astext() else: self._citlabel = '' self.new_state(len(self._citlabel) 
+ 3) def depart_citation(self, node): self.end_state(first='[%s] ' % self._citlabel) def visit_label(self, node): raise nodes.SkipNode # XXX: option list could use some better styling def visit_option_list(self, node): pass def depart_option_list(self, node): pass def visit_option_list_item(self, node): self.new_state(0) def depart_option_list_item(self, node): self.end_state() def visit_option_group(self, node): self._firstoption = True def depart_option_group(self, node): self.add_text(' ') def visit_option(self, node): if self._firstoption: self._firstoption = False else: self.add_text(', ') def depart_option(self, node): pass def visit_option_string(self, node): pass def depart_option_string(self, node): pass def visit_option_argument(self, node): self.add_text(node['delimiter']) def depart_option_argument(self, node): pass def visit_description(self, node): pass def depart_description(self, node): pass def visit_tabular_col_spec(self, node): raise nodes.SkipNode def visit_colspec(self, node): self.table[0].append(node['colwidth']) raise nodes.SkipNode def visit_tgroup(self, node): pass def depart_tgroup(self, node): pass def visit_thead(self, node): pass def depart_thead(self, node): pass def visit_tbody(self, node): self.table.append('sep') def depart_tbody(self, node): pass def visit_row(self, node): self.table.append([]) def depart_row(self, node): pass def visit_entry(self, node): if 'morerows' in node or 'morecols' in node: raise NotImplementedError('Column or row spanning cells are ' 'not implemented.') self.new_state(0) def depart_entry(self, node): text = self.nl.join(self.nl.join(x[1]) for x in self.states.pop()) self.stateindent.pop() self.table[-1].append(text) def visit_table(self, node): if self.table: raise NotImplementedError('Nested tables are not supported.') self.new_state(0) self.table = [[]] def depart_table(self, node): lines = self.table[1:] fmted_rows = [] colwidths = self.table[0] realwidths = colwidths[:] separator = 0 # don't allow 
paragraphs in table cells for now for line in lines: if line == 'sep': separator = len(fmted_rows) else: cells = [] for i, cell in enumerate(line): par = my_wrap(cell, width=colwidths[i]) if par: maxwidth = max(map(len, par)) else: maxwidth = 0 realwidths[i] = max(realwidths[i], maxwidth) cells.append(par) fmted_rows.append(cells) def writesep(char='-'): out = ['+'] for width in realwidths: out.append(char * (width+2)) out.append('+') self.add_text(''.join(out) + self.nl) def writerow(row): lines = zip(*row) for line in lines: out = ['|'] for i, cell in enumerate(line): if cell: out.append(' ' + cell.ljust(realwidths[i]+1)) else: out.append(' ' * (realwidths[i] + 2)) out.append('|') self.add_text(''.join(out) + self.nl) for i, row in enumerate(fmted_rows): if separator and i == separator: writesep('=') else: writesep('-') writerow(row) writesep('-') self.table = None self.end_state(wrap=False) def visit_acks(self, node): self.new_state(0) self.add_text( ', '.join(n.astext() for n in node.children[0].children) + '.') self.end_state() raise nodes.SkipNode def visit_image(self, node): if 'alt' in node.attributes: self.add_text(_('[image: %s]') % node['alt']) self.add_text(_('[image]')) raise nodes.SkipNode def visit_transition(self, node): indent = sum(self.stateindent) self.new_state(0) self.add_text('=' * (MAXWIDTH - indent)) self.end_state() raise nodes.SkipNode def visit_bullet_list(self, node): self.list_counter.append(-1) def depart_bullet_list(self, node): self.list_counter.pop() def visit_enumerated_list(self, node): self.list_counter.append(0) def depart_enumerated_list(self, node): self.list_counter.pop() def visit_definition_list(self, node): self.list_counter.append(-2) def depart_definition_list(self, node): self.list_counter.pop() def visit_list_item(self, node): if self.list_counter[-1] == -1: # bullet list self.new_state(2) elif self.list_counter[-1] == -2: # definition list pass else: # enumerated list self.list_counter[-1] += 1 
self.new_state(len(str(self.list_counter[-1])) + 2) def depart_list_item(self, node): if self.list_counter[-1] == -1: self.end_state(first='* ', end=None) elif self.list_counter[-1] == -2: pass else: self.end_state(first='%s. ' % self.list_counter[-1], end=None) def visit_definition_list_item(self, node): self._li_has_classifier = len(node) >= 2 and \ isinstance(node[1], nodes.classifier) def depart_definition_list_item(self, node): pass def visit_term(self, node): self.new_state(0) def depart_term(self, node): if not self._li_has_classifier: self.end_state(end=None) def visit_termsep(self, node): self.add_text(', ') raise nodes.SkipNode def visit_classifier(self, node): self.add_text(' : ') def depart_classifier(self, node): self.end_state(end=None) def visit_definition(self, node): self.new_state() def depart_definition(self, node): self.end_state() def visit_field_list(self, node): pass def depart_field_list(self, node): pass def visit_field(self, node): pass def depart_field(self, node): pass def visit_field_name(self, node): self.new_state(0) def depart_field_name(self, node): self.add_text(':') self.end_state(end=None) def visit_field_body(self, node): self.new_state() def depart_field_body(self, node): self.end_state() def visit_centered(self, node): pass def depart_centered(self, node): pass def visit_hlist(self, node): pass def depart_hlist(self, node): pass def visit_hlistcol(self, node): pass def depart_hlistcol(self, node): pass def visit_admonition(self, node): self.new_state(0) def depart_admonition(self, node): self.end_state() def visit_versionmodified(self, node): self.new_state(0) def depart_versionmodified(self, node): self.end_state() def visit_literal_block(self, node): self.new_state() def depart_literal_block(self, node): self.end_state(wrap=False) def visit_doctest_block(self, node): self.new_state(0) def depart_doctest_block(self, node): self.end_state(wrap=False) def visit_line_block(self, node): self.new_state(0) def 
depart_line_block(self, node): self.end_state(wrap=False) def visit_line(self, node): pass def depart_line(self, node): pass def visit_block_quote(self, node): self.new_state() def depart_block_quote(self, node): self.end_state() def visit_compact_paragraph(self, node): pass def depart_compact_paragraph(self, node): pass def visit_paragraph(self, node): self.new_state(0) def depart_paragraph(self, node): self.end_state() def visit_target(self, node): raise nodes.SkipNode def visit_index(self, node): raise nodes.SkipNode def visit_substitution_definition(self, node): raise nodes.SkipNode def visit_pending_xref(self, node): pass def depart_pending_xref(self, node): pass def visit_reference(self, node): pass def depart_reference(self, node): pass def visit_download_reference(self, node): pass def depart_download_reference(self, node): pass def visit_emphasis(self, node): self.add_text('*') def depart_emphasis(self, node): self.add_text('*') def visit_literal_emphasis(self, node): self.add_text('*') def depart_literal_emphasis(self, node): self.add_text('*') def visit_strong(self, node): self.add_text('**') def depart_strong(self, node): self.add_text('**') def visit_abbreviation(self, node): self.add_text('') def depart_abbreviation(self, node): if node.hasattr('explanation'): self.add_text(' (%s)' % node['explanation']) def visit_title_reference(self, node): self.add_text('*') def depart_title_reference(self, node): self.add_text('*') def visit_literal(self, node): self.add_text('"') def depart_literal(self, node): self.add_text('"') def visit_subscript(self, node): self.add_text('_') def depart_subscript(self, node): pass def visit_superscript(self, node): self.add_text('^') def depart_superscript(self, node): pass def visit_footnote_reference(self, node): self.add_text('[%s]' % node.astext()) raise nodes.SkipNode def visit_citation_reference(self, node): self.add_text('[%s]' % node.astext()) raise nodes.SkipNode def visit_Text(self, node): 
self.add_text(node.astext()) def depart_Text(self, node): pass def visit_generated(self, node): pass def depart_generated(self, node): pass def visit_inline(self, node): pass def depart_inline(self, node): pass def visit_problematic(self, node): self.add_text('>>') def depart_problematic(self, node): self.add_text('<<') def visit_system_message(self, node): self.new_state(0) self.add_text('' % node.astext()) self.end_state() raise nodes.SkipNode def visit_comment(self, node): raise nodes.SkipNode def visit_meta(self, node): # only valid for HTML raise nodes.SkipNode def visit_raw(self, node): if 'text' in node.get('format', '').split(): self.body.append(node.astext()) raise nodes.SkipNode def _visit_admonition(self, node): self.new_state(2) def _make_depart_admonition(name): def depart_admonition(self, node): self.end_state(first=name.capitalize() + ': ') return depart_admonition visit_attention = _visit_admonition depart_attention = _make_depart_admonition('attention') visit_caution = _visit_admonition depart_caution = _make_depart_admonition('caution') visit_danger = _visit_admonition depart_danger = _make_depart_admonition('danger') visit_error = _visit_admonition depart_error = _make_depart_admonition('error') visit_hint = _visit_admonition depart_hint = _make_depart_admonition('hint') visit_important = _visit_admonition depart_important = _make_depart_admonition('important') visit_note = _visit_admonition depart_note = _make_depart_admonition('note') visit_tip = _visit_admonition depart_tip = _make_depart_admonition('tip') visit_warning = _visit_admonition depart_warning = _make_depart_admonition('warning') def unknown_visit(self, node): raise NotImplementedError('Unknown node: ' + node.__class__.__name__) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/clidocs.py0000644000000000000000000007107400000000000016371 0ustar00rootroot00000000000000# Copyright 2012-2013 Amazon.com, Inc. 
or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import logging import os from botocore import xform_name from botocore.model import StringShape from botocore.utils import is_json_value_header from awscli import SCALAR_TYPES from awscli.argprocess import ParamShorthandDocGen from awscli.bcdoc.docevents import DOC_EVENTS from awscli.topictags import TopicTagDB from awscli.utils import ( find_service_and_method_in_event_name, is_document_type, operation_uses_document_types ) LOG = logging.getLogger(__name__) class CLIDocumentEventHandler(object): def __init__(self, help_command): self.help_command = help_command self.register(help_command.session, help_command.event_class) self._arg_groups = self._build_arg_table_groups(help_command) self._documented_arg_groups = [] def _build_arg_table_groups(self, help_command): arg_groups = {} for name, arg in help_command.arg_table.items(): if arg.group_name is not None: arg_groups.setdefault(arg.group_name, []).append(arg) return arg_groups def _get_argument_type_name(self, shape, default): if is_json_value_header(shape): return 'JSON' if is_document_type(shape): return 'document' return default def _map_handlers(self, session, event_class, mapfn): for event in DOC_EVENTS: event_handler_name = event.replace('-', '_') if hasattr(self, event_handler_name): event_handler = getattr(self, event_handler_name) format_string = DOC_EVENTS[event] num_args = len(format_string.split('.')) - 2 format_args = (event_class,) + ('*',) * num_args event_string = event + 
format_string % format_args unique_id = event_class + event_handler_name mapfn(event_string, event_handler, unique_id) def register(self, session, event_class): """ The default register iterates through all of the available document events and looks for a corresponding handler method defined in the object. If it's there, that handler method will be registered for the all events of that type for the specified ``event_class``. """ self._map_handlers(session, event_class, session.register) def unregister(self): """ The default unregister iterates through all of the available document events and looks for a corresponding handler method defined in the object. If it's there, that handler method will be unregistered for the all events of that type for the specified ``event_class``. """ self._map_handlers(self.help_command.session, self.help_command.event_class, self.help_command.session.unregister) # These are default doc handlers that apply in the general case. def doc_breadcrumbs(self, help_command, **kwargs): doc = help_command.doc if doc.target != 'man': cmd_names = help_command.event_class.split('.') doc.write('[ ') doc.write(':ref:`aws `') full_cmd_list = ['aws'] for cmd in cmd_names[:-1]: doc.write(' . ') full_cmd_list.append(cmd) full_cmd_name = ' '.join(full_cmd_list) doc.write(':ref:`%s `' % (cmd, full_cmd_name)) doc.write(' ]') def doc_title(self, help_command, **kwargs): doc = help_command.doc doc.style.new_paragraph() reference = help_command.event_class.replace('.', ' ') if reference != 'aws': reference = 'aws ' + reference doc.writeln('.. 
_cli:%s:' % reference) doc.style.h1(help_command.name) def doc_description(self, help_command, **kwargs): doc = help_command.doc doc.style.h2('Description') doc.include_doc_string(help_command.description) doc.style.new_paragraph() def doc_synopsis_start(self, help_command, **kwargs): self._documented_arg_groups = [] doc = help_command.doc doc.style.h2('Synopsis') doc.style.start_codeblock() doc.writeln('%s' % help_command.name) def doc_synopsis_option(self, arg_name, help_command, **kwargs): doc = help_command.doc argument = help_command.arg_table[arg_name] if argument.group_name in self._arg_groups: if argument.group_name in self._documented_arg_groups: # This arg is already documented so we can move on. return option_str = ' | '.join( [a.cli_name for a in self._arg_groups[argument.group_name]]) self._documented_arg_groups.append(argument.group_name) elif argument.cli_name.startswith('--'): option_str = '%s ' % argument.cli_name else: option_str = '<%s>' % argument.cli_name if not (argument.required or getattr(argument, '_DOCUMENT_AS_REQUIRED', False)): option_str = '[%s]' % option_str doc.writeln('%s' % option_str) def doc_synopsis_end(self, help_command, **kwargs): doc = help_command.doc doc.style.end_codeblock() # Reset the documented arg groups for other sections # that may document args (the detailed docs following # the synopsis). self._documented_arg_groups = [] def doc_options_start(self, help_command, **kwargs): doc = help_command.doc doc.style.h2('Options') if not help_command.arg_table: doc.write('*None*\n') def doc_option(self, arg_name, help_command, **kwargs): doc = help_command.doc argument = help_command.arg_table[arg_name] if argument.group_name in self._arg_groups: if argument.group_name in self._documented_arg_groups: # This arg is already documented so we can move on. 
return name = ' | '.join( ['``%s``' % a.cli_name for a in self._arg_groups[argument.group_name]]) self._documented_arg_groups.append(argument.group_name) else: name = '``%s``' % argument.cli_name doc.write('%s (%s)\n' % (name, self._get_argument_type_name( argument.argument_model, argument.cli_type_name))) doc.style.indent() doc.include_doc_string(argument.documentation) if hasattr(argument, 'argument_model'): self._document_enums(argument.argument_model, doc) self._document_nested_structure(argument.argument_model, doc) doc.style.dedent() doc.style.new_paragraph() def doc_relateditems_start(self, help_command, **kwargs): if help_command.related_items: doc = help_command.doc doc.style.h2('See Also') def doc_relateditem(self, help_command, related_item, **kwargs): doc = help_command.doc doc.write('* ') doc.style.sphinx_reference_label( label='cli:%s' % related_item, text=related_item ) doc.write('\n') def _document_enums(self, model, doc): """Documents top-level parameter enums""" if isinstance(model, StringShape): if model.enum: doc.style.new_paragraph() doc.write('Possible values:') doc.style.start_ul() for enum in model.enum: doc.style.li('``%s``' % enum) doc.style.end_ul() def _document_nested_structure(self, model, doc): """Recursively documents parameters in nested structures""" member_type_name = getattr(model, 'type_name', None) if member_type_name == 'structure': for member_name, member_shape in model.members.items(): self._doc_member(doc, member_name, member_shape, stack=[model.name]) elif member_type_name == 'list': self._doc_member(doc, '', model.member, stack=[model.name]) elif member_type_name == 'map': key_shape = model.key key_name = key_shape.serialization.get('name', 'key') self._doc_member(doc, key_name, key_shape, stack=[model.name]) value_shape = model.value value_name = value_shape.serialization.get('name', 'value') self._doc_member(doc, value_name, value_shape, stack=[model.name]) def _doc_member(self, doc, member_name, member_shape, stack): 
        if member_shape.name in stack:
            # Document the recursion once, otherwise just
            # note the fact that it's recursive and return.
            if stack.count(member_shape.name) > 1:
                if member_shape.type_name == 'structure':
                    doc.write('( ... recursive ... )')
                return
        stack.append(member_shape.name)
        try:
            self._do_doc_member(doc, member_name, member_shape, stack)
        finally:
            # Always unwind the recursion stack, even if rendering raises.
            stack.pop()

    def _do_doc_member(self, doc, member_name, member_shape, stack):
        """Render one shape member (name, type, docs) and recurse into it.

        ``stack`` is the list of shape names currently being documented;
        it is how _doc_member detects and truncates recursive shapes.
        """
        docs = member_shape.documentation
        type_name = self._get_argument_type_name(
            member_shape, member_shape.type_name)
        if member_name:
            doc.write('%s -> (%s)' % (member_name, type_name))
        else:
            # Anonymous member (e.g. a list element) — type only.
            doc.write('(%s)' % type_name)
        doc.style.indent()
        doc.style.new_paragraph()
        doc.include_doc_string(docs)
        doc.style.new_paragraph()
        member_type_name = member_shape.type_name
        if member_type_name == 'structure':
            for sub_name, sub_shape in member_shape.members.items():
                self._doc_member(doc, sub_name, sub_shape, stack)
        elif member_type_name == 'map':
            # Maps document their key shape then their value shape, using
            # the serialized names when the model provides them.
            key_shape = member_shape.key
            key_name = key_shape.serialization.get('name', 'key')
            self._doc_member(doc, key_name, key_shape, stack)
            value_shape = member_shape.value
            value_name = value_shape.serialization.get('name', 'value')
            self._doc_member(doc, value_name, value_shape, stack)
        elif member_type_name == 'list':
            self._doc_member(doc, '', member_shape.member, stack)
        doc.style.dedent()
        doc.style.new_paragraph()


class ProviderDocumentEventHandler(CLIDocumentEventHandler):
    """Doc-event handler for the top-level ``aws`` provider help page."""

    def doc_breadcrumbs(self, help_command, event_name, **kwargs):
        # The provider page is the root; it has no breadcrumb trail.
        pass

    def doc_synopsis_start(self, help_command, **kwargs):
        doc = help_command.doc
        doc.style.h2('Synopsis')
        doc.style.codeblock(help_command.synopsis)
        doc.include_doc_string(help_command.help_usage)

    def doc_synopsis_option(self, arg_name, help_command, **kwargs):
        # Options are not listed per-argument in the provider synopsis.
        pass

    def doc_synopsis_end(self, help_command, **kwargs):
        doc = help_command.doc
        doc.style.new_paragraph()

    def doc_options_start(self, help_command, **kwargs):
        doc = help_command.doc
        doc.style.h2('Options')

    def doc_option(self, arg_name,
help_command, **kwargs): doc = help_command.doc argument = help_command.arg_table[arg_name] doc.writeln('``%s`` (%s)' % (argument.cli_name, argument.cli_type_name)) doc.include_doc_string(argument.documentation) if argument.choices: doc.style.start_ul() for choice in argument.choices: doc.style.li(choice) doc.style.end_ul() def doc_subitems_start(self, help_command, **kwargs): doc = help_command.doc doc.style.h2('Available Services') doc.style.toctree() def doc_subitem(self, command_name, help_command, **kwargs): doc = help_command.doc file_name = '%s/index' % command_name doc.style.tocitem(command_name, file_name=file_name) class ServiceDocumentEventHandler(CLIDocumentEventHandler): # A service document has no synopsis. def doc_synopsis_start(self, help_command, **kwargs): pass def doc_synopsis_option(self, arg_name, help_command, **kwargs): pass def doc_synopsis_end(self, help_command, **kwargs): pass # A service document has no option section. def doc_options_start(self, help_command, **kwargs): pass def doc_option(self, arg_name, help_command, **kwargs): pass def doc_option_example(self, arg_name, help_command, **kwargs): pass def doc_options_end(self, help_command, **kwargs): pass def doc_description(self, help_command, **kwargs): doc = help_command.doc service_model = help_command.obj doc.style.h2('Description') # TODO: need a documentation attribute. doc.include_doc_string(service_model.documentation) def doc_subitems_start(self, help_command, **kwargs): doc = help_command.doc doc.style.h2('Available Commands') doc.style.toctree() def doc_subitem(self, command_name, help_command, **kwargs): doc = help_command.doc subcommand = help_command.command_table[command_name] subcommand_table = getattr(subcommand, 'subcommand_table', {}) # If the subcommand table has commands in it, # direct the subitem to the command's index because # it has more subcommands to be documented. 
if (len(subcommand_table) > 0): file_name = '%s/index' % command_name doc.style.tocitem(command_name, file_name=file_name) else: doc.style.tocitem(command_name) class OperationDocumentEventHandler(CLIDocumentEventHandler): AWS_DOC_BASE = 'https://docs.aws.amazon.com/goto/WebAPI' def doc_description(self, help_command, **kwargs): doc = help_command.doc operation_model = help_command.obj doc.style.h2('Description') doc.include_doc_string(operation_model.documentation) self._add_webapi_crosslink(help_command) self._add_top_level_args_reference(help_command) self._add_note_for_document_types_if_used(help_command) def _add_top_level_args_reference(self, help_command): help_command.doc.writeln('') help_command.doc.write("See ") help_command.doc.style.internal_link( title="'aws help'", page='/reference/index' ) help_command.doc.writeln(' for descriptions of global parameters.') def _add_webapi_crosslink(self, help_command): doc = help_command.doc operation_model = help_command.obj service_model = operation_model.service_model service_uid = service_model.metadata.get('uid') if service_uid is None: # If there's no service_uid in the model, we can't # be certain if the generated cross link will work # so we don't generate any crosslink info. return doc.style.new_paragraph() doc.write("See also: ") link = '%s/%s/%s' % (self.AWS_DOC_BASE, service_uid, operation_model.name) doc.style.external_link(title="AWS API Documentation", link=link) doc.writeln('') def _add_note_for_document_types_if_used(self, help_command): if operation_uses_document_types(help_command.obj): help_command.doc.style.new_paragraph() help_command.doc.writeln( '``%s`` uses document type values. Document types follow the ' 'JSON data model where valid values are: strings, numbers, ' 'booleans, null, arrays, and objects. For command input, ' 'options and nested parameters that are labeled with the type ' '``document`` must be provided as JSON. Shorthand syntax does ' 'not support document types.' 
            % help_command.name
        )

    def _json_example_value_name(self, argument_model,
                                 include_enum_values=True):
        """Return a sample JSON value string for a scalar shape.

        Strings render as '"string"' (or the '|'-joined enum choices when
        include_enum_values is True), booleans as 'true|false', and any
        other scalar as its type name.
        """
        # If include_enum_values is True, then the valid enum values
        # are included as the sample JSON value.
        if isinstance(argument_model, StringShape):
            if argument_model.enum and include_enum_values:
                choices = argument_model.enum
                return '|'.join(['"%s"' % c for c in choices])
            else:
                return '"string"'
        elif argument_model.type_name == 'boolean':
            return 'true|false'
        else:
            return '%s' % argument_model.type_name

    def _json_example(self, doc, argument_model, stack):
        """Render a JSON skeleton for ``argument_model``, guarding recursion.

        ``stack`` tracks shape names on the current path; a shape seen
        more than once is truncated with a '{ ... recursive ... }' marker.
        """
        if argument_model.name in stack:
            # Document the recursion once, otherwise just
            # note the fact that it's recursive and return.
            if stack.count(argument_model.name) > 1:
                if argument_model.type_name == 'structure':
                    doc.write('{ ... recursive ... }')
                return
        stack.append(argument_model.name)
        try:
            self._do_json_example(doc, argument_model, stack)
        finally:
            # Pop even on error so sibling members see a clean stack.
            stack.pop()

    def _do_json_example(self, doc, argument_model, stack):
        if argument_model.type_name == 'list':
            doc.write('[')
            if argument_model.member.type_name in SCALAR_TYPES:
                # Lists of scalars render inline: [value, ...]
                doc.write('%s, ...'
% self._json_example_value_name(argument_model.member)) else: doc.style.indent() doc.style.new_line() self._json_example(doc, argument_model.member, stack) doc.style.new_line() doc.write('...') doc.style.dedent() doc.style.new_line() doc.write(']') elif argument_model.type_name == 'map': doc.write('{') doc.style.indent() key_string = self._json_example_value_name(argument_model.key) doc.write('%s: ' % key_string) if argument_model.value.type_name in SCALAR_TYPES: doc.write(self._json_example_value_name(argument_model.value)) else: doc.style.indent() self._json_example(doc, argument_model.value, stack) doc.style.dedent() doc.style.new_line() doc.write('...') doc.style.dedent() doc.write('}') elif argument_model.type_name == 'structure': if argument_model.is_document_type: self._doc_document_member(doc) else: self._doc_input_structure_members(doc, argument_model, stack) def _doc_document_member(self, doc): doc.write('{...}') def _doc_input_structure_members(self, doc, argument_model, stack): doc.write('{') doc.style.indent() doc.style.new_line() members = argument_model.members for i, member_name in enumerate(members): member_model = members[member_name] member_type_name = member_model.type_name if member_type_name in SCALAR_TYPES: doc.write('"%s": %s' % (member_name, self._json_example_value_name(member_model))) elif member_type_name == 'structure': doc.write('"%s": ' % member_name) self._json_example(doc, member_model, stack) elif member_type_name == 'map': doc.write('"%s": ' % member_name) self._json_example(doc, member_model, stack) elif member_type_name == 'list': doc.write('"%s": ' % member_name) self._json_example(doc, member_model, stack) if i < len(members) - 1: doc.write(',') doc.style.new_line() doc.style.dedent() doc.style.new_line() doc.write('}') def doc_option_example(self, arg_name, help_command, event_name, **kwargs): service_id, operation_name = \ find_service_and_method_in_event_name(event_name) doc = help_command.doc cli_argument = 
help_command.arg_table[arg_name] if cli_argument.group_name in self._arg_groups: if cli_argument.group_name in self._documented_arg_groups: # Args with group_names (boolean args) don't # need to generate example syntax. return argument_model = cli_argument.argument_model docgen = ParamShorthandDocGen() if docgen.supports_shorthand(cli_argument.argument_model): example_shorthand_syntax = docgen.generate_shorthand_example( cli_argument, service_id, operation_name) if example_shorthand_syntax is None: # If the shorthand syntax returns a value of None, # this indicates to us that there is no example # needed for this param so we can immediately # return. return if example_shorthand_syntax: doc.style.new_paragraph() doc.write('Shorthand Syntax') doc.style.start_codeblock() for example_line in example_shorthand_syntax.splitlines(): doc.writeln(example_line) doc.style.end_codeblock() if argument_model is not None and argument_model.type_name == 'list' and \ argument_model.member.type_name in SCALAR_TYPES: # A list of scalars is special. While you *can* use # JSON ( ["foo", "bar", "baz"] ), you can also just # use the argparse behavior of space separated lists. # "foo" "bar" "baz". In fact we don't even want to # document the JSON syntax in this case. member = argument_model.member doc.style.new_paragraph() doc.write('Syntax') doc.style.start_codeblock() example_type = self._json_example_value_name( member, include_enum_values=False) doc.write('%s %s ...' % (example_type, example_type)) if isinstance(member, StringShape) and member.enum: # If we have enum values, we can tell the user # exactly what valid values they can provide. 
self._write_valid_enums(doc, member.enum) doc.style.end_codeblock() doc.style.new_paragraph() elif cli_argument.cli_type_name not in SCALAR_TYPES: doc.style.new_paragraph() doc.write('JSON Syntax') doc.style.start_codeblock() self._json_example(doc, argument_model, stack=[]) doc.style.end_codeblock() doc.style.new_paragraph() def _write_valid_enums(self, doc, enum_values): doc.style.new_paragraph() doc.write("Where valid values are:\n") for value in enum_values: doc.write(" %s\n" % value) doc.write("\n") def doc_output(self, help_command, event_name, **kwargs): doc = help_command.doc doc.style.h2('Output') operation_model = help_command.obj output_shape = operation_model.output_shape if output_shape is None or not output_shape.members: doc.write('None') else: for member_name, member_shape in output_shape.members.items(): self._doc_member(doc, member_name, member_shape, stack=[]) def doc_options_end(self, help_command, **kwargs): self._add_top_level_args_reference(help_command) class TopicListerDocumentEventHandler(CLIDocumentEventHandler): DESCRIPTION = ( 'This is the AWS CLI Topic Guide. It gives access to a set ' 'of topics that provide a deeper understanding of the CLI. To access ' 'the list of topics from the command line, run ``aws help topics``. 
' 'To access a specific topic from the command line, run ' '``aws help [topicname]``, where ``topicname`` is the name of the ' 'topic as it appears in the output from ``aws help topics``.') def __init__(self, help_command): self.help_command = help_command self.register(help_command.session, help_command.event_class) self._topic_tag_db = TopicTagDB() self._topic_tag_db.load_json_index() def doc_breadcrumbs(self, help_command, **kwargs): doc = help_command.doc if doc.target != 'man': doc.write('[ ') doc.style.sphinx_reference_label(label='cli:aws', text='aws') doc.write(' ]') def doc_title(self, help_command, **kwargs): doc = help_command.doc doc.style.new_paragraph() doc.style.link_target_definition( refname='cli:aws help %s' % self.help_command.name, link='') doc.style.h1('AWS CLI Topic Guide') def doc_description(self, help_command, **kwargs): doc = help_command.doc doc.style.h2('Description') doc.include_doc_string(self.DESCRIPTION) doc.style.new_paragraph() def doc_synopsis_start(self, help_command, **kwargs): pass def doc_synopsis_end(self, help_command, **kwargs): pass def doc_options_start(self, help_command, **kwargs): pass def doc_options_end(self, help_command, **kwargs): pass def doc_subitems_start(self, help_command, **kwargs): doc = help_command.doc doc.style.h2('Available Topics') categories = self._topic_tag_db.query('category') topic_names = self._topic_tag_db.get_all_topic_names() # Sort the categories category_names = sorted(categories.keys()) for category_name in category_names: doc.style.h3(category_name) doc.style.new_paragraph() # Write out the topic and a description for each topic under # each category. 
for topic_name in sorted(categories[category_name]): description = self._topic_tag_db.get_tag_single_value( topic_name, 'description') doc.write('* ') doc.style.sphinx_reference_label( label='cli:aws help %s' % topic_name, text=topic_name ) doc.write(': %s\n' % description) # Add a hidden toctree to make sure everything is connected in # the document. doc.style.hidden_toctree() for topic_name in topic_names: doc.style.hidden_tocitem(topic_name) class TopicDocumentEventHandler(TopicListerDocumentEventHandler): def doc_breadcrumbs(self, help_command, **kwargs): doc = help_command.doc if doc.target != 'man': doc.write('[ ') doc.style.sphinx_reference_label(label='cli:aws', text='aws') doc.write(' . ') doc.style.sphinx_reference_label( label='cli:aws help topics', text='topics' ) doc.write(' ]') def doc_title(self, help_command, **kwargs): doc = help_command.doc doc.style.new_paragraph() doc.style.link_target_definition( refname='cli:aws help %s' % self.help_command.name, link='') title = self._topic_tag_db.get_tag_single_value( help_command.name, 'title') doc.style.h1(title) def doc_description(self, help_command, **kwargs): doc = help_command.doc topic_filename = os.path.join(self._topic_tag_db.topic_dir, help_command.name + '.rst') contents = self._remove_tags_from_content(topic_filename) doc.writeln(contents) doc.style.new_paragraph() def _remove_tags_from_content(self, filename): with open(filename, 'r') as f: lines = f.readlines() content_begin_index = 0 for i, line in enumerate(lines): # If a line is encountered that does not begin with the tag # end the search for tags and mark where tags end. if not self._line_has_tag(line): content_begin_index = i break # Join all of the non-tagged lines back together. 
return ''.join(lines[content_begin_index:]) def _line_has_tag(self, line): for tag in self._topic_tag_db.valid_tags: if line.startswith(':' + tag + ':'): return True return False def doc_subitems_start(self, help_command, **kwargs): pass ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/clidriver.py0000644000000000000000000006470300000000000016735 0ustar00rootroot00000000000000# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import sys import signal import logging import botocore.session from botocore import __version__ as botocore_version from botocore.hooks import HierarchicalEmitter from botocore import xform_name from botocore.compat import copy_kwargs, OrderedDict from botocore.exceptions import NoCredentialsError from botocore.exceptions import NoRegionError from botocore.exceptions import ProfileNotFound from botocore.history import get_global_history_recorder from awscli import EnvironmentVariables, __version__ from awscli.compat import get_stderr_text_writer from awscli.formatter import get_formatter from awscli.plugin import load_plugins from awscli.commands import CLICommand from awscli.compat import six from awscli.argparser import MainArgParser from awscli.argparser import ServiceArgParser from awscli.argparser import ArgTableArgParser from awscli.argparser import USAGE from awscli.help import ProviderHelpCommand from awscli.help import ServiceHelpCommand from awscli.help import OperationHelpCommand from awscli.arguments import CustomArgument from awscli.arguments import ListArgument from awscli.arguments import BooleanArgument from awscli.arguments import CLIArgument from awscli.arguments import UnknownArgumentError from awscli.argprocess import unpack_argument from awscli.alias import AliasLoader from awscli.alias import AliasCommandInjector from awscli.utils import emit_top_level_args_parsed_event from awscli.utils import write_exception LOG = logging.getLogger('awscli.clidriver') LOG_FORMAT = ( '%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s') HISTORY_RECORDER = get_global_history_recorder() # Don't remove this line. The idna encoding # is used by getaddrinfo when dealing with unicode hostnames, # and in some cases, there appears to be a race condition # where threads will get a LookupError on getaddrinfo() saying # that the encoding doesn't exist. 
# Force-import the idna codec.  getaddrinfo() uses it for unicode
# hostnames and there appears to be a race condition where threads can
# get a LookupError from the codecs registry saying the encoding doesn't
# exist; encoding once here (before any CLI threads are created)
# registers encodings.idna in the codecs registry for good.
# See: https://bugs.python.org/issue29288
u''.encode('idna')


def main():
    """CLI entry point: build a driver, run it, record the return code."""
    driver = create_clidriver()
    rc = driver.main()
    HISTORY_RECORDER.record('CLI_RC', rc, 'CLI')
    return rc


def create_clidriver():
    """Create a CLIDriver backed by a fresh botocore session.

    The session's user agent is configured and any plugins listed in the
    session's full config are loaded before the driver is constructed.
    """
    session = botocore.session.Session(EnvironmentVariables)
    _set_user_agent_for_session(session)
    load_plugins(session.full_config.get('plugins', {}),
                 event_hooks=session.get_component('event_emitter'))
    driver = CLIDriver(session=session)
    return driver


def _set_user_agent_for_session(session):
    # Requests report as "aws-cli/<version> ... botocore/<version>".
    session.user_agent_name = 'aws-cli'
    session.user_agent_version = __version__
    session.user_agent_extra = 'botocore/%s' % botocore_version


class CLIDriver(object):
    """Top-level driver: parses global args and dispatches to a command."""

    def __init__(self, session=None):
        """
        :type session: ``botocore.session.Session`` or None
        :param session: Session to use; a new one is created if None.
        """
        if session is None:
            self.session = botocore.session.get_session(EnvironmentVariables)
            _set_user_agent_for_session(self.session)
        else:
            self.session = session
        # Lazily-built caches; see the corresponding _get_* methods.
        self._cli_data = None
        self._command_table = None
        self._argument_table = None
        self.alias_loader = AliasLoader()

    def _get_cli_data(self):
        # Not crazy about this but the data in here is needed in
        # several places (e.g. MainArgParser, ProviderHelp) so
        # we load it here once.
        if self._cli_data is None:
            self._cli_data = self.session.get_data('cli')
        return self._cli_data

    def _get_command_table(self):
        if self._command_table is None:
            self._command_table = self._build_command_table()
        return self._command_table

    def _get_argument_table(self):
        if self._argument_table is None:
            self._argument_table = self._build_argument_table()
        return self._argument_table

    def _build_command_table(self):
        """
        Create the main parser to handle the global arguments.

        :rtype: ``argparser.ArgumentParser``
        :return: The parser object

        """
        command_table = self._build_builtin_commands(self.session)
        # Let plugins add to / replace entries in the command table.
        self.session.emit('building-command-table.main',
                          command_table=command_table,
                          session=self.session,
                          command_object=self)
        return command_table

    def _build_builtin_commands(self, session):
        """Return an OrderedDict of ServiceCommand per available service."""
        commands = OrderedDict()
        services = session.get_available_services()
        for service_name in services:
            commands[service_name] = ServiceCommand(cli_name=service_name,
                                                    session=self.session,
                                                    service_name=service_name)
        return commands

    def _add_aliases(self, command_table, parser):
        # NOTE(review): the passed-in ``parser`` is immediately replaced
        # by a freshly created one (_create_parser also registers the
        # 'help' command in command_table) — confirm whether the
        # parameter is still needed by callers.
        parser = self._create_parser(command_table)
        injector = AliasCommandInjector(
            self.session, self.alias_loader)
        injector.inject_aliases(command_table, parser)

    def _build_argument_table(self):
        """Build the table of top-level (global) CLI arguments."""
        argument_table = OrderedDict()
        cli_data = self._get_cli_data()
        cli_arguments = cli_data.get('options', None)
        for option in cli_arguments:
            option_params = copy_kwargs(cli_arguments[option])
            cli_argument = self._create_cli_argument(option, option_params)
            cli_argument.add_to_arg_table(argument_table)
        # Then the final step is to send out an event so handlers
        # can add extra arguments or modify existing arguments.
        self.session.emit('building-top-level-params',
                          argument_table=argument_table)
        return argument_table

    def _create_cli_argument(self, option_name, option_params):
        """Turn one 'options' entry from the cli data into a CustomArgument."""
        return CustomArgument(
            option_name, help_text=option_params.get('help', ''),
            dest=option_params.get('dest'),
            default=option_params.get('default'),
            action=option_params.get('action'),
            required=option_params.get('required'),
            choices=option_params.get('choices'),
            cli_type_name=option_params.get('type'))

    def create_help_command(self):
        """Create the provider-level help command (``aws help``)."""
        cli_data = self._get_cli_data()
        return ProviderHelpCommand(self.session, self._get_command_table(),
                                   self._get_argument_table(),
                                   cli_data.get('description', None),
                                   cli_data.get('synopsis', None),
                                   cli_data.get('help_usage', None))

    def _create_parser(self, command_table):
        # Also add a 'help' command.
        command_table['help'] = self.create_help_command()
        cli_data = self._get_cli_data()
        parser = MainArgParser(
            command_table, self.session.user_agent(),
            cli_data.get('description', None),
            self._get_argument_table(),
            prog="aws")
        return parser

    def main(self, args=None):
        """
        Parse global args and dispatch to the selected command.

        :param args: List of arguments, with the 'aws' removed.  For example,
            the command "aws s3 list-objects --bucket foo" will have an
            args list of ``['s3', 'list-objects', '--bucket', 'foo']``.

        :rtype: int
        :return: Process return code (0 on success; 255 for most errors;
            130 for SIGINT).
        """
        if args is None:
            args = sys.argv[1:]
        command_table = self._get_command_table()
        parser = self._create_parser(command_table)
        self._add_aliases(command_table, parser)
        parsed_args, remaining = parser.parse_known_args(args)
        try:
            # Because _handle_top_level_args emits events, it's possible
            # that exceptions can be raised, which should have the same
            # general exception handling logic as calling into the
            # command table.  This is why it's in the try/except clause.
            self._handle_top_level_args(parsed_args)
            self._emit_session_event(parsed_args)
            HISTORY_RECORDER.record(
                'CLI_VERSION', self.session.user_agent(), 'CLI')
            HISTORY_RECORDER.record('CLI_ARGUMENTS', args, 'CLI')
            return command_table[parsed_args.command](remaining, parsed_args)
        except UnknownArgumentError as e:
            sys.stderr.write("usage: %s\n" % USAGE)
            sys.stderr.write(str(e))
            sys.stderr.write("\n")
            return 255
        except NoRegionError as e:
            msg = ('%s You can also configure your region by running '
                   '"aws configure".' % e)
            self._show_error(msg)
            return 255
        except NoCredentialsError as e:
            msg = ('%s. You can configure credentials by running '
                   '"aws configure".' % e)
            self._show_error(msg)
            return 255
        except KeyboardInterrupt:
            # Shell standard for signals that terminate
            # the process is to return 128 + signum, in this case
            # SIGINT=2, so we'll have an RC of 130.
            sys.stdout.write("\n")
            return 128 + signal.SIGINT
        except Exception as e:
            LOG.debug("Exception caught in main()", exc_info=True)
            LOG.debug("Exiting with rc 255")
            write_exception(e, outfile=get_stderr_text_writer())
            return 255

    def _emit_session_event(self, parsed_args):
        # This event is guaranteed to run after the session has been
        # initialized and a profile has been set.  This was previously
        # problematic because if something in CLIDriver caused the
        # session components to be reset (such as session.profile = foo)
        # then all the prior registered components would be removed.
        self.session.emit(
            'session-initialized', session=self.session,
            parsed_args=parsed_args)

    def _show_error(self, msg):
        """Log the message at debug level and print it to stderr."""
        LOG.debug(msg, exc_info=True)
        sys.stderr.write(msg)
        sys.stderr.write('\n')

    def _handle_top_level_args(self, args):
        """Apply the parsed global args (profile, region, debug) to the session."""
        emit_top_level_args_parsed_event(self.session, args)
        if args.profile:
            self.session.set_config_variable('profile', args.profile)
        if args.region:
            self.session.set_config_variable('region', args.region)
        if args.debug:
            # TODO:
            # Unfortunately, by setting debug mode here, we miss out
            # on all of the debug events prior to this such as the
            # loading of plugins, etc.
            self.session.set_stream_logger('botocore', logging.DEBUG,
                                           format_string=LOG_FORMAT)
            self.session.set_stream_logger('awscli', logging.DEBUG,
                                           format_string=LOG_FORMAT)
            self.session.set_stream_logger('s3transfer', logging.DEBUG,
                                           format_string=LOG_FORMAT)
            self.session.set_stream_logger('urllib3', logging.DEBUG,
                                           format_string=LOG_FORMAT)
            LOG.debug("CLI version: %s", self.session.user_agent())
            LOG.debug("Arguments entered to CLI: %s", sys.argv[1:])
        else:
            self.session.set_stream_logger(logger_name='awscli',
                                           log_level=logging.ERROR)
""" def __init__(self, cli_name, session, service_name=None): # The cli_name is the name the user types, the name we show # in doc, etc. # The service_name is the name we used internally with botocore. # For example, we have the 's3api' as the cli_name for the service # but this is actually bound to the 's3' service name in botocore, # i.e. we load s3.json from the botocore data dir. Most of # the time these are the same thing but in the case of renames, # we want users/external things to be able to rename the cli name # but *not* the service name, as this has to be exactly what # botocore expects. self._name = cli_name self.session = session self._command_table = None if service_name is None: # Then default to using the cli name. self._service_name = cli_name else: self._service_name = service_name self._lineage = [self] self._service_model = None @property def name(self): return self._name @name.setter def name(self, value): self._name = value @property def service_model(self): return self._get_service_model() @property def lineage(self): return self._lineage @lineage.setter def lineage(self, value): self._lineage = value def _get_command_table(self): if self._command_table is None: self._command_table = self._create_command_table() return self._command_table def _get_service_model(self): if self._service_model is None: try: api_version = self.session.get_config_variable( 'api_versions').get(self._service_name, None) except ProfileNotFound: api_version = None self._service_model = self.session.get_service_model( self._service_name, api_version=api_version) return self._service_model def __call__(self, args, parsed_globals): # Once we know we're trying to call a service for this operation # we can go ahead and create the parser for it. We # can also grab the Service object from botocore. 
service_parser = self._create_parser() parsed_args, remaining = service_parser.parse_known_args(args) command_table = self._get_command_table() return command_table[parsed_args.operation](remaining, parsed_globals) def _create_command_table(self): command_table = OrderedDict() service_model = self._get_service_model() for operation_name in service_model.operation_names: cli_name = xform_name(operation_name, '-') operation_model = service_model.operation_model(operation_name) command_table[cli_name] = ServiceOperation( name=cli_name, parent_name=self._name, session=self.session, operation_model=operation_model, operation_caller=CLIOperationCaller(self.session), ) self.session.emit('building-command-table.%s' % self._name, command_table=command_table, session=self.session, command_object=self) self._add_lineage(command_table) return command_table def _add_lineage(self, command_table): for command in command_table: command_obj = command_table[command] command_obj.lineage = self.lineage + [command_obj] def create_help_command(self): command_table = self._get_command_table() return ServiceHelpCommand(session=self.session, obj=self._get_service_model(), command_table=command_table, arg_table=None, event_class='.'.join(self.lineage_names), name=self._name) def _create_parser(self): command_table = self._get_command_table() # Also add a 'help' command. command_table['help'] = self.create_help_command() return ServiceArgParser( operations_table=command_table, service_name=self._name) class ServiceOperation(object): """A single operation of a service. This class represents a single operation for a service, for example ``ec2.DescribeInstances``. """ ARG_TYPES = { 'list': ListArgument, 'boolean': BooleanArgument, } DEFAULT_ARG_CLASS = CLIArgument def __init__(self, name, parent_name, operation_caller, operation_model, session): """ :type name: str :param name: The name of the operation/subcommand. :type parent_name: str :param parent_name: The name of the parent command. 
:type operation_model: ``botocore.model.OperationModel`` :param operation_object: The operation model associated with this subcommand. :type operation_caller: ``CLIOperationCaller`` :param operation_caller: An object that can properly call the operation. :type session: ``botocore.session.Session`` :param session: The session object. """ self._arg_table = None self._name = name # These is used so we can figure out what the proper event # name should be .. self._parent_name = parent_name self._operation_caller = operation_caller self._lineage = [self] self._operation_model = operation_model self._session = session if operation_model.deprecated: self._UNDOCUMENTED = True @property def name(self): return self._name @name.setter def name(self, value): self._name = value @property def lineage(self): return self._lineage @lineage.setter def lineage(self, value): self._lineage = value @property def lineage_names(self): # Represents the lineage of a command in terms of command ``name`` return [cmd.name for cmd in self.lineage] @property def arg_table(self): if self._arg_table is None: self._arg_table = self._create_argument_table() return self._arg_table def __call__(self, args, parsed_globals): # Once we know we're trying to call a particular operation # of a service we can go ahead and load the parameters. 
event = 'before-building-argument-table-parser.%s.%s' % \ (self._parent_name, self._name) self._emit(event, argument_table=self.arg_table, args=args, session=self._session) operation_parser = self._create_operation_parser(self.arg_table) self._add_help(operation_parser) parsed_args, remaining = operation_parser.parse_known_args(args) if parsed_args.help == 'help': op_help = self.create_help_command() return op_help(remaining, parsed_globals) elif parsed_args.help: remaining.append(parsed_args.help) if remaining: raise UnknownArgumentError( "Unknown options: %s" % ', '.join(remaining)) event = 'operation-args-parsed.%s.%s' % (self._parent_name, self._name) self._emit(event, parsed_args=parsed_args, parsed_globals=parsed_globals) call_parameters = self._build_call_parameters( parsed_args, self.arg_table) event = 'calling-command.%s.%s' % (self._parent_name, self._name) override = self._emit_first_non_none_response( event, call_parameters=call_parameters, parsed_args=parsed_args, parsed_globals=parsed_globals ) # There are two possible values for override. It can be some type # of exception that will be raised if detected or it can represent # the desired return code. Note that a return code of 0 represents # a success. if override is not None: if isinstance(override, Exception): # If the override value provided back is an exception then # raise the exception raise override else: # This is the value usually returned by the ``invoke()`` # method of the operation caller. It represents the return # code of the operation. return override else: # No override value was supplied. 
return self._operation_caller.invoke( self._operation_model.service_model.service_name, self._operation_model.name, call_parameters, parsed_globals) def create_help_command(self): return OperationHelpCommand( self._session, operation_model=self._operation_model, arg_table=self.arg_table, name=self._name, event_class='.'.join(self.lineage_names)) def _add_help(self, parser): # The 'help' output is processed a little differently from # the operation help because the arg_table has # CLIArguments for values. parser.add_argument('help', nargs='?') def _build_call_parameters(self, args, arg_table): # We need to convert the args specified on the command # line as valid **kwargs we can hand to botocore. service_params = {} # args is an argparse.Namespace object so we're using vars() # so we can iterate over the parsed key/values. parsed_args = vars(args) for arg_object in arg_table.values(): py_name = arg_object.py_name if py_name in parsed_args: value = parsed_args[py_name] value = self._unpack_arg(arg_object, value) arg_object.add_to_params(service_params, value) return service_params def _unpack_arg(self, cli_argument, value): # Unpacks a commandline argument into a Python value by firing the # load-cli-arg.service-name.operation-name event. 
session = self._session service_name = self._operation_model.service_model.endpoint_prefix operation_name = xform_name(self._name, '-') return unpack_argument(session, service_name, operation_name, cli_argument, value) def _create_argument_table(self): argument_table = OrderedDict() input_shape = self._operation_model.input_shape required_arguments = [] arg_dict = {} if input_shape is not None: required_arguments = input_shape.required_members arg_dict = input_shape.members for arg_name, arg_shape in arg_dict.items(): cli_arg_name = xform_name(arg_name, '-') arg_class = self.ARG_TYPES.get(arg_shape.type_name, self.DEFAULT_ARG_CLASS) is_token = arg_shape.metadata.get('idempotencyToken', False) is_required = arg_name in required_arguments and not is_token event_emitter = self._session.get_component('event_emitter') arg_object = arg_class( name=cli_arg_name, argument_model=arg_shape, is_required=is_required, operation_model=self._operation_model, serialized_name=arg_name, event_emitter=event_emitter) arg_object.add_to_arg_table(argument_table) LOG.debug(argument_table) self._emit('building-argument-table.%s.%s' % (self._parent_name, self._name), operation_model=self._operation_model, session=self._session, command=self, argument_table=argument_table) return argument_table def _emit(self, name, **kwargs): return self._session.emit(name, **kwargs) def _emit_first_non_none_response(self, name, **kwargs): return self._session.emit_first_non_none_response( name, **kwargs) def _create_operation_parser(self, arg_table): parser = ArgTableArgParser(arg_table) return parser class CLIOperationCaller(object): """Call an AWS operation and format the response.""" def __init__(self, session): self._session = session def invoke(self, service_name, operation_name, parameters, parsed_globals): """Invoke an operation and format the response. :type service_name: str :param service_name: The name of the service. Note this is the service name, not the endpoint prefix (e.g. 
``ses`` not ``email``). :type operation_name: str :param operation_name: The operation name of the service. The casing of the operation name should match the exact casing used by the service, e.g. ``DescribeInstances``, not ``describe-instances`` or ``describe_instances``. :type parameters: dict :param parameters: The parameters for the operation call. Again, these values have the same casing used by the service. :type parsed_globals: Namespace :param parsed_globals: The parsed globals from the command line. :return: None, the result is displayed through a formatter, but no value is returned. """ client = self._session.create_client( service_name, region_name=parsed_globals.region, endpoint_url=parsed_globals.endpoint_url, verify=parsed_globals.verify_ssl) response = self._make_client_call( client, operation_name, parameters, parsed_globals) self._display_response(operation_name, response, parsed_globals) return 0 def _make_client_call(self, client, operation_name, parameters, parsed_globals): py_operation_name = xform_name(operation_name) if client.can_paginate(py_operation_name) and parsed_globals.paginate: paginator = client.get_paginator(py_operation_name) response = paginator.paginate(**parameters) else: response = getattr(client, xform_name(operation_name))( **parameters) return response def _display_response(self, command_name, response, parsed_globals): output = parsed_globals.output if output is None: output = self._session.get_config_variable('output') formatter = get_formatter(output, parsed_globals) formatter(command_name, response) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/commands.py0000644000000000000000000000407200000000000016544 0ustar00rootroot00000000000000# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). 
class CLICommand(object):
    """Interface for a CLI command.

    A CLICommand is a top level CLI command such as ``aws ec2``,
    ``aws s3`` or ``aws config``.  Concrete commands subclass this and
    override the pieces they need.
    """

    @property
    def name(self):
        # Concrete commands must provide a name.
        raise NotImplementedError("name")

    @name.setter
    def name(self, value):
        # Concrete commands must support renaming as well.
        raise NotImplementedError("name")

    @property
    def lineage(self):
        # The chain of commands needed to reach this one via the CLI,
        # ending with the command itself.  By default a command is its
        # own entire lineage.
        return [self]

    @property
    def lineage_names(self):
        # The lineage expressed as command names.
        return [command.name for command in self.lineage]

    def __call__(self, args, parsed_globals):
        """Invoke CLI operation.

        :type args: str
        :param args: The remaining command line args.

        :type parsed_globals: ``argparse.Namespace``
        :param parsed_globals: The parsed arguments so far.

        :rtype: int
        :return: The return code of the operation.  This will be used
            as the RC code for the ``aws`` process.
        """
        # Subclasses are expected to implement this method.
        return None

    def create_help_command(self):
        # Subclasses that want help docs override this; by default a
        # command has none.
        return None

    @property
    def arg_table(self):
        # No arguments by default.
        return {}
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
# http://aws.amazon.com/apache2.0/
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

# Python 2/3 compatibility shims for the AWS CLI. Anything that differs
# between the two interpreters (stdin/stdout access, open(), urllib
# locations, raw_input, ...) is funneled through this module so the rest
# of the code base can import a single, version-neutral name.
import sys
import re
import shlex
import os
import os.path
import platform
import zipfile
import signal
import contextlib

from botocore.compat import six
#import botocore.compat
from botocore.compat import OrderedDict

# If you ever want to import from the vendored six. Add it here and then
# import from awscli.compat. Also try to keep it in alphabetical order.
# This may get large.
advance_iterator = six.advance_iterator
PY3 = six.PY3
queue = six.moves.queue
shlex_quote = six.moves.shlex_quote
StringIO = six.StringIO
BytesIO = six.BytesIO
urlopen = six.moves.urllib.request.urlopen
binary_type = six.binary_type
RawConfigParser = six.moves.configparser.RawConfigParser

# Most, but not all, python installations will have zlib. This is required to
# compress any files we send via a push. If we can't compress, we can still
# package the files in a zip container.
try:
    import zlib
    ZIP_COMPRESSION_MODE = zipfile.ZIP_DEFLATED
except ImportError:
    ZIP_COMPRESSION_MODE = zipfile.ZIP_STORED

# sqlite3 can be compiled out of some Python builds; callers must check
# for None before using it.
try:
    import sqlite3
except ImportError:
    sqlite3 = None

is_windows = sys.platform == 'win32'

# Default pager command used when none is configured.
if is_windows:
    default_pager = 'more'
else:
    default_pager = 'less -R'


class StdinMissingError(Exception):
    """Raised when sys.stdin is None and an operation requires stdin."""

    def __init__(self):
        message = (
            'stdin is required for this operation, but is not available.'
        )
        super(StdinMissingError, self).__init__(message)


class NonTranslatedStdout(object):
    """ This context manager sets the line-end translation mode for stdout.

    It is deliberately set to binary mode so that `\r` does not get added to
    the line ending. This can be useful when printing commands where a
    windows style line ending would cause errors.
    """

    def __enter__(self):
        # Only Windows performs newline translation on stdout; other
        # platforms need no mode change.
        if sys.platform == "win32":
            import msvcrt
            self.previous_mode = msvcrt.setmode(sys.stdout.fileno(),
                                                os.O_BINARY)
        return sys.stdout

    def __exit__(self, type, value, traceback):
        # Restore the translation mode captured in __enter__.
        if sys.platform == "win32":
            import msvcrt
            msvcrt.setmode(sys.stdout.fileno(), self.previous_mode)


def ensure_text_type(s):
    """Return *s* as text, decoding bytes as UTF-8; raise ValueError otherwise."""
    if isinstance(s, six.text_type):
        return s
    if isinstance(s, six.binary_type):
        return s.decode('utf-8')
    raise ValueError("Expected str, unicode or bytes, received %s." % type(s))


if six.PY3:
    import collections.abc as collections_abc
    import locale
    import urllib.parse as urlparse

    from urllib.error import URLError

    raw_input = input

    def get_binary_stdin():
        # sys.stdin can be None (e.g. pythonw / detached processes).
        if sys.stdin is None:
            raise StdinMissingError()
        return sys.stdin.buffer

    def get_binary_stdout():
        return sys.stdout.buffer

    def _get_text_writer(stream, errors):
        # Python 3 streams are already text-mode; no wrapping needed.
        return stream

    def compat_open(filename, mode='r', encoding=None):
        """Back-port open() that accepts an encoding argument.

        In python3 this uses the built in open() and in python2 this
        uses the io.open() function.

        If the file is not being opened in binary mode, then we'll
        use locale.getpreferredencoding() to find the preferred
        encoding.
        """
        # NOTE: a caller-supplied ``encoding`` is overridden for text
        # modes; the locale's preferred encoding always wins here.
        if 'b' not in mode:
            encoding = locale.getpreferredencoding()
        return open(filename, mode, encoding=encoding)

    def bytes_print(statement, stdout=None):
        """
        This function is used to write raw bytes to stdout.
        """
        if stdout is None:
            stdout = sys.stdout

        if getattr(stdout, 'buffer', None):
            stdout.buffer.write(statement)
        else:
            # If it is not possible to write to the standard out buffer.
            # The next best option is to decode and write to standard out.
            stdout.write(statement.decode('utf-8'))
else:
    import codecs
    import collections as collections_abc
    import locale
    import io
    import urlparse

    from urllib2 import URLError

    raw_input = raw_input

    def get_binary_stdin():
        if sys.stdin is None:
            raise StdinMissingError()
        return sys.stdin

    def get_binary_stdout():
        return sys.stdout

    def _get_text_writer(stream, errors):
        # In python3, all the sys.stdout/sys.stderr streams are in text
        # mode. This means they expect unicode, and will encode the
        # unicode automatically before actually writing to stdout/stderr.
        # In python2, that's not the case. In order to provide a consistent
        # interface, we can create a wrapper around sys.stdout that will take
        # unicode, and automatically encode it to the preferred encoding.
        # That way consumers can just call get_text_writer(stream) and write
        # unicode to the returned stream. Note that get_text_writer
        # just returns the stream in the PY3 section above because python3
        # handles this.
        # We're going to use the preferred encoding, but in cases that there is
        # no preferred encoding we're going to fall back to assuming ASCII is
        # what we should use. This will currently break the use of
        # PYTHONIOENCODING, which would require checking stream.encoding first,
        # however, the existing behavior is to only use
        # locale.getpreferredencoding() and so in the hope of not breaking what
        # is currently working, we will continue to only use that.
        encoding = locale.getpreferredencoding()
        if encoding is None:
            encoding = "ascii"
        return codecs.getwriter(encoding)(stream, errors)

    def compat_open(filename, mode='r', encoding=None):
        # See docstring for compat_open in the PY3 section above.
        if 'b' not in mode:
            encoding = locale.getpreferredencoding()
        return io.open(filename, mode, encoding=encoding)

    def bytes_print(statement, stdout=None):
        # PY2 stdout is a byte stream, so raw bytes can be written directly.
        if stdout is None:
            stdout = sys.stdout

        stdout.write(statement)


def get_stdout_text_writer():
    # Strict errors: failing to encode output is surfaced to the caller.
    return _get_text_writer(sys.stdout, errors="strict")


def get_stderr_text_writer():
    # Replace errors: diagnostics should never raise while being printed.
    return _get_text_writer(sys.stderr, errors="replace")


def compat_input(prompt):
    """
    Cygwin's pty's are based on pipes. Therefore, when it interacts with a
    Win32 program (such as Win32 python), what that program sees is a pipe
    instead of a console. This is important because python buffers pipes,
    and so on a pty-based terminal, text will not necessarily appear
    immediately. In most cases, this isn't a big deal. But when we're
    doing an interactive prompt, the result is that the prompts won't
    display until we fill the buffer. Since raw_input does not flush the
    prompt, we need to manually write and flush it.

    See https://github.com/mintty/mintty/issues/56 for more details.
    """
    sys.stdout.write(prompt)
    sys.stdout.flush()
    return raw_input()


def compat_shell_quote(s, platform=None):
    """Return a shell-escaped version of the string *s*

    Unfortunately `shlex.quote` doesn't support Windows, so this method
    provides that functionality.

    :param s: The string to escape.
    :param platform: Overrides ``sys.platform``; used for testing.
    """
    if platform is None:
        platform = sys.platform

    if platform == "win32":
        return _windows_shell_quote(s)
    else:
        return shlex_quote(s)


def _windows_shell_quote(s):
    """Return a Windows shell-escaped version of the string *s*

    Windows has potentially bizarre rules depending on where you look. When
    spawning a process via the Windows C runtime the rules are as follows:

    https://docs.microsoft.com/en-us/cpp/cpp/parsing-cpp-command-line-arguments

    To summarize the relevant bits:

    * Only space and tab are valid delimiters
    * Double quotes are the only valid quotes
    * Backslash is interpreted literally unless it is part of a chain
      that leads up to a double quote. Then the backslashes escape the
      backslashes, and if there is an odd number the final backslash
      escapes the quote.

    :param s: A string to escape
    :return: An escaped string
    """
    # An empty argument must still occupy a position on the command line.
    if not s:
        return '""'

    buff = []
    # NOTE: despite the name, this counts *backslashes* seen so far.
    num_backspaces = 0
    for character in s:
        if character == '\\':
            # We can't simply append backslashes because we don't know if
            # they will need to be escaped. Instead we separately keep track
            # of how many we've seen.
            num_backspaces += 1
        elif character == '"':
            if num_backspaces > 0:
                # The backslashes are part of a chain that lead up to a
                # double quote, so they need to be escaped.
                buff.append('\\' * (num_backspaces * 2))
                num_backspaces = 0

            # The double quote also needs to be escaped. The fact that we're
            # seeing it at all means that it must have been escaped in the
            # original source.
            buff.append('\\"')
        else:
            if num_backspaces > 0:
                # The backslashes aren't part of a chain leading up to a
                # double quote, so they can be inserted directly without
                # being escaped.
                buff.append('\\' * num_backspaces)
                num_backspaces = 0
            buff.append(character)

    # There may be some leftover backspaces if they were on the trailing
    # end, so they're added back in here.
    if num_backspaces > 0:
        buff.append('\\' * num_backspaces)

    new_s = ''.join(buff)
    if ' ' in new_s or '\t' in new_s:
        # If there are any spaces or tabs then the string needs to be double
        # quoted.
        return '"%s"' % new_s
    return new_s


def get_popen_kwargs_for_pager_cmd(pager_cmd=None):
    """Returns the default pager to use dependent on platform

    :rtype: str

    :returns: A string represent the paging command to run based on the
        platform being used.
""" popen_kwargs = {} if pager_cmd is None: pager_cmd = default_pager # Similar to what we do with the help command, we need to specify # shell as True to make it work in the pager for Windows if is_windows: popen_kwargs = {'shell': True} else: pager_cmd = shlex.split(pager_cmd) popen_kwargs['args'] = pager_cmd return popen_kwargs @contextlib.contextmanager def ignore_user_entered_signals(): """ Ignores user entered signals to avoid process getting killed. """ if is_windows: signal_list = [signal.SIGINT] else: signal_list = [signal.SIGINT, signal.SIGQUIT, signal.SIGTSTP] actual_signals = [] for user_signal in signal_list: actual_signals.append(signal.signal(user_signal, signal.SIG_IGN)) try: yield finally: for sig, user_signal in enumerate(signal_list): signal.signal(user_signal, actual_signals[sig]) # linux_distribution is used by the CodeDeploy customization. Python 3.8 # removed it from the stdlib, so it is vendored here in the case where the # import fails. try: from platform import linux_distribution except ImportError: _UNIXCONFDIR = '/etc' def _dist_try_harder(distname, version, id): """ Tries some special tricks to get the distribution information in case the default method fails. Currently supports older SuSE Linux, Caldera OpenLinux and Slackware Linux distributions. """ if os.path.exists('/var/adm/inst-log/info'): # SuSE Linux stores distribution information in that file distname = 'SuSE' with open('/var/adm/inst-log/info') as f: for line in f: tv = line.split() if len(tv) == 2: tag, value = tv else: continue if tag == 'MIN_DIST_VERSION': version = value.strip() elif tag == 'DIST_IDENT': values = value.split('-') id = values[2] return distname, version, id if os.path.exists('/etc/.installed'): # Caldera OpenLinux has some infos in that file (thanks to Colin Kong) with open('/etc/.installed') as f: for line in f: pkg = line.split('-') if len(pkg) >= 2 and pkg[0] == 'OpenLinux': # XXX does Caldera support non Intel platforms ? 
If yes, # where can we find the needed id ? return 'OpenLinux', pkg[1], id if os.path.isdir('/usr/lib/setup'): # Check for slackware version tag file (thanks to Greg Andruk) verfiles = os.listdir('/usr/lib/setup') for n in range(len(verfiles)-1, -1, -1): if verfiles[n][:14] != 'slack-version-': del verfiles[n] if verfiles: verfiles.sort() distname = 'slackware' version = verfiles[-1][14:] return distname, version, id return distname, version, id _release_filename = re.compile(r'(\w+)[-_](release|version)', re.ASCII) _lsb_release_version = re.compile(r'(.+)' r' release ' r'([\d.]+)' r'[^(]*(?:\((.+)\))?', re.ASCII) _release_version = re.compile(r'([^0-9]+)' r'(?: release )?' r'([\d.]+)' r'[^(]*(?:\((.+)\))?', re.ASCII) # See also http://www.novell.com/coolsolutions/feature/11251.html # and http://linuxmafia.com/faq/Admin/release-files.html # and http://data.linux-ntfs.org/rpm/whichrpm # and http://www.die.net/doc/linux/man/man1/lsb_release.1.html _supported_dists = ( 'SuSE', 'debian', 'fedora', 'redhat', 'centos', 'mandrake', 'mandriva', 'rocks', 'slackware', 'yellowdog', 'gentoo', 'UnitedLinux', 'turbolinux', 'arch', 'mageia') def _parse_release_file(firstline): # Default to empty 'version' and 'id' strings. Both defaults are used # when 'firstline' is empty. 'id' defaults to empty when an id can not # be deduced. version = '' id = '' # Parse the first line m = _lsb_release_version.match(firstline) if m is not None: # LSB format: "distro release x.x (codename)" return tuple(m.groups()) # Pre-LSB format: "distro x.x (codename)" m = _release_version.match(firstline) if m is not None: return tuple(m.groups()) # Unknown format... 
take the first two words l = firstline.strip().split() if l: version = l[0] if len(l) > 1: id = l[1] return '', version, id _distributor_id_file_re = re.compile("(?:DISTRIB_ID\s*=)\s*(.*)", re.I) _release_file_re = re.compile("(?:DISTRIB_RELEASE\s*=)\s*(.*)", re.I) _codename_file_re = re.compile("(?:DISTRIB_CODENAME\s*=)\s*(.*)", re.I) def linux_distribution(distname='', version='', id='', supported_dists=_supported_dists, full_distribution_name=1): return _linux_distribution(distname, version, id, supported_dists, full_distribution_name) def _linux_distribution(distname, version, id, supported_dists, full_distribution_name): """ Tries to determine the name of the Linux OS distribution name. The function first looks for a distribution release file in /etc and then reverts to _dist_try_harder() in case no suitable files are found. supported_dists may be given to define the set of Linux distributions to look for. It defaults to a list of currently supported Linux distributions identified by their release file name. If full_distribution_name is true (default), the full distribution read from the OS is returned. Otherwise the short name taken from supported_dists is used. Returns a tuple (distname, version, id) which default to the args given as parameters. """ # check for the Debian/Ubuntu /etc/lsb-release file first, needed so # that the distribution doesn't get identified as Debian. 
# https://bugs.python.org/issue9514 try: with open("/etc/lsb-release", "r") as etclsbrel: for line in etclsbrel: m = _distributor_id_file_re.search(line) if m: _u_distname = m.group(1).strip() m = _release_file_re.search(line) if m: _u_version = m.group(1).strip() m = _codename_file_re.search(line) if m: _u_id = m.group(1).strip() if _u_distname and _u_version: return (_u_distname, _u_version, _u_id) except (EnvironmentError, UnboundLocalError): pass try: etc = os.listdir(_UNIXCONFDIR) except OSError: # Probably not a Unix system return distname, version, id etc.sort() for file in etc: m = _release_filename.match(file) if m is not None: _distname, dummy = m.groups() if _distname in supported_dists: distname = _distname break else: return _dist_try_harder(distname, version, id) # Read the first line with open(os.path.join(_UNIXCONFDIR, file), 'r', encoding='utf-8', errors='surrogateescape') as f: firstline = f.readline() _distname, _version, _id = _parse_release_file(firstline) if _distname and full_distribution_name: distname = _distname if _version: version = _version if _id: id = _id return distname, version, id ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/completer.py0000755000000000000000000001336300000000000016743 0ustar00rootroot00000000000000# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # http://aws.amazon.com/apache2.0/ # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import awscli.clidriver
import sys
import logging
import copy

LOG = logging.getLogger(__name__)


class Completer(object):
    """Shell tab-completion engine for the ``aws`` CLI.

    Walks the driver's help/command tables to suggest service commands,
    subcommands (operations) and ``--option`` names for a partial
    command line.
    """

    def __init__(self, driver=None):
        # Allow dependency injection of a clidriver (used in tests);
        # otherwise build the real one.
        if driver is not None:
            self.driver = driver
        else:
            self.driver = awscli.clidriver.create_clidriver()
        self.main_help = self.driver.create_help_command()
        # Names of the documented global (provider-level) options.
        self.main_options = self._get_documented_completions(
            self.main_help.arg_table)

    def complete(self, cmdline, point=None):
        """Return the list of completion choices for ``cmdline``.

        :param cmdline: The command line typed so far.
        :param point: Cursor position within ``cmdline``; defaults to the
            end of the string.
        """
        if point is None:
            point = len(cmdline)
        args = cmdline[0:point].split()
        current_arg = args[-1]
        # Partition the words into command names vs. ``-``/``--`` options.
        cmd_args = [w for w in args if not w.startswith('-')]
        opts = [w for w in args if w.startswith('-')]
        cmd_name, cmd = self._get_command(self.main_help, cmd_args)
        subcmd_name, subcmd = self._get_command(cmd, cmd_args)
        if cmd_name is None:
            # If we didn't find any command names in the cmdline
            # lets try to complete provider options
            return self._complete_provider(current_arg, opts)
        elif subcmd_name is None:
            return self._complete_command(cmd_name, cmd, current_arg, opts)
        return self._complete_subcommand(subcmd_name, subcmd,
                                         current_arg, opts)

    def _complete_command(self, command_name, command_help, current_arg,
                          opts):
        # Completing at the service-command level (e.g. ``aws ec2 <TAB>``).
        if current_arg == command_name:
            if command_help:
                return self._get_documented_completions(
                    command_help.command_table)
        elif current_arg.startswith('-'):
            return self._find_possible_options(current_arg, opts)
        elif command_help is not None:
            # See if they have entered a partial command name
            return self._get_documented_completions(
                command_help.command_table, current_arg)
        return []

    def _complete_subcommand(self, subcmd_name, subcmd_help, current_arg,
                             opts):
        # At the operation level only option names are completed.
        if current_arg != subcmd_name and current_arg.startswith('-'):
            return self._find_possible_options(current_arg, opts, subcmd_help)
        return []

    def _complete_option(self, option_name):
        # Value completion for specific global options.
        if option_name == '--endpoint-url':
            # No suggestions: values are free-form URLs.
            return []
        if option_name == '--output':
            cli_data = self.driver.session.get_data('cli')
            return cli_data['options']['output']['choices']
        if option_name == '--profile':
            return self.driver.session.available_profiles
        return []

    def _complete_provider(self, current_arg, opts):
        # Completing at the top (provider) level, before any command name.
        if current_arg.startswith('-'):
            return self._find_possible_options(current_arg, opts)
        elif current_arg == 'aws':
            return self._get_documented_completions(
                self.main_help.command_table)
        else:
            # Otherwise, see if they have entered a partial command name
            return self._get_documented_completions(
                self.main_help.command_table, current_arg)

    def _get_command(self, command_help, command_args):
        # Find the first word that names an entry in the given command
        # table; return (name, help_command) or (None, None).
        if command_help is not None and command_help.command_table is not None:
            for command_name in command_args:
                if command_name in command_help.command_table:
                    cmd_obj = command_help.command_table[command_name]
                    return command_name, cmd_obj.create_help_command()
        return None, None

    def _get_documented_completions(self, table, startswith=None):
        # Collect documented, non-positional names from a command/arg
        # table, optionally filtered by prefix.
        names = []
        for key, command in table.items():
            if getattr(command, '_UNDOCUMENTED', False):
                # Don't tab complete undocumented commands/params
                continue
            if startswith is not None and not key.startswith(startswith):
                continue
            if getattr(command, 'positional_arg', False):
                continue
            names.append(key)
        return names

    def _find_possible_options(self, current_arg, opts, subcmd_help=None):
        # Start from the global options; add the subcommand's own
        # documented options when available.
        all_options = copy.copy(self.main_options)
        if subcmd_help is not None:
            all_options += self._get_documented_completions(
                subcmd_help.arg_table)
        for option in opts:
            # Look through list of options on cmdline. If there are
            # options that have already been specified and they are
            # not the current word, remove them from list of possibles.
if option != current_arg: stripped_opt = option.lstrip('-') if stripped_opt in all_options: all_options.remove(stripped_opt) cw = current_arg.lstrip('-') possibilities = ['--' + n for n in all_options if n.startswith(cw)] if len(possibilities) == 1 and possibilities[0] == current_arg: return self._complete_option(possibilities[0]) return possibilities def complete(cmdline, point): choices = Completer().complete(cmdline, point) print(' \n'.join(choices)) if __name__ == '__main__': if len(sys.argv) == 3: cmdline = sys.argv[1] point = int(sys.argv[2]) elif len(sys.argv) == 2: cmdline = sys.argv[1] else: print('usage: %s ' % sys.argv[0]) sys.exit(1) print(complete(cmdline, point)) ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1642014456.0892723 awscli-1.22.34/awscli/customizations/0000755000000000000000000000000000000000000017461 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/__init__.py0000644000000000000000000000271400000000000021576 0ustar00rootroot00000000000000# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. """ Customizations ============== As we start to accumulate more and more of these *built-in* customizations we probably need to come up with some way to organize them and to make it easy to add them and register them. 
One idea I had was to place them all with a package like this. That at least keeps them all in one place. Each module in this package should contain a single customization (I think). To take it a step further, we could have each module define a couple of well-defined attributes: * ``EVENT`` would be a string containing the event that this customization needs to be registered with. Or, perhaps this should be a list of events? * ``handler`` is a callable that will be registered as the handler for the event. Using a convention like this, we could perhaps automatically discover all customizations and register them without having to manually edit ``handlers.py`` each time. """ ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/addexamples.py0000644000000000000000000000342500000000000022326 0ustar00rootroot00000000000000# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. """ Add authored examples to MAN and HTML documentation --------------------------------------------------- This customization allows authored examples in ReST format to be inserted into the generated help for an Operation. To get this to work you need to: * Register the ``add_examples`` function below with the ``doc-examples.*.*`` event. * Create a file containing ReST format fragment with the examples. The file needs to be created in the ``examples/`` directory and needs to be named ``-.rst``. 
For example, ``examples/ec2/ec2-create-key-pair.rst``. """ import os import logging LOG = logging.getLogger(__name__) def add_examples(help_command, **kwargs): doc_path = os.path.join( os.path.dirname( os.path.dirname( os.path.abspath(__file__))), 'examples') doc_path = os.path.join(doc_path, help_command.event_class.replace('.', os.path.sep)) doc_path = doc_path + '.rst' LOG.debug("Looking for example file at: %s", doc_path) if os.path.isfile(doc_path): help_command.doc.style.h2('Examples') fp = open(doc_path) for line in fp.readlines(): help_command.doc.write(line) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/argrename.py0000644000000000000000000002160400000000000021777 0ustar00rootroot00000000000000# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. """ """ from awscli.customizations import utils ARGUMENT_RENAMES = { # Mapping of original arg to renamed arg. # The key is ..argname # The first part of the key is used for event registration # so if you wanted to rename something for an entire service you # could say 'ec2.*.dry-run': 'renamed-arg-name', or if you wanted # to rename across all services you could say '*.*.dry-run': 'new-name'. 
'ec2.create-image.no-no-reboot': 'reboot', 'ec2.*.no-egress': 'ingress', 'ec2.*.no-disable-api-termination': 'enable-api-termination', 'opsworks.*.region': 'stack-region', 'elastictranscoder.*.output': 'job-output', 'swf.register-activity-type.version': 'activity-version', 'swf.register-workflow-type.version': 'workflow-version', 'datapipeline.*.query': 'objects-query', 'datapipeline.get-pipeline-definition.version': 'pipeline-version', 'emr.*.job-flow-ids': 'cluster-ids', 'emr.*.job-flow-id': 'cluster-id', 'cloudsearchdomain.search.query': 'search-query', 'cloudsearchdomain.suggest.query': 'suggest-query', 'sns.subscribe.endpoint': 'notification-endpoint', 'deploy.*.s-3-location': 's3-location', 'deploy.*.ec-2-tag-filters': 'ec2-tag-filters', 'codepipeline.get-pipeline.version': 'pipeline-version', 'codepipeline.create-custom-action-type.version': 'action-version', 'codepipeline.delete-custom-action-type.version': 'action-version', 'kinesisanalytics.add-application-output.output': 'application-output', 'kinesisanalyticsv2.add-application-output.output': 'application-output', 'route53.delete-traffic-policy.version': 'traffic-policy-version', 'route53.get-traffic-policy.version': 'traffic-policy-version', 'route53.update-traffic-policy-comment.version': 'traffic-policy-version', 'gamelift.create-build.version': 'build-version', 'gamelift.update-build.version': 'build-version', 'gamelift.create-script.version': 'script-version', 'gamelift.update-script.version': 'script-version', 'route53domains.view-billing.start': 'start-time', 'route53domains.view-billing.end': 'end-time', 'apigateway.create-rest-api.version': 'api-version', 'apigatewayv2.create-api.version': 'api-version', 'apigatewayv2.update-api.version': 'api-version', 'pinpoint.get-campaign-version.version': 'campaign-version', 'pinpoint.get-segment-version.version': 'segment-version', 'pinpoint.delete-email-template.version': 'template-version', 'pinpoint.delete-in-app-template.version': 'template-version', 
'pinpoint.delete-push-template.version': 'template-version', 'pinpoint.delete-sms-template.version': 'template-version', 'pinpoint.delete-voice-template.version': 'template-version', 'pinpoint.get-email-template.version': 'template-version', 'pinpoint.get-in-app-template.version': 'template-version', 'pinpoint.get-push-template.version': 'template-version', 'pinpoint.get-sms-template.version': 'template-version', 'pinpoint.get-voice-template.version': 'template-version', 'pinpoint.update-email-template.version': 'template-version', 'pinpoint.update-in-app-template.version': 'template-version', 'pinpoint.update-push-template.version': 'template-version', 'pinpoint.update-sms-template.version': 'template-version', 'pinpoint.update-voice-template.version': 'template-version', 'stepfunctions.send-task-success.output': 'task-output', 'clouddirectory.publish-schema.version': 'schema-version', 'mturk.list-qualification-types.query': 'types-query', 'workdocs.create-notification-subscription.endpoint': 'notification-endpoint', 'workdocs.describe-users.query': 'user-query', 'lex-models.delete-bot.version': 'bot-version', 'lex-models.delete-intent.version': 'intent-version', 'lex-models.delete-slot-type.version': 'slot-type-version', 'lex-models.get-intent.version': 'intent-version', 'lex-models.get-slot-type.version': 'slot-type-version', 'lex-models.delete-bot-version.version': 'bot-version', 'lex-models.delete-intent-version.version': 'intent-version', 'lex-models.delete-slot-type-version.version': 'slot-type-version', 'lex-models.get-export.version': 'resource-version', 'license-manager.get-grant.version': 'grant-version', 'license-manager.delete-grant.version': 'grant-version', 'license-manager.get-license.version': 'license-version', 'mobile.create-project.region': 'project-region', 'rekognition.create-stream-processor.output': 'stream-processor-output', 'eks.create-cluster.version': 'kubernetes-version', 'eks.update-cluster-version.version': 'kubernetes-version', 
'eks.create-nodegroup.version': 'kubernetes-version', 'eks.update-nodegroup-version.version': 'kubernetes-version', 'schemas.*.version': 'schema-version', 'sagemaker.delete-image-version.version': 'version-number', 'sagemaker.describe-image-version.version': 'version-number', 'iotwireless.*.lo-ra-wan': 'lorawan', 'codepipeline.get-action-type.version': 'action-version', 'ecs.*.no-enable-execute-command': 'disable-execute-command', 'ecs.execute-command.no-interactive': 'non-interactive', } # Same format as ARGUMENT_RENAMES, but instead of renaming the arguments, # an alias is created to the original arugment and marked as undocumented. # This is useful when you need to change the name of an argument but you # still need to support the old argument. HIDDEN_ALIASES = { 'cognito-identity.create-identity-pool.open-id-connect-provider-arns': 'open-id-connect-provider-ar-ns', 'storagegateway.describe-tapes.tape-arns': 'tape-ar-ns', 'storagegateway.describe-tape-archives.tape-arns': 'tape-ar-ns', 'storagegateway.describe-vtl-devices.vtl-device-arns': 'vtl-device-ar-ns', 'storagegateway.describe-cached-iscsi-volumes.volume-arns': 'volume-ar-ns', 'storagegateway.describe-stored-iscsi-volumes.volume-arns': 'volume-ar-ns', 'route53domains.view-billing.start-time': 'start', # These come from the xform_name() changes that no longer separates words # by numbers. 
def register_arg_renames(cli):
    """Register build-time handlers for every rename and hidden alias."""
    for source, target in ARGUMENT_RENAMES.items():
        event_suffix, old_name = source.rsplit('.', 1)
        cli.register('building-argument-table.' + event_suffix,
                     rename_arg(old_name, target))
    for source, target in HIDDEN_ALIASES.items():
        event_suffix, old_name = source.rsplit('.', 1)
        cli.register('building-argument-table.' + event_suffix,
                     hidden_alias(old_name, target))


def rename_arg(original_arg_name, new_name):
    """Return a handler that renames ``original_arg_name`` to ``new_name``."""
    def _rename_arg(argument_table, **kwargs):
        # Only rename when the argument actually exists for this command.
        if original_arg_name in argument_table:
            utils.rename_argument(argument_table, original_arg_name, new_name)
    return _rename_arg


def hidden_alias(original_arg_name, alias_name):
    """Return a handler that adds a hidden alias for an argument."""
    def _alias_arg(argument_table, **kwargs):
        if original_arg_name in argument_table:
            utils.make_hidden_alias(argument_table, original_arg_name,
                                    alias_name)
    return _alias_arg


def resolve_given_outfile_path(path):
    """Expand *path* and verify its parent directory is writable.

    Returns None when *path* is None; raises ValueError when the
    resolved location cannot be written to.
    """
    if path is None:
        return None
    expanded = os.path.expanduser(os.path.expandvars(path))
    parent = os.path.dirname(os.path.abspath(expanded))
    if not os.access(parent, os.W_OK):
        raise ValueError('Unable to write to file: %s' % expanded)
    return expanded


def is_parsed_result_successful(parsed_result):
    """Return True when the parsed response status code is below 300."""
    status = parsed_result['ResponseMetadata']['HTTPStatusCode']
    return status < 300
class OverrideRequiredArgsArgument(CustomArgument):
    """An argument whose presence waives all other required arguments.

    When this argument appears on the command line, the parser will not
    raise an error for any other argument that would normally be
    required.  Subclass this class and fill in ``ARG_DATA`` to use this
    behavior; the class is really only useful for subclassing.
    """

    # ``ARG_DATA`` follows the same format as a member of ``ARG_TABLE`` in
    # the ``BasicCommand`` class (see awscli/customizations/commands.py),
    # for example:
    #
    #     ARG_DATA = {'name': 'my-argument',
    #                 'help_text': 'This argument ensures the argument is '
    #                              'specified no other arguments are '
    #                              'required'}
    ARG_DATA = {'name': 'no-required-args'}

    def __init__(self, session):
        self._session = session
        self._register_argument_action()
        super(OverrideRequiredArgsArgument, self).__init__(**self.ARG_DATA)

    def _register_argument_action(self):
        self._session.register('before-building-argument-table-parser',
                               self.override_required_args)

    def override_required_args(self, argument_table, args, **kwargs):
        # When this argument's name is present on the command line, flip
        # every entry in the table to non-required.
        if '--' + self.name in args:
            for arg in argument_table.values():
                arg.required = False


class StatefulArgument(CustomArgument):
    """A custom argument that remembers the last value it was given."""

    def __init__(self, *args, **kwargs):
        super(StatefulArgument, self).__init__(*args, **kwargs)
        self._value = None

    def add_to_params(self, parameters, value):
        super(StatefulArgument, self).add_to_params(parameters, value)
        self._value = value

    @property
    def value(self):
        return self._value


class QueryOutFileArgument(StatefulArgument):
    """An argument that writes a JMESPath query result to a file."""

    def __init__(self, session, name, query, after_call_event, perm,
                 *args, **kwargs):
        self._session = session
        self._query = query
        self._after_call_event = after_call_event
        self._perm = perm
        # Provide a default help_text when the caller did not supply one.
        kwargs.setdefault(
            'help_text',
            'Saves the command output contents of %s '
            'to the given filename' % self.query)
        super(QueryOutFileArgument, self).__init__(name, *args, **kwargs)

    @property
    def query(self):
        return self._query

    @property
    def perm(self):
        return self._perm

    def add_to_params(self, parameters, value):
        value = resolve_given_outfile_path(value)
        super(QueryOutFileArgument, self).add_to_params(parameters, value)
        if self.value is not None:
            # Only hook the after-call event when the argument was used.
            self._session.register(self._after_call_event, self.save_query)

    def save_query(self, parsed, **kwargs):
        """Write the JMESPath query result to the output file.

        The data is only written when the response status code is < 300;
        a None result is written as an empty string rather than 'None'.
        """
        if is_parsed_result_successful(parsed):
            contents = jmespath.search(self.query, parsed)
            with open(self.value, 'w') as fp:
                fp.write('' if contents is None else contents)
            os.chmod(self.value, self.perm)
def register_assume_role_provider(event_handlers):
    """Hook session initialization to add an assume-role cred cache."""
    event_handlers.register(
        'session-initialized',
        inject_assume_role_provider_cache,
        unique_id='inject_assume_role_cred_provider_cache')


def inject_assume_role_provider_cache(session, **kwargs):
    """Attach a JSON file cache to the assume-role credential providers."""
    try:
        cred_chain = session.get_component('credential_provider')
    except ProfileNotFound:
        # A profile that does not exist raises ProfileNotFound when we
        # pull components/config off the session.  Sometimes that is a
        # real error:
        #
        #     "ec2 describe-instances --profile unknown"
        #
        # and sometimes it is perfectly valid:
        #
        #     "configure set region us-west-2 --profile brand-new-profile"
        #
        # We cannot know (and do not want to know) which case we are in,
        # so we just return.  If it is invalid, something further up the
        # stack will raise ProfileNotFound; otherwise commands such as
        # configure keep working as expected.
        LOG.debug("ProfileNotFound caught when trying to inject "
                  "assume-role cred provider cache. Not configuring "
                  "JSONFileCache for assume-role.")
        return
    cred_chain.get_provider('assume-role').cache = JSONFileCache(CACHE_DIR)
    cred_chain.get_provider(
        'assume-role-with-web-identity').cache = JSONFileCache(CACHE_DIR)
ZIP_DOCSTRING = (
    '<p>The path to the zip file of the {param_type} you are uploading. '
    'Specify --zip-file or --{param_type}, but not both. '
    'Example: fileb://{param_type}.zip</p>'
)


def register_lambda_create_function(cli):
    """Wire up the --zip-file customizations for the Lambda commands."""
    cli.register('building-argument-table.lambda.create-function',
                 ZipFileArgumentHoister('Code').hoist)
    cli.register('building-argument-table.lambda.publish-layer-version',
                 ZipFileArgumentHoister('Content').hoist)
    cli.register('building-argument-table.lambda.update-function-code',
                 _modify_zipfile_docstring)
    cli.register('process-cli-arg.lambda.update-function-code',
                 validate_is_zip_file)


def validate_is_zip_file(cli_argument, value, **kwargs):
    """Reject --zip-file values that do not hold real zip content."""
    if cli_argument.name == 'zip-file':
        _should_contain_zip_content(value)


class ZipFileArgumentHoister(object):
    """Hoists a nested ZipFile parameter up to a top-level --zip-file.

    Injects a top-level ``ZipFileArgument`` that maps a --zip-file
    parameter onto the underlying ``serialized_name`` ZipFile shape, and
    replaces the old argument with a ``ReplacedZipFileArgument`` that
    prevents direct ZipFile usage and recommends the injected parameter.
    """

    def __init__(self, serialized_name):
        self._serialized_name = serialized_name
        self._name = serialized_name.lower()

    def hoist(self, session, argument_table, **kwargs):
        argument_table['zip-file'] = ZipFileArgument(
            'zip-file',
            help_text=ZIP_DOCSTRING.format(param_type=self._name),
            cli_type_name='blob',
            serialized_name=self._serialized_name,
        )
        original = argument_table[self._name]
        # Remove the ZipFile member from a copy of the model so the
        # replacement argument no longer advertises it.
        model = copy.deepcopy(original.argument_model)
        del model.members['ZipFile']
        argument_table[self._name] = ReplacedZipFileArgument(
            name=self._name,
            argument_model=model,
            operation_model=original._operation_model,
            is_required=False,
            event_emitter=session.get_component('event_emitter'),
            serialized_name=self._serialized_name,
        )


def _modify_zipfile_docstring(session, argument_table, **kwargs):
    """Swap update-function-code's --zip-file docs for the new text."""
    if 'zip-file' in argument_table:
        argument_table['zip-file'].documentation = ZIP_DOCSTRING
def _should_contain_zip_content(value):
    """Raise ValueError unless *value* holds loadable zip data."""
    if not isinstance(value, bytes):
        # Non-bytes input is basically never valid zip content, but we
        # encode it and let the zipfile module make the final call to be
        # absolutely sure.
        value = value.encode('utf-8')
    try:
        with closing(zipfile.ZipFile(six.BytesIO(value))) as f:
            f.infolist()
    except zipfile.BadZipfile:
        raise ValueError(ERROR_MSG)


class ZipFileArgument(CustomArgument):
    """Top-level --zip-file argument serialized into a nested parameter.

    Injects the given value under the configured serialized name, so
    ``--zip-file foo.zip`` ends up serialized as
    ``{'Code': {'ZipFile': <contents of foo.zip>}}``.
    """

    def __init__(self, *args, **kwargs):
        self._param_to_replace = kwargs.pop('serialized_name')
        super(ZipFileArgument, self).__init__(*args, **kwargs)

    def add_to_params(self, parameters, value):
        if value is None:
            return
        _should_contain_zip_content(value)
        existing = parameters.get(self._param_to_replace)
        if existing:
            existing.update({'ZipFile': value})
        else:
            parameters[self._param_to_replace] = {'ZipFile': value}


class ReplacedZipFileArgument(CLIArgument):
    """Replacement for a nested ZipFile argument that expects binary.

    Prevents use of the non-working nested argument and directs users to
    the hoisted --zip-file option, which supports fileb:// loading of
    binary content under the correct serialization name.
    """

    def __init__(self, *args, **kwargs):
        super(ReplacedZipFileArgument, self).__init__(*args, **kwargs)
        self._cli_name = '--%s' % kwargs['name']
        self._param_to_replace = kwargs['serialized_name']

    def add_to_params(self, parameters, value):
        if value is None:
            return
        unpacked = self._unpack_argument(value)
        if 'ZipFile' in unpacked:
            raise ValueError("ZipFile cannot be provided "
                             "as part of the %s argument.  "
                             "Please use the '--zip-file' "
                             "option instead to specify a zip file."
                             % self._cli_name)
        if parameters.get(self._param_to_replace):
            parameters[self._param_to_replace].update(unpacked)
        else:
            parameters[self._param_to_replace] = unpacked


def register_cli_input_json(cli):
    """Register the --cli-input-json injection handler."""
    cli.register('building-argument-table', add_cli_input_json)


def add_cli_input_json(session, argument_table, **kwargs):
    # Operations with streaming output (designated by an `outfile`
    # argument) cannot support --cli-input-json.
    if 'outfile' not in argument_table:
        CliInputJSONArgument(session).add_to_arg_table(argument_table)
class CliInputJSONArgument(OverrideRequiredArgsArgument):
    """Accept a JSON string as the entire input for a command.

    Ideally the value is a filled-out JSON file generated by
    ``--generate-cli-skeleton``.  Values given explicitly on the command
    line are never clobbered by items from the JSON string.
    """

    ARG_DATA = {
        'name': 'cli-input-json',
        'help_text': 'Performs service operation based on the JSON string '
                     'provided. The JSON string follows the format provided '
                     'by ``--generate-cli-skeleton``. If other arguments are '
                     'provided on the command line, the CLI values will override '
                     'the JSON-provided values. It is not possible to pass '
                     'arbitrary binary values using a JSON-provided value as '
                     'the string will be taken literally.'
    }

    def __init__(self, session):
        super(CliInputJSONArgument, self).__init__(session)

    def _register_argument_action(self):
        self._session.register('calling-command.*',
                               self.add_to_call_parameters)
        super(CliInputJSONArgument, self)._register_argument_action()

    def add_to_call_parameters(self, call_parameters, parsed_args,
                               parsed_globals, **kwargs):
        # Only act when --cli-input-json was given on the command line.
        input_json = getattr(parsed_args, 'cli_input_json', None)
        if input_json is None:
            return
        # The value may be a file reference; when nothing is retrieved
        # from a file, assume the value itself is the JSON document.
        retrieved_json = get_paramfile(input_json, LOCAL_PREFIX_MAP)
        if retrieved_json is None:
            retrieved_json = input_json
        try:
            input_data = json.loads(retrieved_json)
        except ValueError as e:
            raise ParamError(
                self.name, "Invalid JSON: %s\nJSON received: %s"
                % (e, retrieved_json))
        self._update_call_parameters(call_parameters, input_data)

    def _update_call_parameters(self, call_parameters, input_data):
        # Values already present (from the command line) take precedence
        # over the JSON-provided values.
        for key, value in input_data.items():
            call_parameters.setdefault(key, value)
def initialize(cli):
    """Entry point for the CloudFormation high level commands."""
    cli.register('building-command-table.cloudformation', inject_commands)


def inject_commands(command_table, session, **kwargs):
    """Add high level commands while the command table is being built.

    The injected names must not collide with existing low-level API
    call names.
    """
    command_table['package'] = PackageCommand(session)
    command_table['deploy'] = DeployCommand(session)


def is_path_value_valid(path):
    """Return True when *path* is a string value."""
    return isinstance(path, six.string_types)


def make_abs_path(directory, path):
    """Resolve *path* relative to *directory* unless it is not relative."""
    if is_path_value_valid(path) and not os.path.isabs(path):
        return os.path.normpath(os.path.join(directory, path))
    return path


def is_s3_url(url):
    """Return True when *url* parses as an s3:// URL."""
    try:
        parse_s3_url(url)
    except ValueError:
        return False
    return True


def is_local_folder(path):
    """Return True when *path* names an existing directory."""
    return is_path_value_valid(path) and os.path.isdir(path)


def is_local_file(path):
    """Return True when *path* names an existing regular file."""
    return is_path_value_valid(path) and os.path.isfile(path)


def is_zip_file(path):
    """Return True when *path* names a readable zip archive."""
    return is_path_value_valid(path) and zipfile.is_zipfile(path)
def parse_s3_url(url,
                 bucket_name_property="Bucket",
                 object_key_property="Key",
                 version_property=None):
    """Parse an ``s3://bucket/key?versionId=...`` URL into a dict.

    :param url: Candidate S3 URL.
    :param bucket_name_property: Key under which the bucket name is stored.
    :param object_key_property: Key under which the object key is stored.
    :param version_property: When not None, key under which a single
        ``versionId`` query value is stored, if present.
    :return: Dict holding the bucket, key, and optionally the version.
    :raise ValueError: When *url* is not a valid S3 URL.
    """
    if isinstance(url, six.string_types) and url.startswith("s3://"):
        # Python < 2.7.10 doesn't parse query parameters from URIs with a
        # custom scheme such as s3://blah/blah.  As a workaround, drop the
        # scheme name so "s3://foo/bar?v=1" => "//foo/bar?v=1".
        parsed = urlparse.urlparse(url[3:])
        query = urlparse.parse_qs(parsed.query)
        if parsed.netloc and parsed.path:
            result = {
                bucket_name_property: parsed.netloc,
                object_key_property: parsed.path.lstrip('/'),
            }
            # A query string with a single versionId field selects an
            # object version.
            if version_property is not None \
                    and len(query.get('versionId', [])) == 1:
                result[version_property] = query['versionId'][0]
            return result
    raise ValueError("URL given to the parse method is not a valid S3 url "
                     "{0}".format(url))


def upload_local_artifacts(resource_id, resource_dict, property_name,
                           parent_dir, uploader):
    """Upload local artifacts referenced by a resource property to S3.

    It is the responsibility of callers to ensure the property value is
    a valid string.  A path to a file uploads the file; a path to a
    folder zips the folder first; an omitted path zips and uploads the
    parent directory; an S3 URL is returned untouched.

    :param resource_id: Id of the CloudFormation resource.
    :param resource_dict: Dictionary containing the resource definition.
    :param property_name: Property of the resource holding the local path.
    :param parent_dir: Directory against which relative paths resolve.
    :param uploader: Method to upload files to S3.
    :return: S3 URL of the uploaded object.
    :raise: InvalidLocalPathError if the path is neither an S3 URL nor a
        local path.
    """
    local_path = jmespath.search(property_name, resource_dict)
    if local_path is None:
        # Property omitted: build and upload the root directory itself.
        local_path = parent_dir

    if is_s3_url(local_path):
        # A valid CloudFormation template specifies artifacts as S3 URLs;
        # nothing to do when the resource has no local artifacts.
        LOG.debug("Property {0} of {1} is already a S3 URL"
                  .format(property_name, resource_id))
        return local_path

    local_path = make_abs_path(parent_dir, local_path)

    # A folder is zipped and uploaded; a file is uploaded directly.
    if is_local_folder(local_path):
        return zip_and_upload(local_path, uploader)
    if is_local_file(local_path):
        return uploader.upload_with_dedup(local_path)

    raise exceptions.InvalidLocalPathError(
        resource_id=resource_id,
        property_name=property_name,
        local_path=local_path)


def zip_and_upload(local_path, uploader):
    """Zip *local_path* and upload the archive with deduplication.

    Fix: the context variable used to be named ``zipfile``, shadowing
    the imported ``zipfile`` module inside this function.
    """
    with zip_folder(local_path) as zip_file_path:
        return uploader.upload_with_dedup(zip_file_path)
@contextmanager
def zip_folder(folder_path):
    """Zip an entire folder and yield the path to the archive.

    Use inside a ``with`` statement so the zipfile is cleaned up after
    it is used.

    :param folder_path: Folder to archive.
    :return: Name of the zipfile.
    """
    prefix = os.path.join(tempfile.gettempdir(), "data-" + uuid.uuid4().hex)
    zipfile_name = make_zip(prefix, folder_path)
    try:
        yield zipfile_name
    finally:
        if os.path.exists(zipfile_name):
            os.remove(zipfile_name)


def make_zip(filename, source_root):
    """Create ``<filename>.zip`` containing everything under *source_root*.

    Symlinked directories are followed; entries are stored relative to
    *source_root*.

    :param filename: Path prefix for the archive ('.zip' is appended).
    :param source_root: Directory whose contents are archived.
    :return: Path of the created zip file.
    """
    zipfile_name = "{0}.zip".format(filename)
    source_root = os.path.abspath(source_root)
    with open(zipfile_name, 'wb') as f:
        zip_file = zipfile.ZipFile(f, 'w', zipfile.ZIP_DEFLATED)
        with contextlib.closing(zip_file) as zf:
            for root, dirs, files in os.walk(source_root, followlinks=True):
                # Fix: the loop variable used to be named ``filename``,
                # clobbering this function's parameter of the same name.
                for entry in files:
                    full_path = os.path.join(root, entry)
                    relative_path = os.path.relpath(full_path, source_root)
                    zf.write(full_path, relative_path)
    return zipfile_name


@contextmanager
def mktempfile():
    """Yield an open temporary text file; remove the file afterwards."""
    filename = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex)
    try:
        with open(filename, "w+") as handle:
            yield handle
    finally:
        if os.path.exists(filename):
            os.remove(filename)


def copy_to_temp_dir(filepath):
    """Copy *filepath* into a fresh temp directory and return that dir.

    The caller is responsible for removing the returned directory.
    """
    tmp_dir = tempfile.mkdtemp()
    dst = os.path.join(tmp_dir, os.path.basename(filepath))
    shutil.copy(filepath, dst)
    return tmp_dir
class Resource(object):
    """Base class representing an exportable CloudFormation resource."""

    RESOURCE_TYPE = None
    PROPERTY_NAME = None
    PACKAGE_NULL_PROPERTY = True
    # Set True in a subclass to have the exporter zip up a plain file
    # before uploading; useful for Lambda functions.
    FORCE_ZIP = False

    def __init__(self, uploader):
        self.uploader = uploader

    def export(self, resource_id, resource_dict, parent_dir):
        if resource_dict is None:
            return

        property_value = jmespath.search(self.PROPERTY_NAME, resource_dict)
        if not property_value and not self.PACKAGE_NULL_PROPERTY:
            return

        if isinstance(property_value, dict):
            LOG.debug("Property {0} of {1} resource is not a URL"
                      .format(self.PROPERTY_NAME, resource_id))
            return

        # A plain (non-zip) file is staged into a temp folder so that
        # folder can be zipped and uploaded instead.
        temp_dir = None
        if (is_local_file(property_value)
                and not is_zip_file(property_value) and self.FORCE_ZIP):
            temp_dir = copy_to_temp_dir(property_value)
            set_value_from_jmespath(
                resource_dict, self.PROPERTY_NAME, temp_dir)

        try:
            self.do_export(resource_id, resource_dict, parent_dir)
        except Exception as ex:
            LOG.debug("Unable to export", exc_info=ex)
            raise exceptions.ExportFailedError(
                resource_id=resource_id,
                property_name=self.PROPERTY_NAME,
                property_value=property_value,
                ex=ex)
        finally:
            if temp_dir:
                shutil.rmtree(temp_dir)

    def do_export(self, resource_id, resource_dict, parent_dir):
        """Upload the artifact and replace the property with its S3 URL."""
        url = upload_local_artifacts(resource_id, resource_dict,
                                     self.PROPERTY_NAME, parent_dir,
                                     self.uploader)
        set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, url)


class ResourceWithS3UrlDict(Resource):
    """Resource whose S3 location is a dict: {Bucket: "", Key: "", Version: ""}."""

    BUCKET_NAME_PROPERTY = None
    OBJECT_KEY_PROPERTY = None
    VERSION_PROPERTY = None

    def __init__(self, uploader):
        super(ResourceWithS3UrlDict, self).__init__(uploader)

    def do_export(self, resource_id, resource_dict, parent_dir):
        """Upload the artifact and store its S3 URL as a property dict."""
        url = upload_local_artifacts(resource_id, resource_dict,
                                     self.PROPERTY_NAME, parent_dir,
                                     self.uploader)
        parsed_url = parse_s3_url(
            url,
            bucket_name_property=self.BUCKET_NAME_PROPERTY,
            object_key_property=self.OBJECT_KEY_PROPERTY,
            version_property=self.VERSION_PROPERTY)
        set_value_from_jmespath(
            resource_dict, self.PROPERTY_NAME, parsed_url)


class ServerlessFunctionResource(Resource):
    RESOURCE_TYPE = "AWS::Serverless::Function"
    PROPERTY_NAME = "CodeUri"
    FORCE_ZIP = True


class ServerlessApiResource(Resource):
    RESOURCE_TYPE = "AWS::Serverless::Api"
    PROPERTY_NAME = "DefinitionUri"
    # Don't package the directory if DefinitionUri is omitted;
    # necessary to support DefinitionBody.
    PACKAGE_NULL_PROPERTY = False


class GraphQLSchemaResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::GraphQLSchema"
    PROPERTY_NAME = "DefinitionS3Location"
    # Don't package the directory if DefinitionS3Location is omitted;
    # necessary to support Definition.
    PACKAGE_NULL_PROPERTY = False


class AppSyncResolverRequestTemplateResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::Resolver"
    PROPERTY_NAME = "RequestMappingTemplateS3Location"
    # Don't package the directory if the property is omitted;
    # necessary to support RequestMappingTemplate.
    PACKAGE_NULL_PROPERTY = False


class AppSyncResolverResponseTemplateResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::Resolver"
    PROPERTY_NAME = "ResponseMappingTemplateS3Location"
    # Don't package the directory if the property is omitted;
    # necessary to support ResponseMappingTemplate.
    PACKAGE_NULL_PROPERTY = False


class AppSyncFunctionConfigurationRequestTemplateResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::FunctionConfiguration"
    PROPERTY_NAME = "RequestMappingTemplateS3Location"
    # Don't package the directory if the property is omitted;
    # necessary to support RequestMappingTemplate.
    PACKAGE_NULL_PROPERTY = False
class AppSyncFunctionConfigurationResponseTemplateResource(Resource):
    RESOURCE_TYPE = "AWS::AppSync::FunctionConfiguration"
    PROPERTY_NAME = "ResponseMappingTemplateS3Location"
    # Don't package the directory if the property is omitted;
    # necessary to support ResponseMappingTemplate.
    PACKAGE_NULL_PROPERTY = False


class LambdaFunctionResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::Lambda::Function"
    PROPERTY_NAME = "Code"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = "S3ObjectVersion"
    FORCE_ZIP = True


class ApiGatewayRestApiResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::ApiGateway::RestApi"
    PROPERTY_NAME = "BodyS3Location"
    PACKAGE_NULL_PROPERTY = False
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "Version"


class ElasticBeanstalkApplicationVersion(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::ElasticBeanstalk::ApplicationVersion"
    PROPERTY_NAME = "SourceBundle"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = None


class LambdaLayerVersionResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::Lambda::LayerVersion"
    PROPERTY_NAME = "Content"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = "S3ObjectVersion"
    FORCE_ZIP = True


class ServerlessLayerVersionResource(Resource):
    RESOURCE_TYPE = "AWS::Serverless::LayerVersion"
    PROPERTY_NAME = "ContentUri"
    FORCE_ZIP = True


class ServerlessRepoApplicationReadme(Resource):
    RESOURCE_TYPE = "AWS::ServerlessRepo::Application"
    PROPERTY_NAME = "ReadmeUrl"
    PACKAGE_NULL_PROPERTY = False


class ServerlessRepoApplicationLicense(Resource):
    RESOURCE_TYPE = "AWS::ServerlessRepo::Application"
    PROPERTY_NAME = "LicenseUrl"
    PACKAGE_NULL_PROPERTY = False


class StepFunctionsStateMachineDefinitionResource(ResourceWithS3UrlDict):
    RESOURCE_TYPE = "AWS::StepFunctions::StateMachine"
    PROPERTY_NAME = "DefinitionS3Location"
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "Version"
    PACKAGE_NULL_PROPERTY = False


class CloudFormationStackResource(Resource):
    """CloudFormation::Stack resource referencing a nested stack template.

    The nested template is referenced via the TemplateURL property.
    """

    RESOURCE_TYPE = "AWS::CloudFormation::Stack"
    PROPERTY_NAME = "TemplateURL"

    def __init__(self, uploader):
        super(CloudFormationStackResource, self).__init__(uploader)

    def do_export(self, resource_id, resource_dict, parent_dir):
        """Recursively export, upload and re-point a nested template.

        If the nested template is valid, export is run on it, the
        exported template is uploaded to S3, and TemplateURL is set to
        the uploaded template's URL.
        """
        template_path = resource_dict.get(self.PROPERTY_NAME, None)

        if (template_path is None or is_s3_url(template_path)
                or template_path.startswith(("http://", "https://"))):
            # Already remote; nothing to do.
            return

        abs_template_path = make_abs_path(parent_dir, template_path)
        if not is_local_file(abs_template_path):
            raise exceptions.InvalidTemplateUrlParameterError(
                property_name=self.PROPERTY_NAME,
                resource_id=resource_id,
                template_path=abs_template_path)

        exported_dict = Template(
            template_path, parent_dir, self.uploader).export()
        exported_str = yaml_dump(exported_dict)

        with mktempfile() as temporary_file:
            temporary_file.write(exported_str)
            temporary_file.flush()
            url = self.uploader.upload_with_dedup(
                temporary_file.name, "template")

        # The TemplateURL property requires a path-style S3 URL.
        parts = parse_s3_url(url, version_property="Version")
        s3_path_url = self.uploader.to_path_style_s3_url(
            parts["Key"], parts.get("Version", None))
        set_value_from_jmespath(
            resource_dict, self.PROPERTY_NAME, s3_path_url)
class ServerlessApplicationResource(CloudFormationStackResource):
    """Serverless::Application resource referencing a nested app template.

    The nested template is referenced via the Location property.
    """
    RESOURCE_TYPE = "AWS::Serverless::Application"
    PROPERTY_NAME = "Location"


class GlueJobCommandScriptLocationResource(Resource):
    """Glue::Job resource."""
    RESOURCE_TYPE = "AWS::Glue::Job"
    # Note the '.' in PROPERTY_NAME, implying the property is nested.
    PROPERTY_NAME = "Command.ScriptLocation"


RESOURCES_EXPORT_LIST = [
    ServerlessFunctionResource,
    ServerlessApiResource,
    GraphQLSchemaResource,
    AppSyncResolverRequestTemplateResource,
    AppSyncResolverResponseTemplateResource,
    AppSyncFunctionConfigurationRequestTemplateResource,
    AppSyncFunctionConfigurationResponseTemplateResource,
    ApiGatewayRestApiResource,
    LambdaFunctionResource,
    ElasticBeanstalkApplicationVersion,
    CloudFormationStackResource,
    ServerlessApplicationResource,
    ServerlessLayerVersionResource,
    LambdaLayerVersionResource,
    GlueJobCommandScriptLocationResource,
    StepFunctionsStateMachineDefinitionResource,
]

METADATA_EXPORT_LIST = [
    ServerlessRepoApplicationReadme,
    ServerlessRepoApplicationLicense,
]


def include_transform_export_handler(template_dict, uploader, parent_dir):
    """Upload the local file referenced by an AWS::Include transform."""
    if template_dict.get("Name", None) != "AWS::Include":
        return template_dict

    include_location = template_dict.get(
        "Parameters", {}).get("Location", None)
    if (not include_location
            or not is_path_value_valid(include_location)
            or is_s3_url(include_location)):
        # Location is either empty, not a string, or already an S3 URI.
        return template_dict

    # At this point Location is known to be a string with a local path.
    abs_include_location = os.path.join(parent_dir, include_location)
    if not is_local_file(abs_include_location):
        raise exceptions.InvalidLocalPathError(
            resource_id="AWS::Include",
            property_name="Location",
            local_path=abs_include_location)

    template_dict["Parameters"]["Location"] = \
        uploader.upload_with_dedup(abs_include_location)
    return template_dict


GLOBAL_EXPORT_DICT = {
    "Fn::Transform": include_transform_export_handler
}
__init__(self, template_path, parent_dir, uploader, resources_to_export=RESOURCES_EXPORT_LIST, metadata_to_export=METADATA_EXPORT_LIST): """ Reads the template and makes it ready for export """ if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)): raise ValueError("parent_dir parameter must be " "an absolute path to a folder {0}" .format(parent_dir)) abs_template_path = make_abs_path(parent_dir, template_path) template_dir = os.path.dirname(abs_template_path) with open(abs_template_path, "r") as handle: template_str = handle.read() self.template_dict = yaml_parse(template_str) self.template_dir = template_dir self.resources_to_export = resources_to_export self.metadata_to_export = metadata_to_export self.uploader = uploader def export_global_artifacts(self, template_dict): """ Template params such as AWS::Include transforms are not specific to any resource type but contain artifacts that should be exported, here we iterate through the template dict and export params with a handler defined in GLOBAL_EXPORT_DICT """ for key, val in template_dict.items(): if key in GLOBAL_EXPORT_DICT: template_dict[key] = GLOBAL_EXPORT_DICT[key](val, self.uploader, self.template_dir) elif isinstance(val, dict): self.export_global_artifacts(val) elif isinstance(val, list): for item in val: if isinstance(item, dict): self.export_global_artifacts(item) return template_dict def export_metadata(self, template_dict): """ Exports the local artifacts referenced by the metadata section in the given template to an s3 bucket. :return: The template with references to artifacts that have been exported to s3. 
""" if "Metadata" not in template_dict: return template_dict for metadata_type, metadata_dict in template_dict["Metadata"].items(): for exporter_class in self.metadata_to_export: if exporter_class.RESOURCE_TYPE != metadata_type: continue exporter = exporter_class(self.uploader) exporter.export(metadata_type, metadata_dict, self.template_dir) return template_dict def export(self): """ Exports the local artifacts referenced by the given template to an s3 bucket. :return: The template with references to artifacts that have been exported to s3. """ self.template_dict = self.export_metadata(self.template_dict) if "Resources" not in self.template_dict: return self.template_dict self.template_dict = self.export_global_artifacts(self.template_dict) for resource_id, resource in self.template_dict["Resources"].items(): resource_type = resource.get("Type", None) resource_dict = resource.get("Properties", None) for exporter_class in self.resources_to_export: if exporter_class.RESOURCE_TYPE != resource_type: continue # Export code resources exporter = exporter_class(self.uploader) exporter.export(resource_id, resource_dict, self.template_dir) return self.template_dict ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/cloudformation/deploy.py0000644000000000000000000003376500000000000024372 0ustar00rootroot00000000000000# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import os import sys import logging from botocore.client import Config from awscli.customizations.cloudformation import exceptions from awscli.customizations.cloudformation.deployer import Deployer from awscli.customizations.s3uploader import S3Uploader from awscli.customizations.cloudformation.yamlhelper import yaml_parse from awscli.customizations.commands import BasicCommand from awscli.compat import get_stdout_text_writer from awscli.utils import write_exception LOG = logging.getLogger(__name__) class DeployCommand(BasicCommand): MSG_NO_EXECUTE_CHANGESET = \ ("Changeset created successfully. Run the following command to " "review changes:" "\n" "aws cloudformation describe-change-set --change-set-name " "{changeset_id}" "\n") MSG_EXECUTE_SUCCESS = "Successfully created/updated stack - {stack_name}\n" PARAMETER_OVERRIDE_CMD = "parameter-overrides" TAGS_CMD = "tags" NAME = 'deploy' DESCRIPTION = BasicCommand.FROM_FILE("cloudformation", "_deploy_description.rst") ARG_TABLE = [ { 'name': 'template-file', 'required': True, 'help_text': ( 'The path where your AWS CloudFormation' ' template is located.' ) }, { 'name': 'stack-name', 'action': 'store', 'required': True, 'help_text': ( 'The name of the AWS CloudFormation stack you\'re deploying to.' ' If you specify an existing stack, the command updates the' ' stack. If you specify a new stack, the command creates it.' ) }, { 'name': 's3-bucket', 'required': False, 'help_text': ( 'The name of the S3 bucket where this command uploads your ' 'CloudFormation template. This is required the deployments of ' 'templates sized greater than 51,200 bytes' ) }, { "name": "force-upload", "action": "store_true", "help_text": ( 'Indicates whether to override existing files in the S3 bucket.' ' Specify this flag to upload artifacts even if they ' ' match existing artifacts in the S3 bucket.' 
) }, { 'name': 's3-prefix', 'help_text': ( 'A prefix name that the command adds to the' ' artifacts\' name when it uploads them to the S3 bucket.' ' The prefix name is a path name (folder name) for' ' the S3 bucket.' ) }, { 'name': 'kms-key-id', 'help_text': ( 'The ID of an AWS KMS key that the command uses' ' to encrypt artifacts that are at rest in the S3 bucket.' ) }, { 'name': PARAMETER_OVERRIDE_CMD, 'action': 'store', 'required': False, 'schema': { 'type': 'array', 'items': { 'type': 'string' } }, 'default': [], 'help_text': ( 'A list of parameter structures that specify input parameters' ' for your stack template. If you\'re updating a stack and you' ' don\'t specify a parameter, the command uses the stack\'s' ' existing value. For new stacks, you must specify' ' parameters that don\'t have a default value.' ' Syntax: ParameterKey1=ParameterValue1' ' ParameterKey2=ParameterValue2 ...' ) }, { 'name': 'capabilities', 'action': 'store', 'required': False, 'schema': { 'type': 'array', 'items': { 'type': 'string', 'enum': [ 'CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM' ] } }, 'default': [], 'help_text': ( 'A list of capabilities that you must specify before AWS' ' Cloudformation can create certain stacks. Some stack' ' templates might include resources that can affect' ' permissions in your AWS account, for example, by creating' ' new AWS Identity and Access Management (IAM) users. For' ' those stacks, you must explicitly acknowledge their' ' capabilities by specifying this parameter. ' ' The only valid values are CAPABILITY_IAM and' ' CAPABILITY_NAMED_IAM. If you have IAM resources, you can' ' specify either capability. If you have IAM resources with' ' custom names, you must specify CAPABILITY_NAMED_IAM. If you' ' don\'t specify this parameter, this action returns an' ' InsufficientCapabilities error.' 
) }, { 'name': 'no-execute-changeset', 'action': 'store_false', 'dest': 'execute_changeset', 'required': False, 'help_text': ( 'Indicates whether to execute the change set. Specify this' ' flag if you want to view your stack changes before' ' executing the change set. The command creates an' ' AWS CloudFormation change set and then exits without' ' executing the change set. After you view the change set,' ' execute it to implement your changes.' ) }, { 'name': 'role-arn', 'required': False, 'help_text': ( 'The Amazon Resource Name (ARN) of an AWS Identity and Access ' 'Management (IAM) role that AWS CloudFormation assumes when ' 'executing the change set.' ) }, { 'name': 'notification-arns', 'required': False, 'schema': { 'type': 'array', 'items': { 'type': 'string' } }, 'help_text': ( 'Amazon Simple Notification Service topic Amazon Resource Names' ' (ARNs) that AWS CloudFormation associates with the stack.' ) }, { 'name': 'fail-on-empty-changeset', 'required': False, 'action': 'store_true', 'group_name': 'fail-on-empty-changeset', 'dest': 'fail_on_empty_changeset', 'default': True, 'help_text': ( 'Specify if the CLI should return a non-zero exit code if ' 'there are no changes to be made to the stack. The default ' 'behavior is to return a non-zero exit code.' ) }, { 'name': 'no-fail-on-empty-changeset', 'required': False, 'action': 'store_false', 'group_name': 'fail-on-empty-changeset', 'dest': 'fail_on_empty_changeset', 'default': True, 'help_text': ( 'Causes the CLI to return an exit code of 0 if there are no ' 'changes to be made to the stack.' ) }, { 'name': TAGS_CMD, 'action': 'store', 'required': False, 'schema': { 'type': 'array', 'items': { 'type': 'string' } }, 'default': [], 'help_text': ( 'A list of tags to associate with the stack that is created' ' or updated. AWS CloudFormation also propagates these tags' ' to resources in the stack if the resource supports it.' ' Syntax: TagKey1=TagValue1 TagKey2=TagValue2 ...' 
) } ] def _run_main(self, parsed_args, parsed_globals): cloudformation_client = \ self._session.create_client( 'cloudformation', region_name=parsed_globals.region, endpoint_url=parsed_globals.endpoint_url, verify=parsed_globals.verify_ssl) template_path = parsed_args.template_file if not os.path.isfile(template_path): raise exceptions.InvalidTemplatePathError( template_path=template_path) # Parse parameters with open(template_path, "r") as handle: template_str = handle.read() stack_name = parsed_args.stack_name parameter_overrides = self.parse_key_value_arg( parsed_args.parameter_overrides, self.PARAMETER_OVERRIDE_CMD) tags_dict = self.parse_key_value_arg(parsed_args.tags, self.TAGS_CMD) tags = [{"Key": key, "Value": value} for key, value in tags_dict.items()] template_dict = yaml_parse(template_str) parameters = self.merge_parameters(template_dict, parameter_overrides) template_size = os.path.getsize(parsed_args.template_file) if template_size > 51200 and not parsed_args.s3_bucket: raise exceptions.DeployBucketRequiredError() bucket = parsed_args.s3_bucket if bucket: s3_client = self._session.create_client( "s3", config=Config(signature_version='s3v4'), region_name=parsed_globals.region, verify=parsed_globals.verify_ssl) s3_uploader = S3Uploader(s3_client, bucket, parsed_args.s3_prefix, parsed_args.kms_key_id, parsed_args.force_upload) else: s3_uploader = None deployer = Deployer(cloudformation_client) return self.deploy(deployer, stack_name, template_str, parameters, parsed_args.capabilities, parsed_args.execute_changeset, parsed_args.role_arn, parsed_args.notification_arns, s3_uploader, tags, parsed_args.fail_on_empty_changeset) def deploy(self, deployer, stack_name, template_str, parameters, capabilities, execute_changeset, role_arn, notification_arns, s3_uploader, tags, fail_on_empty_changeset=True): try: result = deployer.create_and_wait_for_changeset( stack_name=stack_name, cfn_template=template_str, parameter_values=parameters, capabilities=capabilities, 
role_arn=role_arn, notification_arns=notification_arns, s3_uploader=s3_uploader, tags=tags ) except exceptions.ChangeEmptyError as ex: if fail_on_empty_changeset: raise write_exception(ex, outfile=get_stdout_text_writer()) return 0 if execute_changeset: deployer.execute_changeset(result.changeset_id, stack_name) deployer.wait_for_execute(stack_name, result.changeset_type) sys.stdout.write(self.MSG_EXECUTE_SUCCESS.format( stack_name=stack_name)) else: sys.stdout.write(self.MSG_NO_EXECUTE_CHANGESET.format( changeset_id=result.changeset_id)) sys.stdout.flush() return 0 def merge_parameters(self, template_dict, parameter_overrides): """ CloudFormation CreateChangeset requires a value for every parameter from the template, either specifying a new value or use previous value. For convenience, this method will accept new parameter values and generates a dict of all parameters in a format that ChangeSet API will accept :param parameter_overrides: :return: """ parameter_values = [] if not isinstance(template_dict.get("Parameters", None), dict): return parameter_values for key, value in template_dict["Parameters"].items(): obj = { "ParameterKey": key } if key in parameter_overrides: obj["ParameterValue"] = parameter_overrides[key] else: obj["UsePreviousValue"] = True parameter_values.append(obj) return parameter_values def parse_key_value_arg(self, arg_value, argname): """ Converts arguments that are passed as list of "Key=Value" strings into a real dictionary. 
:param arg_value list: Array of strings, where each string is of form Key=Value :param argname string: Name of the argument that contains the value :return dict: Dictionary representing the key/value pairs """ result = {} for data in arg_value: # Split at first '=' from left key_value_pair = data.split("=", 1) if len(key_value_pair) != 2: raise exceptions.InvalidKeyValuePairArgumentError( argname=argname, value=key_value_pair) result[key_value_pair[0]] = key_value_pair[1] return result ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/cloudformation/deployer.py0000644000000000000000000002271200000000000024707 0ustar00rootroot00000000000000# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import sys import time import logging import botocore import collections from awscli.customizations.cloudformation import exceptions from awscli.customizations.cloudformation.artifact_exporter import mktempfile, parse_s3_url from datetime import datetime LOG = logging.getLogger(__name__) ChangeSetResult = collections.namedtuple( "ChangeSetResult", ["changeset_id", "changeset_type"]) class Deployer(object): def __init__(self, cloudformation_client, changeset_prefix="awscli-cloudformation-package-deploy-"): self._client = cloudformation_client self.changeset_prefix = changeset_prefix def has_stack(self, stack_name): """ Checks if a CloudFormation stack with given name exists :param stack_name: Name or ID of the stack :return: True if stack exists. False otherwise """ try: resp = self._client.describe_stacks(StackName=stack_name) if len(resp["Stacks"]) != 1: return False # When you run CreateChangeSet on a a stack that does not exist, # CloudFormation will create a stack and set it's status # REVIEW_IN_PROGRESS. However this stack is cannot be manipulated # by "update" commands. Under this circumstances, we treat like # this stack does not exist and call CreateChangeSet will # ChangeSetType set to CREATE and not UPDATE. stack = resp["Stacks"][0] return stack["StackStatus"] != "REVIEW_IN_PROGRESS" except botocore.exceptions.ClientError as e: # If a stack does not exist, describe_stacks will throw an # exception. Unfortunately we don't have a better way than parsing # the exception msg to understand the nature of this exception. msg = str(e) if "Stack with id {0} does not exist".format(stack_name) in msg: LOG.debug("Stack with id {0} does not exist".format( stack_name)) return False else: # We don't know anything about this exception. 
Don't handle LOG.debug("Unable to get stack details.", exc_info=e) raise e def create_changeset(self, stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags): """ Call Cloudformation to create a changeset and wait for it to complete :param stack_name: Name or ID of stack :param cfn_template: CloudFormation template string :param parameter_values: Template parameters object :param capabilities: Array of capabilities passed to CloudFormation :param tags: Array of tags passed to CloudFormation :return: """ now = datetime.utcnow().isoformat() description = "Created by AWS CLI at {0} UTC".format(now) # Each changeset will get a unique name based on time changeset_name = self.changeset_prefix + str(int(time.time())) if not self.has_stack(stack_name): changeset_type = "CREATE" # When creating a new stack, UsePreviousValue=True is invalid. # For such parameters, users should either override with new value, # or set a Default value in template to successfully create a stack. parameter_values = [x for x in parameter_values if not x.get("UsePreviousValue", False)] else: changeset_type = "UPDATE" # UsePreviousValue not valid if parameter is new summary = self._client.get_template_summary(StackName=stack_name) existing_parameters = [parameter['ParameterKey'] for parameter in \ summary['Parameters']] parameter_values = [x for x in parameter_values if not (x.get("UsePreviousValue", False) and \ x["ParameterKey"] not in existing_parameters)] kwargs = { 'ChangeSetName': changeset_name, 'StackName': stack_name, 'TemplateBody': cfn_template, 'ChangeSetType': changeset_type, 'Parameters': parameter_values, 'Capabilities': capabilities, 'Description': description, 'Tags': tags, } # If an S3 uploader is available, use TemplateURL to deploy rather than # TemplateBody. This is required for large templates. 
if s3_uploader: with mktempfile() as temporary_file: temporary_file.write(kwargs.pop('TemplateBody')) temporary_file.flush() url = s3_uploader.upload_with_dedup( temporary_file.name, "template") # TemplateUrl property requires S3 URL to be in path-style format parts = parse_s3_url(url, version_property="Version") kwargs['TemplateURL'] = s3_uploader.to_path_style_s3_url(parts["Key"], parts.get("Version", None)) # don't set these arguments if not specified to use existing values if role_arn is not None: kwargs['RoleARN'] = role_arn if notification_arns is not None: kwargs['NotificationARNs'] = notification_arns try: resp = self._client.create_change_set(**kwargs) return ChangeSetResult(resp["Id"], changeset_type) except Exception as ex: LOG.debug("Unable to create changeset", exc_info=ex) raise ex def wait_for_changeset(self, changeset_id, stack_name): """ Waits until the changeset creation completes :param changeset_id: ID or name of the changeset :param stack_name: Stack name :return: Latest status of the create-change-set operation """ sys.stdout.write("\nWaiting for changeset to be created..\n") sys.stdout.flush() # Wait for changeset to be created waiter = self._client.get_waiter("change_set_create_complete") # Poll every 5 seconds. Changeset creation should be fast waiter_config = {'Delay': 5} try: waiter.wait(ChangeSetName=changeset_id, StackName=stack_name, WaiterConfig=waiter_config) except botocore.exceptions.WaiterError as ex: LOG.debug("Create changeset waiter exception", exc_info=ex) resp = ex.last_response status = resp["Status"] reason = resp["StatusReason"] if status == "FAILED" and \ "The submitted information didn't contain changes." in reason or \ "No updates are to be performed" in reason: raise exceptions.ChangeEmptyError(stack_name=stack_name) raise RuntimeError("Failed to create the changeset: {0} " "Status: {1}. 
Reason: {2}" .format(ex, status, reason)) def execute_changeset(self, changeset_id, stack_name): """ Calls CloudFormation to execute changeset :param changeset_id: ID of the changeset :param stack_name: Name or ID of the stack :return: Response from execute-change-set call """ return self._client.execute_change_set( ChangeSetName=changeset_id, StackName=stack_name) def wait_for_execute(self, stack_name, changeset_type): sys.stdout.write("Waiting for stack create/update to complete\n") sys.stdout.flush() # Pick the right waiter if changeset_type == "CREATE": waiter = self._client.get_waiter("stack_create_complete") elif changeset_type == "UPDATE": waiter = self._client.get_waiter("stack_update_complete") else: raise RuntimeError("Invalid changeset type {0}" .format(changeset_type)) # Poll every 30 seconds. Polling too frequently risks hitting rate limits # on CloudFormation's DescribeStacks API waiter_config = { 'Delay': 30, 'MaxAttempts': 120, } try: waiter.wait(StackName=stack_name, WaiterConfig=waiter_config) except botocore.exceptions.WaiterError as ex: LOG.debug("Execute changeset waiter exception", exc_info=ex) raise exceptions.DeployFailedError(stack_name=stack_name) def create_and_wait_for_changeset(self, stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags): result = self.create_changeset( stack_name, cfn_template, parameter_values, capabilities, role_arn, notification_arns, s3_uploader, tags) self.wait_for_changeset(result.changeset_id, stack_name) return result ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/cloudformation/exceptions.py0000644000000000000000000000364100000000000025245 0ustar00rootroot00000000000000 class CloudFormationCommandError(Exception): fmt = 'An unspecified error occurred' def __init__(self, **kwargs): msg = self.fmt.format(**kwargs) Exception.__init__(self, msg) self.kwargs = kwargs 
class InvalidTemplatePathError(CloudFormationCommandError): fmt = "Invalid template path {template_path}" class ChangeEmptyError(CloudFormationCommandError): fmt = "No changes to deploy. Stack {stack_name} is up to date" class InvalidLocalPathError(CloudFormationCommandError): fmt = ("Parameter {property_name} of resource {resource_id} refers " "to a file or folder that does not exist {local_path}") class InvalidTemplateUrlParameterError(CloudFormationCommandError): fmt = ("{property_name} parameter of {resource_id} resource is invalid. " "It must be a S3 URL or path to CloudFormation " "template file. Actual: {template_path}") class ExportFailedError(CloudFormationCommandError): fmt = ("Unable to upload artifact {property_value} referenced " "by {property_name} parameter of {resource_id} resource." "\n" "{ex}") class InvalidKeyValuePairArgumentError(CloudFormationCommandError): fmt = ("{value} value passed to --{argname} must be of format " "Key=Value") class DeployFailedError(CloudFormationCommandError): fmt = \ ("Failed to create/update the stack. Run the following command" "\n" "to fetch the list of events leading up to the failure" "\n" "aws cloudformation describe-stack-events --stack-name {stack_name}") class DeployBucketRequiredError(CloudFormationCommandError): fmt = \ ("Templates with a size greater than 51,200 bytes must be deployed " "via an S3 Bucket. Please add the --s3-bucket parameter to your " "command. The local template will be copied to that S3 bucket and " "then deployed.") ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/cloudformation/package.py0000644000000000000000000001365600000000000024466 0ustar00rootroot00000000000000# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. 
A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import os import logging import sys import json from botocore.client import Config from awscli.customizations.cloudformation.artifact_exporter import Template from awscli.customizations.cloudformation.yamlhelper import yaml_dump from awscli.customizations.cloudformation import exceptions from awscli.customizations.commands import BasicCommand from awscli.customizations.s3uploader import S3Uploader LOG = logging.getLogger(__name__) class PackageCommand(BasicCommand): MSG_PACKAGED_TEMPLATE_WRITTEN = ( "Successfully packaged artifacts and wrote output template " "to file {output_file_name}." "\n" "Execute the following command to deploy the packaged template" "\n" "aws cloudformation deploy --template-file {output_file_path} " "--stack-name " "\n") NAME = "package" DESCRIPTION = BasicCommand.FROM_FILE("cloudformation", "_package_description.rst") ARG_TABLE = [ { 'name': 'template-file', 'required': True, 'help_text': ( 'The path where your AWS CloudFormation' ' template is located.' ) }, { 'name': 's3-bucket', 'required': True, 'help_text': ( 'The name of the S3 bucket where this command uploads' ' the artifacts that are referenced in your template.' ) }, { 'name': 's3-prefix', 'help_text': ( 'A prefix name that the command adds to the' ' artifacts\' name when it uploads them to the S3 bucket.' ' The prefix name is a path name (folder name) for' ' the S3 bucket.' ) }, { 'name': 'kms-key-id', 'help_text': ( 'The ID of an AWS KMS key that the command uses' ' to encrypt artifacts that are at rest in the S3 bucket.' 
) }, { "name": "output-template-file", "help_text": ( "The path to the file where the command writes the" " output AWS CloudFormation template. If you don't specify" " a path, the command writes the template to the standard" " output." ) }, { "name": "use-json", "action": "store_true", "help_text": ( "Indicates whether to use JSON as the format for the output AWS" " CloudFormation template. YAML is used by default." ) }, { "name": "force-upload", "action": "store_true", "help_text": ( 'Indicates whether to override existing files in the S3 bucket.' ' Specify this flag to upload artifacts even if they ' ' match existing artifacts in the S3 bucket.' ) }, { "name": "metadata", "cli_type_name": "map", "schema": { "type": "map", "key": {"type": "string"}, "value": {"type": "string"} }, "help_text": "A map of metadata to attach to *ALL* the artifacts that" " are referenced in your template." } ] def _run_main(self, parsed_args, parsed_globals): s3_client = self._session.create_client( "s3", config=Config(signature_version='s3v4'), region_name=parsed_globals.region, verify=parsed_globals.verify_ssl) template_path = parsed_args.template_file if not os.path.isfile(template_path): raise exceptions.InvalidTemplatePathError( template_path=template_path) bucket = parsed_args.s3_bucket self.s3_uploader = S3Uploader(s3_client, bucket, parsed_args.s3_prefix, parsed_args.kms_key_id, parsed_args.force_upload) # attach the given metadata to the artifacts to be uploaded self.s3_uploader.artifact_metadata = parsed_args.metadata output_file = parsed_args.output_template_file use_json = parsed_args.use_json exported_str = self._export(template_path, use_json) sys.stdout.write("\n") self.write_output(output_file, exported_str) if output_file: msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format( output_file_name=output_file, output_file_path=os.path.abspath(output_file)) sys.stdout.write(msg) sys.stdout.flush() return 0 def _export(self, template_path, use_json): template = 
Template(template_path, os.getcwd(), self.s3_uploader) exported_template = template.export() if use_json: exported_str = json.dumps(exported_template, indent=4, ensure_ascii=False) else: exported_str = yaml_dump(exported_template) return exported_str def write_output(self, output_file_name, data): if output_file_name is None: sys.stdout.write(data) return with open(output_file_name, "w") as fp: fp.write(data) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/cloudformation/yamlhelper.py0000644000000000000000000000642600000000000025232 0ustar00rootroot00000000000000# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. from botocore.compat import json from botocore.compat import OrderedDict import yaml from yaml.resolver import ScalarNode, SequenceNode from awscli.compat import six def intrinsics_multi_constructor(loader, tag_prefix, node): """ YAML constructor to parse CloudFormation intrinsics. 
This will return a dictionary with key being the instrinsic name """ # Get the actual tag name excluding the first exclamation tag = node.tag[1:] # Some intrinsic functions doesn't support prefix "Fn::" prefix = "Fn::" if tag in ["Ref", "Condition"]: prefix = "" cfntag = prefix + tag if tag == "GetAtt" and isinstance(node.value, six.string_types): # ShortHand notation for !GetAtt accepts Resource.Attribute format # while the standard notation is to use an array # [Resource, Attribute]. Convert shorthand to standard format value = node.value.split(".", 1) elif isinstance(node, ScalarNode): # Value of this node is scalar value = loader.construct_scalar(node) elif isinstance(node, SequenceNode): # Value of this node is an array (Ex: [1,2]) value = loader.construct_sequence(node) else: # Value of this node is an mapping (ex: {foo: bar}) value = loader.construct_mapping(node) return {cfntag: value} def _dict_representer(dumper, data): return dumper.represent_dict(data.items()) def yaml_dump(dict_to_dump): """ Dumps the dictionary as a YAML document :param dict_to_dump: :return: """ FlattenAliasDumper.add_representer(OrderedDict, _dict_representer) return yaml.dump( dict_to_dump, default_flow_style=False, Dumper=FlattenAliasDumper, ) def _dict_constructor(loader, node): # Necessary in order to make yaml merge tags work loader.flatten_mapping(node) return OrderedDict(loader.construct_pairs(node)) class SafeLoaderWrapper(yaml.SafeLoader): """Isolated safe loader to allow for customizations without global changes. """ pass def yaml_parse(yamlstr): """Parse a yaml string""" try: # PyYAML doesn't support json as well as it should, so if the input # is actually just json it is better to parse it with the standard # json parser. 
return json.loads(yamlstr, object_pairs_hook=OrderedDict) except ValueError: loader = SafeLoaderWrapper loader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _dict_constructor) loader.add_multi_constructor("!", intrinsics_multi_constructor) return yaml.load(yamlstr, loader) class FlattenAliasDumper(yaml.SafeDumper): def ignore_aliases(self, data): return True ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/cloudfront.py0000644000000000000000000002452400000000000022221 0ustar00rootroot00000000000000# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import sys import time import random import rsa from botocore.utils import parse_to_aware_datetime from botocore.signers import CloudFrontSigner from awscli.arguments import CustomArgument from awscli.customizations.utils import validate_mutually_exclusive_handler from awscli.customizations.commands import BasicCommand def register(event_handler): event_handler.register('building-command-table.cloudfront', _add_sign) # Provides a simpler --paths for ``aws cloudfront create-invalidation`` event_handler.register( 'building-argument-table.cloudfront.create-invalidation', _add_paths) event_handler.register( 'operation-args-parsed.cloudfront.create-invalidation', validate_mutually_exclusive_handler(['invalidation_batch'], ['paths'])) event_handler.register( 'operation-args-parsed.cloudfront.create-distribution', validate_mutually_exclusive_handler( ['default_root_object', 'origin_domain_name'], ['distribution_config'])) event_handler.register( 'building-argument-table.cloudfront.create-distribution', lambda argument_table, **kwargs: argument_table.__setitem__( 'origin-domain-name', OriginDomainName(argument_table))) event_handler.register( 'building-argument-table.cloudfront.create-distribution', lambda argument_table, **kwargs: argument_table.__setitem__( 'default-root-object', CreateDefaultRootObject(argument_table))) context = {} event_handler.register( 'top-level-args-parsed', context.update, unique_id='cloudfront') event_handler.register( 'operation-args-parsed.cloudfront.update-distribution', validate_mutually_exclusive_handler( ['default_root_object'], ['distribution_config'])) event_handler.register( 'building-argument-table.cloudfront.update-distribution', lambda argument_table, **kwargs: argument_table.__setitem__( 'default-root-object', UpdateDefaultRootObject( context=context, argument_table=argument_table))) def unique_string(prefix='cli'): return '%s-%s-%s' % (prefix, int(time.time()), random.randint(1, 1000000)) def _add_paths(argument_table, **kwargs): 
argument_table['invalidation-batch'].required = False argument_table['paths'] = PathsArgument() class PathsArgument(CustomArgument): def __init__(self): doc = ( 'The space-separated paths to be invalidated.' ' Note: --invalidation-batch and --paths are mututally exclusive.' ) super(PathsArgument, self).__init__('paths', nargs='+', help_text=doc) def add_to_params(self, parameters, value): if value is not None: parameters['InvalidationBatch'] = { "CallerReference": unique_string(), "Paths": {"Quantity": len(value), "Items": value}, } class ExclusiveArgument(CustomArgument): DOC = '%s This argument and --%s are mututally exclusive.' def __init__(self, name, argument_table, exclusive_to='distribution-config', help_text=''): argument_table[exclusive_to].required = False super(ExclusiveArgument, self).__init__( name, help_text=self.DOC % (help_text, exclusive_to)) def distribution_config_template(self): return { "CallerReference": unique_string(), "Origins": {"Quantity": 0, "Items": []}, "DefaultCacheBehavior": { "TargetOriginId": "placeholder", "ForwardedValues": { "QueryString": False, "Cookies": {"Forward": "none"}, }, "TrustedSigners": { "Enabled": False, "Quantity": 0 }, "ViewerProtocolPolicy": "allow-all", "MinTTL": 0 }, "Enabled": True, "Comment": "", } class OriginDomainName(ExclusiveArgument): def __init__(self, argument_table): super(OriginDomainName, self).__init__( 'origin-domain-name', argument_table, help_text='The domain name for your origin.') def add_to_params(self, parameters, value): if value is None: return parameters.setdefault( 'DistributionConfig', self.distribution_config_template()) origin_id = unique_string(prefix=value) item = {"Id": origin_id, "DomainName": value, "OriginPath": ''} if item['DomainName'].endswith('.s3.amazonaws.com'): # We do not need to detect '.s3[\w-].amazonaws.com' as S3 buckets, # because CloudFront treats GovCloud S3 buckets as custom domain. 
# http://docs.aws.amazon.com/govcloud-us/latest/UserGuide/setting-up-cloudfront.html item["S3OriginConfig"] = {"OriginAccessIdentity": ""} else: item["CustomOriginConfig"] = { 'HTTPPort': 80, 'HTTPSPort': 443, 'OriginProtocolPolicy': 'http-only'} parameters['DistributionConfig']['Origins'] = { "Quantity": 1, "Items": [item]} parameters['DistributionConfig']['DefaultCacheBehavior'][ 'TargetOriginId'] = origin_id class CreateDefaultRootObject(ExclusiveArgument): def __init__(self, argument_table, help_text=''): super(CreateDefaultRootObject, self).__init__( 'default-root-object', argument_table, help_text=help_text or ( 'The object that you want CloudFront to return (for example, ' 'index.html) when a viewer request points to your root URL.')) def add_to_params(self, parameters, value): if value is not None: parameters.setdefault( 'DistributionConfig', self.distribution_config_template()) parameters['DistributionConfig']['DefaultRootObject'] = value class UpdateDefaultRootObject(CreateDefaultRootObject): def __init__(self, context, argument_table): super(UpdateDefaultRootObject, self).__init__( argument_table, help_text=( 'The object that you want CloudFront to return (for example, ' 'index.html) when a viewer request points to your root URL. 
' 'CLI will automatically make a get-distribution-config call ' 'to load and preserve your other settings.')) self.context = context def add_to_params(self, parameters, value): if value is not None: client = self.context['session'].create_client( 'cloudfront', region_name=self.context['parsed_args'].region, endpoint_url=self.context['parsed_args'].endpoint_url, verify=self.context['parsed_args'].verify_ssl) response = client.get_distribution_config(Id=parameters['Id']) parameters['IfMatch'] = response['ETag'] parameters['DistributionConfig'] = response['DistributionConfig'] parameters['DistributionConfig']['DefaultRootObject'] = value def _add_sign(command_table, session, **kwargs): command_table['sign'] = SignCommand(session) class SignCommand(BasicCommand): NAME = 'sign' DESCRIPTION = 'Sign a given url.' DATE_FORMAT = """Supported formats include: YYYY-MM-DD (which means 0AM UTC of that day), YYYY-MM-DDThh:mm:ss (with default timezone as UTC), YYYY-MM-DDThh:mm:ss+hh:mm or YYYY-MM-DDThh:mm:ss-hh:mm (with offset), or EpochTime (which always means UTC). Do NOT use YYYYMMDD, because it will be treated as EpochTime.""" ARG_TABLE = [ { 'name': 'url', 'no_paramfile': True, # To disable the default paramfile behavior 'required': True, 'help_text': 'The URL to be signed', }, { 'name': 'key-pair-id', 'required': True, 'help_text': ( "The active CloudFront key pair Id for the key pair " "that you're using to generate the signature."), }, { 'name': 'private-key', 'required': True, 'help_text': 'file://path/to/your/private-key.pem', }, { 'name': 'date-less-than', 'required': True, 'help_text': 'The expiration date and time for the URL. ' + DATE_FORMAT, }, { 'name': 'date-greater-than', 'help_text': 'An optional start date and time for the URL. ' + DATE_FORMAT, }, { 'name': 'ip-address', 'help_text': ( 'An optional IP address or IP address range to allow client ' 'making the GET request from. 
Format: x.x.x.x/x or x.x.x.x'), }, ] def _run_main(self, args, parsed_globals): signer = CloudFrontSigner( args.key_pair_id, RSASigner(args.private_key).sign) date_less_than = parse_to_aware_datetime(args.date_less_than) date_greater_than = args.date_greater_than if date_greater_than is not None: date_greater_than = parse_to_aware_datetime(date_greater_than) if date_greater_than is not None or args.ip_address is not None: policy = signer.build_policy( args.url, date_less_than, date_greater_than=date_greater_than, ip_address=args.ip_address) sys.stdout.write(signer.generate_presigned_url( args.url, policy=policy)) else: sys.stdout.write(signer.generate_presigned_url( args.url, date_less_than=date_less_than)) return 0 class RSASigner(object): def __init__(self, private_key): self.priv_key = rsa.PrivateKey.load_pkcs1(private_key.encode('utf8')) def sign(self, message): return rsa.sign(message, self.priv_key, 'SHA-1') ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/cloudsearch.py0000644000000000000000000001030300000000000022324 0ustar00rootroot00000000000000# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import logging from awscli.customizations.flatten import FlattenArguments, SEP from botocore.compat import OrderedDict LOG = logging.getLogger(__name__) DEFAULT_VALUE_TYPE_MAP = { 'Int': int, 'Double': float, 'IntArray': int, 'DoubleArray': float } def index_hydrate(params, container, cli_type, key, value): """ Hydrate an index-field option value to construct something like:: { 'index_field': { 'DoubleOptions': { 'DefaultValue': 0.0 } } } """ if 'IndexField' not in params: params['IndexField'] = {} if 'IndexFieldType' not in params['IndexField']: raise RuntimeError('You must pass the --type option.') # Find the type and transform it for the type options field name # E.g: int-array => IntArray _type = params['IndexField']['IndexFieldType'] _type = ''.join([i.capitalize() for i in _type.split('-')]) # ``index_field`` of type ``latlon`` is mapped to ``Latlon``. # However, it is defined as ``LatLon`` in the model so it needs to # be changed. if _type == 'Latlon': _type = 'LatLon' # Transform string value to the correct type? if key.split(SEP)[-1] == 'DefaultValue': value = DEFAULT_VALUE_TYPE_MAP.get(_type, lambda x: x)(value) # Set the proper options field if _type + 'Options' not in params['IndexField']: params['IndexField'][_type + 'Options'] = {} params['IndexField'][_type + 'Options'][key.split(SEP)[-1]] = value FLATTEN_CONFIG = { "define-expression": { "expression": { "keep": False, "flatten": OrderedDict([ # Order is crucial here! We're # flattening ExpressionValue to be "expression", # but this is the name ("expression") of the our parent # key, the top level nested param. ("ExpressionName", {"name": "name"}), ("ExpressionValue", {"name": "expression"}),]), } }, "define-index-field": { "index-field": { "keep": False, # We use an ordered dict because `type` needs to be parsed before # any of the Options values. 
"flatten": OrderedDict([ ("IndexFieldName", {"name": "name"}), ("IndexFieldType", {"name": "type"}), ("IntOptions.DefaultValue", {"name": "default-value", "type": "string", "hydrate": index_hydrate}), ("IntOptions.FacetEnabled", {"name": "facet-enabled", "hydrate": index_hydrate }), ("IntOptions.SearchEnabled", {"name": "search-enabled", "hydrate": index_hydrate}), ("IntOptions.ReturnEnabled", {"name": "return-enabled", "hydrate": index_hydrate}), ("IntOptions.SortEnabled", {"name": "sort-enabled", "hydrate": index_hydrate}), ("TextOptions.HighlightEnabled", {"name": "highlight-enabled", "hydrate": index_hydrate}), ("TextOptions.AnalysisScheme", {"name": "analysis-scheme", "hydrate": index_hydrate}) ]) } } } def initialize(cli): """ The entry point for CloudSearch customizations. """ flattened = FlattenArguments('cloudsearch', FLATTEN_CONFIG) flattened.register(cli) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/cloudsearchdomain.py0000644000000000000000000000206200000000000023517 0ustar00rootroot00000000000000# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. """Customizations for the cloudsearchdomain command. This module customizes the cloudsearchdomain command: * Add validation that --endpoint-url is required. 
""" def register_cloudsearchdomain(cli): cli.register_last('calling-command.cloudsearchdomain', validate_endpoint_url) def validate_endpoint_url(parsed_globals, **kwargs): if parsed_globals.endpoint_url is None: return ValueError( "--endpoint-url is required for cloudsearchdomain commands") ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1642014456.0892723 awscli-1.22.34/awscli/customizations/cloudtrail/0000755000000000000000000000000000000000000021623 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/cloudtrail/__init__.py0000644000000000000000000000247100000000000023740 0ustar00rootroot00000000000000# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. from .subscribe import CloudTrailSubscribe, CloudTrailUpdate from .validation import CloudTrailValidateLogs def initialize(cli): """ The entry point for CloudTrail high level commands. """ cli.register('building-command-table.cloudtrail', inject_commands) def inject_commands(command_table, session, **kwargs): """ Called when the CloudTrail command table is being built. Used to inject new high level commands into the command list. These high level commands must not collide with existing low-level API call names. 
""" command_table['create-subscription'] = CloudTrailSubscribe(session) command_table['update-subscription'] = CloudTrailUpdate(session) command_table['validate-logs'] = CloudTrailValidateLogs(session) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/cloudtrail/subscribe.py0000644000000000000000000003254100000000000024163 0ustar00rootroot00000000000000# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import json import logging import sys from .utils import get_account_id from awscli.customizations.commands import BasicCommand from awscli.customizations.utils import s3_bucket_exists from botocore.exceptions import ClientError LOG = logging.getLogger(__name__) S3_POLICY_TEMPLATE = 'policy/S3/AWSCloudTrail-S3BucketPolicy-2014-12-17.json' SNS_POLICY_TEMPLATE = 'policy/SNS/AWSCloudTrail-SnsTopicPolicy-2014-12-17.json' class CloudTrailError(Exception): pass class CloudTrailSubscribe(BasicCommand): """ Subscribe/update a user account to CloudTrail, creating the required S3 bucket, the optional SNS topic, and starting the CloudTrail monitoring and logging. 
""" NAME = 'create-subscription' DESCRIPTION = ('Creates and configures the AWS resources necessary to use' ' CloudTrail, creates a trail using those resources, and ' 'turns on logging.') SYNOPSIS = ('aws cloudtrail create-subscription' ' (--s3-use-bucket|--s3-new-bucket) bucket-name' ' [--sns-new-topic topic-name]\n') ARG_TABLE = [ {'name': 'name', 'required': True, 'help_text': 'Cloudtrail name'}, {'name': 's3-new-bucket', 'help_text': 'Create a new S3 bucket with this name'}, {'name': 's3-use-bucket', 'help_text': 'Use an existing S3 bucket with this name'}, {'name': 's3-prefix', 'help_text': 'S3 object prefix'}, {'name': 'sns-new-topic', 'help_text': 'Create a new SNS topic with this name'}, {'name': 'include-global-service-events', 'help_text': 'Whether to include global service events'}, {'name': 's3-custom-policy', 'help_text': 'Custom S3 policy template or URL'}, {'name': 'sns-custom-policy', 'help_text': 'Custom SNS policy template or URL'} ] UPDATE = False _UNDOCUMENTED = True def _run_main(self, args, parsed_globals): self.setup_services(args, parsed_globals) # Run the command and report success self._call(args, parsed_globals) return 0 def setup_services(self, args, parsed_globals): client_args = { 'region_name': None, 'verify': None } if parsed_globals.region is not None: client_args['region_name'] = parsed_globals.region if parsed_globals.verify_ssl is not None: client_args['verify'] = parsed_globals.verify_ssl # Initialize services LOG.debug('Initializing S3, SNS and CloudTrail...') self.sts = self._session.create_client('sts', **client_args) self.s3 = self._session.create_client('s3', **client_args) self.sns = self._session.create_client('sns', **client_args) self.region_name = self.s3.meta.region_name # If the endpoint is specified, it is designated for the cloudtrail # service. Not all of the other services will use it. 
if parsed_globals.endpoint_url is not None: client_args['endpoint_url'] = parsed_globals.endpoint_url self.cloudtrail = self._session.create_client('cloudtrail', **client_args) def _call(self, options, parsed_globals): """ Run the command. Calls various services based on input options and outputs the final CloudTrail configuration. """ gse = options.include_global_service_events if gse: if gse.lower() == 'true': gse = True elif gse.lower() == 'false': gse = False else: raise ValueError('You must pass either true or false to' ' --include-global-service-events.') bucket = options.s3_use_bucket if options.s3_new_bucket: bucket = options.s3_new_bucket if self.UPDATE and options.s3_prefix is None: # Prefix was not passed and this is updating the S3 bucket, # so let's find the existing prefix and use that if possible res = self.cloudtrail.describe_trails( trailNameList=[options.name]) trail_info = res['trailList'][0] if 'S3KeyPrefix' in trail_info: LOG.debug('Setting S3 prefix to {0}'.format( trail_info['S3KeyPrefix'])) options.s3_prefix = trail_info['S3KeyPrefix'] self.setup_new_bucket(bucket, options.s3_prefix, options.s3_custom_policy) elif not bucket and not self.UPDATE: # No bucket was passed for creation. 
raise ValueError('You must pass either --s3-use-bucket or' ' --s3-new-bucket to create.') if options.sns_new_topic: try: topic_result = self.setup_new_topic(options.sns_new_topic, options.sns_custom_policy) except Exception: # Roll back any S3 bucket creation if options.s3_new_bucket: self.s3.delete_bucket(Bucket=options.s3_new_bucket) raise try: cloudtrail_config = self.upsert_cloudtrail_config( options.name, bucket, options.s3_prefix, options.sns_new_topic, gse ) except Exception: # Roll back any S3 bucket / SNS topic creations if options.s3_new_bucket: self.s3.delete_bucket(Bucket=options.s3_new_bucket) if options.sns_new_topic: self.sns.delete_topic(TopicArn=topic_result['TopicArn']) raise sys.stdout.write('CloudTrail configuration:\n{config}\n'.format( config=json.dumps(cloudtrail_config, indent=2))) if not self.UPDATE: # If the configure call command above completes then this should # have a really high chance of also completing self.start_cloudtrail(options.name) sys.stdout.write( 'Logs will be delivered to {bucket}:{prefix}\n'.format( bucket=bucket, prefix=options.s3_prefix or '')) def _get_policy(self, key_name): try: data = self.s3.get_object( Bucket='awscloudtrail-policy-' + self.region_name, Key=key_name) return data['Body'].read().decode('utf-8') except Exception as e: raise CloudTrailError( 'Unable to get regional policy template for' ' region %s: %s. Error: %s', self.region_name, key_name, e) def setup_new_bucket(self, bucket, prefix, custom_policy=None): """ Creates a new S3 bucket with an appropriate policy to let CloudTrail write to the prefix path. 
""" sys.stdout.write( 'Setting up new S3 bucket {bucket}...\n'.format(bucket=bucket)) account_id = get_account_id(self.sts) # Clean up the prefix - it requires a trailing slash if set if prefix and not prefix.endswith('/'): prefix += '/' # Fetch policy data from S3 or a custom URL if custom_policy is not None: policy = custom_policy else: policy = self._get_policy(S3_POLICY_TEMPLATE) policy = policy.replace('', bucket)\ .replace('', account_id) if '/' in policy: policy = policy.replace('/', prefix or '') else: policy = policy.replace('', prefix or '') LOG.debug('Bucket policy:\n{0}'.format(policy)) bucket_exists = s3_bucket_exists(self.s3, bucket) if bucket_exists: raise Exception('Bucket {bucket} already exists.'.format( bucket=bucket)) # If we are not using the us-east-1 region, then we must set # a location constraint on the new bucket. params = {'Bucket': bucket} if self.region_name != 'us-east-1': bucket_config = {'LocationConstraint': self.region_name} params['CreateBucketConfiguration'] = bucket_config data = self.s3.create_bucket(**params) try: self.s3.put_bucket_policy(Bucket=bucket, Policy=policy) except ClientError: # Roll back bucket creation. self.s3.delete_bucket(Bucket=bucket) raise return data def setup_new_topic(self, topic, custom_policy=None): """ Creates a new SNS topic with an appropriate policy to let CloudTrail post messages to the topic. """ sys.stdout.write( 'Setting up new SNS topic {topic}...\n'.format(topic=topic)) account_id = get_account_id(self.sts) # Make sure topic doesn't already exist # Warn but do not fail if ListTopics permissions # are missing from the IAM role? try: topics = self.sns.list_topics()['Topics'] except Exception: topics = [] LOG.warn('Unable to list topics, continuing...') if [t for t in topics if t['TopicArn'].split(':')[-1] == topic]: raise Exception('Topic {topic} already exists.'.format( topic=topic)) region = self.sns.meta.region_name # Get the SNS topic policy information to allow CloudTrail # write-access. 
if custom_policy is not None: policy = custom_policy else: policy = self._get_policy(SNS_POLICY_TEMPLATE) policy = policy.replace('', region)\ .replace('', account_id)\ .replace('', topic) topic_result = self.sns.create_topic(Name=topic) try: # Merge any existing topic policy with our new policy statements topic_attr = self.sns.get_topic_attributes( TopicArn=topic_result['TopicArn']) policy = self.merge_sns_policy(topic_attr['Attributes']['Policy'], policy) LOG.debug('Topic policy:\n{0}'.format(policy)) # Set the topic policy self.sns.set_topic_attributes(TopicArn=topic_result['TopicArn'], AttributeName='Policy', AttributeValue=policy) except Exception: # Roll back topic creation self.sns.delete_topic(TopicArn=topic_result['TopicArn']) raise return topic_result def merge_sns_policy(self, left, right): """ Merge two SNS topic policy documents. The id information from ``left`` is used in the final document, and the statements from ``right`` are merged into ``left``. http://docs.aws.amazon.com/sns/latest/dg/BasicStructure.html :type left: string :param left: First policy JSON document :type right: string :param right: Second policy JSON document :rtype: string :return: Merged policy JSON """ left_parsed = json.loads(left) right_parsed = json.loads(right) left_parsed['Statement'] += right_parsed['Statement'] return json.dumps(left_parsed) def upsert_cloudtrail_config(self, name, bucket, prefix, topic, gse): """ Either create or update the CloudTrail configuration depending on whether this command is a create or update command. 
""" sys.stdout.write('Creating/updating CloudTrail configuration...\n') config = { 'Name': name } if bucket is not None: config['S3BucketName'] = bucket if prefix is not None: config['S3KeyPrefix'] = prefix if topic is not None: config['SnsTopicName'] = topic if gse is not None: config['IncludeGlobalServiceEvents'] = gse if not self.UPDATE: self.cloudtrail.create_trail(**config) else: self.cloudtrail.update_trail(**config) return self.cloudtrail.describe_trails() def start_cloudtrail(self, name): """ Start the CloudTrail service, which begins logging. """ sys.stdout.write('Starting CloudTrail service...\n') return self.cloudtrail.start_logging(Name=name) class CloudTrailUpdate(CloudTrailSubscribe): """ Like subscribe above, but the update version of the command. """ NAME = 'update-subscription' UPDATE = True DESCRIPTION = ('Updates any of the trail configuration settings, and' ' creates and configures any new AWS resources specified.') SYNOPSIS = ('aws cloudtrail update-subscription' ' [(--s3-use-bucket|--s3-new-bucket) bucket-name]' ' [--sns-new-topic topic-name]\n') ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/cloudtrail/utils.py0000644000000000000000000000231000000000000023331 0ustar00rootroot00000000000000# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
def get_account_id_from_arn(trail_arn): """Gets the account ID portion of an ARN""" return trail_arn.split(':')[4] def get_account_id(sts_client): """Retrieve the AWS account ID for the authenticated user or role""" response = sts_client.get_caller_identity() return response['Account'] def get_trail_by_arn(cloudtrail_client, trail_arn): """Gets trail information based on the trail's ARN""" trails = cloudtrail_client.describe_trails()['trailList'] for trail in trails: if trail.get('TrailARN', None) == trail_arn: return trail raise ValueError('A trail could not be found for %s' % trail_arn) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/cloudtrail/validation.py0000644000000000000000000011632400000000000024336 0ustar00rootroot00000000000000# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import base64
import binascii
import json
import hashlib
import logging
import re
import sys
import zlib
from zlib import error as ZLibError
from datetime import datetime, timedelta

from dateutil import tz, parser
from pyasn1.error import PyAsn1Error
import rsa

from awscli.customizations.cloudtrail.utils import get_trail_by_arn, \
    get_account_id_from_arn
from awscli.customizations.commands import BasicCommand
from botocore.exceptions import ClientError
from awscli.schema import ParameterRequiredError


LOG = logging.getLogger(__name__)
DATE_FORMAT = '%Y%m%dT%H%M%SZ'
DISPLAY_DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'


def format_date(date):
    """Returns a formatted date string in a CloudTrail date format"""
    return date.strftime(DATE_FORMAT)


def format_display_date(date):
    """Returns a formatted date string meant for CLI output"""
    return date.strftime(DISPLAY_DATE_FORMAT)


def normalize_date(date):
    """Returns a normalized date using a UTC timezone"""
    return date.replace(tzinfo=tz.tzutc())


def extract_digest_key_date(digest_s3_key):
    """Extract the timestamp portion of a manifest file.

    Manifest file names take the following form:
    AWSLogs/{account}/CloudTrail-Digest/{region}/{ymd}/{account}_CloudTrail \
    -Digest_{region}_{name}_region_{date}.json.gz
    """
    return digest_s3_key[-24:-8]


def parse_date(date_string):
    """Parses a date string, raising a friendlier ValueError on failure."""
    try:
        return parser.parse(date_string)
    except ValueError:
        raise ValueError('Unable to parse date value: %s' % date_string)


def assert_cloudtrail_arn_is_valid(trail_arn):
    """Ensures that the arn looks correct.

    ARNs look like: arn:aws:cloudtrail:us-east-1:123456789012:trail/foo"""
    # Raw string so "\d" is a regex digit class, not an invalid Python
    # string escape (which warns on modern CPython versions).
    pattern = re.compile(r'arn:.+:cloudtrail:.+:\d{12}:trail/.+')
    if not pattern.match(trail_arn):
        raise ValueError('Invalid trail ARN provided: %s' % trail_arn)


def create_digest_traverser(cloudtrail_client, organization_client,
                            s3_client_provider, trail_arn,
                            trail_source_region=None, on_invalid=None,
                            on_gap=None, on_missing=None, bucket=None,
                            prefix=None, account_id=None):
    """Creates a CloudTrail DigestTraverser and its object graph.

    :type cloudtrail_client: botocore.client.CloudTrail
    :param cloudtrail_client: Client used to connect to CloudTrail
    :type organization_client: botocore.client.organizations
    :param organization_client: Client used to connect to Organizations
    :type s3_client_provider: S3ClientProvider
    :param s3_client_provider: Used to create Amazon S3 client per/region.
    :param trail_arn: CloudTrail trail ARN
    :param trail_source_region: The scanned region of a trail.
    :param on_invalid: Callback that is invoked when validating a digest
        fails.
    :param on_gap: Callback that is invoked when a digest has no link to the
        previous digest, but there are more digests to validate. This can
        happen when a trail is disabled for a period of time.
    :param on_missing: Callback that is invoked when a digest file has been
        deleted from Amazon S3 but is supposed to be present.
    :param bucket: Amazon S3 bucket of the trail if it is different than the
        bucket that is currently associated with the trail.
    :param prefix: Key prefix prepended to each digest and log placed in the
        Amazon S3 bucket if it is different than the prefix that is currently
        associated with the trail.
    :param account_id: The account id for which the digest files are
        validated. For normal trails this is the caller account, for
        organization trails it is the member account.

    ``on_gap``, ``on_invalid``, and ``on_missing`` callbacks are invoked with
    the following named arguments:

    - ``bucket``: The next S3 bucket.
    - ``next_key``: (optional) Next digest key that was found in the bucket.
    - ``next_end_date``: (optional) End date of the next found digest.
    - ``last_key``: The last digest key that was found.
    - ``last_start_date``: (optional) Start date of last found digest.
    - ``message``: (optional) Message string about the notification.
    """
    assert_cloudtrail_arn_is_valid(trail_arn)
    organization_id = None
    if bucket is None:
        # Determine the bucket and prefix based on the trail arn.
        trail_info = get_trail_by_arn(cloudtrail_client, trail_arn)
        LOG.debug('Loaded trail info: %s', trail_info)
        bucket = trail_info['S3BucketName']
        prefix = trail_info.get('S3KeyPrefix', None)
        is_org_trail = trail_info.get('IsOrganizationTrail')
        if is_org_trail:
            if not account_id:
                raise ParameterRequiredError(
                    "Missing required parameter for organization "
                    "trail: '--account-id'")
            organization_id = organization_client.describe_organization()[
                'Organization']['Id']
    # Determine the region from the ARN (e.g., arn:aws:cloudtrail:REGION:...)
    trail_region = trail_arn.split(':')[3]
    # Determine the name from the ARN (the last part after "/")
    trail_name = trail_arn.split('/')[-1]
    # If account id is not specified parse it from trail ARN
    if not account_id:
        account_id = get_account_id_from_arn(trail_arn)
    digest_provider = DigestProvider(
        account_id=account_id, trail_name=trail_name,
        s3_client_provider=s3_client_provider,
        trail_source_region=trail_source_region,
        trail_home_region=trail_region,
        organization_id=organization_id)
    return DigestTraverser(
        digest_provider=digest_provider, starting_bucket=bucket,
        starting_prefix=prefix, on_invalid=on_invalid, on_gap=on_gap,
        on_missing=on_missing,
        public_key_provider=PublicKeyProvider(cloudtrail_client))


class S3ClientProvider(object):
    """Creates Amazon S3 clients and determines the region name of a client.

    This class will cache the location constraints of previously requested
    buckets and cache previously created clients for the same region.
    """
    def __init__(self, session, get_bucket_location_region='us-east-1'):
        self._session = session
        self._get_bucket_location_region = get_bucket_location_region
        # Caches keyed by region name and bucket name respectively.
        self._client_cache = {}
        self._region_cache = {}

    def get_client(self, bucket_name):
        """Creates an S3 client that can work with the given bucket name"""
        region_name = self._get_bucket_region(bucket_name)
        return self._create_client(region_name)

    def _get_bucket_region(self, bucket_name):
        """Returns the region of a bucket"""
        if bucket_name not in self._region_cache:
            client = self._create_client(self._get_bucket_location_region)
            result = client.get_bucket_location(Bucket=bucket_name)
            # A null LocationConstraint means the bucket is in us-east-1.
            region = result['LocationConstraint'] or 'us-east-1'
            self._region_cache[bucket_name] = region
        return self._region_cache[bucket_name]

    def _create_client(self, region_name):
        """Creates an Amazon S3 client for the given region name"""
        if region_name not in self._client_cache:
            client = self._session.create_client('s3', region_name)
            # Remove the CLI error event that prevents exceptions.
            self._client_cache[region_name] = client
        return self._client_cache[region_name]


class DigestError(ValueError):
    """Exception raised when a digest fails to validate"""
    pass


class DigestSignatureError(DigestError):
    """Exception raised when a digest signature is invalid"""
    def __init__(self, bucket, key):
        message = ('Digest file\ts3://%s/%s\tINVALID: signature verification '
                   'failed') % (bucket, key)
        super(DigestSignatureError, self).__init__(message)


class InvalidDigestFormat(DigestError):
    """Exception raised when a digest has an invalid format"""
    def __init__(self, bucket, key):
        message = 'Digest file\ts3://%s/%s\tINVALID: invalid format' % (
            bucket, key)
        super(InvalidDigestFormat, self).__init__(message)


class PublicKeyProvider(object):
    """Retrieves public keys from CloudTrail within a date range."""
    def __init__(self, cloudtrail_client):
        self._cloudtrail_client = cloudtrail_client

    def get_public_keys(self, start_date, end_date):
        """Loads public keys in a date range into a returned dict.

        :type start_date: datetime
        :param start_date: Start date of a date range.
        :type end_date: datetime
        :param end_date: End date of a date range.
        :rtype: dict
        :return: Returns a dict where each key is the fingerprint of the
            public key, and each value is a dict of public key data.
        """
        public_keys = self._cloudtrail_client.list_public_keys(
            StartTime=start_date, EndTime=end_date)
        public_keys_in_range = public_keys['PublicKeyList']
        LOG.debug('Loaded public keys in range: %s', public_keys_in_range)
        return dict((key['Fingerprint'], key)
                    for key in public_keys_in_range)


class DigestProvider(object):
    """
    Retrieves digest keys and digests from Amazon S3.

    This class is responsible for determining the full list of digest files
    in a bucket and loading digests from the bucket into a JSON decoded
    dict. This class is not responsible for validation or iterating from
    one digest to the next.
    """
    def __init__(self, s3_client_provider, account_id, trail_name,
                 trail_home_region, trail_source_region=None,
                 organization_id=None):
        self._client_provider = s3_client_provider
        self.trail_name = trail_name
        self.account_id = account_id
        self.trail_home_region = trail_home_region
        self.trail_source_region = trail_source_region or trail_home_region
        self.organization_id = organization_id

    def load_digest_keys_in_range(self, bucket, prefix, start_date,
                                  end_date):
        """Returns a list of digest keys in the date range.

        This method uses a list_objects API call and provides a Marker
        parameter that is calculated based on the start_date provided.
        Amazon S3 then returns all keys in the bucket that start after
        the given key (non-inclusive). We then iterate over the keys
        until the date extracted from the yielded keys is greater than
        the given end_date.
        """
        digests = []
        marker = self._create_digest_key(start_date, prefix)
        client = self._client_provider.get_client(bucket)
        paginator = client.get_paginator('list_objects')
        page_iterator = paginator.paginate(Bucket=bucket, Marker=marker)
        key_filter = page_iterator.search('Contents[*].Key')
        # Create a target start and end date
        target_start_date = format_date(normalize_date(start_date))
        # Add one hour to the end_date to get logs that spilled over to next.
        target_end_date = format_date(
            normalize_date(end_date + timedelta(hours=1)))
        # Ensure digests are from the same trail.
        digest_key_regex = re.compile(self._create_digest_key_regex(prefix))
        for key in key_filter:
            if digest_key_regex.match(key):
                # Use a lexicographic comparison to know when to stop.
                extracted_date = extract_digest_key_date(key)
                if extracted_date > target_end_date:
                    break
                # Only append digests after the start date.
                if extracted_date >= target_start_date:
                    digests.append(key)
        return digests

    def fetch_digest(self, bucket, key):
        """Loads a digest by key from S3.

        Returns the JSON decode data and GZIP inflated raw content.
        """
        client = self._client_provider.get_client(bucket)
        result = client.get_object(Bucket=bucket, Key=key)
        try:
            digest = zlib.decompress(result['Body'].read(),
                                     zlib.MAX_WBITS | 16)
            digest_data = json.loads(digest.decode())
        except (ValueError, ZLibError):
            # Cannot gzip decode or JSON parse.
            raise InvalidDigestFormat(bucket, key)
        # Add the expected digest signature and algorithm to the dict.
        if 'signature' not in result['Metadata'] \
                or 'signature-algorithm' not in result['Metadata']:
            raise DigestSignatureError(bucket, key)
        digest_data['_signature'] = result['Metadata']['signature']
        digest_data['_signature_algorithm'] = \
            result['Metadata']['signature-algorithm']
        return digest_data, digest

    def _create_digest_key(self, start_date, key_prefix):
        """Computes an Amazon S3 key based on the provided data.

        The computed is what would have been placed in the S3 bucket if a
        log digest were created at a specific time. This computed key does
        not have to actually exist as it will only be used to as a Marker
        parameter in a list_objects call.

        :return: Returns a computed key as a string.
        """
        # Subtract one minute to ensure the dates are inclusive.
        date = start_date - timedelta(minutes=1)
        template = 'AWSLogs/'
        template_params = {
            'account_id': self.account_id,
            'date': format_date(date),
            'ymd': date.strftime('%Y/%m/%d'),
            'source_region': self.trail_source_region,
            'home_region': self.trail_home_region,
            'name': self.trail_name
        }
        if self.organization_id:
            template += '{organization_id}/'
            template_params['organization_id'] = self.organization_id
        template += (
            '{account_id}/CloudTrail-Digest/{source_region}/'
            '{ymd}/{account_id}_CloudTrail-Digest_{source_region}_{name}_'
            '{home_region}_{date}.json.gz'
        )
        key = template.format(**template_params)
        if key_prefix:
            key = key_prefix + '/' + key
        return key

    def _create_digest_key_regex(self, key_prefix):
        """Creates a regular expression used to match against S3 keys"""
        template = 'AWSLogs/'
        template_params = {
            'account_id': re.escape(self.account_id),
            'source_region': re.escape(self.trail_source_region),
            'home_region': re.escape(self.trail_home_region),
            'name': re.escape(self.trail_name)
        }
        if self.organization_id:
            template += '{organization_id}/'
            template_params['organization_id'] = self.organization_id
        template += (
            '{account_id}/CloudTrail\\-Digest/{source_region}/'
            '\\d+/\\d+/\\d+/{account_id}_CloudTrail\\-Digest_'
            '{source_region}_{name}_{home_region}_.+\\.json\\.gz'
        )
        key = template.format(**template_params)
        if key_prefix:
            key = re.escape(key_prefix) + '/' + key
        return '^' + key + '$'


class DigestTraverser(object):
    """Retrieves and validates digests within a date range."""
    # These keys are required to be present before validating the contents
    # of a digest.
    required_digest_keys = ['digestPublicKeyFingerprint', 'digestS3Bucket',
                            'digestS3Object', 'previousDigestSignature',
                            'digestEndTime', 'digestStartTime']

    def __init__(self, digest_provider, starting_bucket, starting_prefix,
                 public_key_provider, digest_validator=None,
                 on_invalid=None, on_gap=None, on_missing=None):
        """
        :type digest_provider: DigestProvider
        :param digest_provider: DigestProvider object
        :param starting_bucket: S3 bucket where the digests are stored.
        :param starting_prefix: An optional prefix applied to each S3 key.
        :param public_key_provider: Provides public keys for a range.
        :param digest_validator: Validates digest using a validate method.
        :param on_invalid: Callback invoked when a digest is invalid.
        :param on_gap: Callback invoked when a digest has no parent, but
            there are still more digests to validate.
        :param on_missing: Callback invoked when a digest file is missing.
        """
        self.starting_bucket = starting_bucket
        self.starting_prefix = starting_prefix
        self.digest_provider = digest_provider
        self._public_key_provider = public_key_provider
        self._on_gap = on_gap
        self._on_invalid = on_invalid
        self._on_missing = on_missing
        if digest_validator is None:
            digest_validator = Sha256RSADigestValidator()
        self._digest_validator = digest_validator

    def traverse(self, start_date, end_date=None):
        """Creates and returns a generator that yields validated digest data.

        Each yielded digest dictionary contains information about the digest
        and the log file associated with the digest. Digest files are
        validated before they are yielded. Whether or not the digest is
        successfully validated is stated in the "isValid" key value pair of
        the yielded dictionary.

        :type start_date: datetime
        :param start_date: Date to start validating from (inclusive).
        :type start_date: datetime
        :param end_date: Date to stop validating at (inclusive).
        """
        if end_date is None:
            end_date = datetime.utcnow()
        end_date = normalize_date(end_date)
        start_date = normalize_date(start_date)
        bucket = self.starting_bucket
        prefix = self.starting_prefix
        digests = self._load_digests(bucket, prefix, start_date, end_date)
        public_keys = self._load_public_keys(start_date, end_date)
        key, end_date = self._get_last_digest(digests)
        last_start_date = end_date
        while key and start_date <= last_start_date:
            try:
                digest, end_date = self._load_and_validate_digest(
                    public_keys, bucket, key)
                last_start_date = normalize_date(
                    parse_date(digest['digestStartTime']))
                previous_bucket = digest.get('previousDigestS3Bucket', None)
                yield digest
                if previous_bucket is None:
                    # The chain is broken, so find next in digest store.
                    key, end_date = self._find_next_digest(
                        digests=digests, bucket=bucket, last_key=key,
                        last_start_date=last_start_date, cb=self._on_gap,
                        is_cb_conditional=True)
                else:
                    key = digest['previousDigestS3Object']
                    if previous_bucket != bucket:
                        bucket = previous_bucket
                        # The bucket changed so reload the digest list.
                        digests = self._load_digests(
                            bucket, prefix, start_date, end_date)
            except ClientError as e:
                if e.response['Error']['Code'] != 'NoSuchKey':
                    raise e
                key, end_date = self._find_next_digest(
                    digests=digests, bucket=bucket, last_key=key,
                    last_start_date=last_start_date, cb=self._on_missing,
                    message=str(e))
            except DigestError as e:
                key, end_date = self._find_next_digest(
                    digests=digests, bucket=bucket, last_key=key,
                    last_start_date=last_start_date, cb=self._on_invalid,
                    message=str(e))
            except Exception as e:
                # Any other unexpected errors.
                key, end_date = self._find_next_digest(
                    digests=digests, bucket=bucket, last_key=key,
                    last_start_date=last_start_date, cb=self._on_invalid,
                    message='Digest file\ts3://%s/%s\tINVALID: %s'
                            % (bucket, key, str(e)))

    def _load_digests(self, bucket, prefix, start_date, end_date):
        return self.digest_provider.load_digest_keys_in_range(
            bucket=bucket, prefix=prefix,
            start_date=start_date, end_date=end_date)

    def _find_next_digest(self, digests, bucket, last_key, last_start_date,
                          cb=None, is_cb_conditional=False, message=None):
        """Finds the next digest in the bucket and invokes any callback."""
        next_key, next_end_date = self._get_last_digest(digests, last_key)
        if cb and (not is_cb_conditional or next_key):
            cb(bucket=bucket, next_key=next_key, last_key=last_key,
               next_end_date=next_end_date, last_start_date=last_start_date,
               message=message)
        return next_key, next_end_date

    def _get_last_digest(self, digests, before_key=None):
        """Finds the previous digest key (either the last or before
        before_key)

        If no key is provided, the last digest is used. If a digest is found,
        the end date of the provider is adjusted to match the found key's end
        date.
        """
        if not digests:
            return None, None
        elif before_key is None:
            next_key = digests.pop()
            next_key_date = normalize_date(
                parse_date(extract_digest_key_date(next_key)))
            return next_key, next_key_date
        # find a key before the given key.
        before_key_date = parse_date(extract_digest_key_date(before_key))
        while digests:
            next_key = digests.pop()
            next_key_date = normalize_date(
                parse_date(extract_digest_key_date(next_key)))
            if next_key_date < before_key_date:
                LOG.debug("Next found key: %s", next_key)
                return next_key, next_key_date
        return None, None

    def _load_and_validate_digest(self, public_keys, bucket, key):
        """Loads and validates a digest from S3.

        :param public_keys: Public key dictionary of fingerprint to dict.
        :return: Returns a tuple of the digest data as a dict and end_date
        :rtype: tuple
        """
        digest_data, digest = self.digest_provider.fetch_digest(bucket, key)
        for required_key in self.required_digest_keys:
            if required_key not in digest_data:
                raise InvalidDigestFormat(bucket, key)
        # Ensure the bucket and key are the same as what's expected.
        if digest_data['digestS3Bucket'] != bucket \
                or digest_data['digestS3Object'] != key:
            raise DigestError(
                ('Digest file\ts3://%s/%s\tINVALID: has been moved from its '
                 'original location') % (bucket, key))
        # Get the public keys in the given time range.
        fingerprint = digest_data['digestPublicKeyFingerprint']
        if fingerprint not in public_keys:
            raise DigestError(
                ('Digest file\ts3://%s/%s\tINVALID: public key not found in '
                 'region %s for fingerprint %s') %
                (bucket, key, self.digest_provider.trail_home_region,
                 fingerprint))
        public_key_hex = public_keys[fingerprint]['Value']
        self._digest_validator.validate(
            bucket, key, public_key_hex, digest_data, digest)
        end_date = normalize_date(parse_date(digest_data['digestEndTime']))
        return digest_data, end_date

    def _load_public_keys(self, start_date, end_date):
        public_keys = self._public_key_provider.get_public_keys(
            start_date, end_date)
        if not public_keys:
            raise RuntimeError(
                'No public keys found between %s and %s' %
                (format_display_date(start_date),
                 format_display_date(end_date)))
        return public_keys


class Sha256RSADigestValidator(object):
    """
    Validates SHA256withRSA signed digests.

    The result of validating the digest is inserted into the digest_data
    dictionary using the isValid key value pair.
    """

    def validate(self, bucket, key, public_key, digest_data,
                 inflated_digest):
        """Validates a digest file.

        Throws a DigestError when the digest is invalid.

        :param bucket: Bucket of the digest file
        :param key: Key of the digest file
        :param public_key: Public key bytes.
        :param digest_data: Dict of digest data returned when JSON
            decoding a manifest.
        :param inflated_digest: Inflated digest file contents as bytes.
        """
        try:
            decoded_key = base64.b64decode(public_key)
            public_key = rsa.PublicKey.load_pkcs1(decoded_key, format='DER')
            to_sign = self._create_string_to_sign(digest_data,
                                                  inflated_digest)
            signature_bytes = binascii.unhexlify(digest_data['_signature'])
            rsa.verify(to_sign, signature_bytes, public_key)
        except PyAsn1Error:
            raise DigestError(
                ('Digest file\ts3://%s/%s\tINVALID: Unable to load PKCS #1 '
                 'key with fingerprint %s')
                % (bucket, key, digest_data['digestPublicKeyFingerprint']))
        except rsa.pkcs1.VerificationError:
            # Note from the Python-RSA docs: Never display the stack trace of
            # a rsa.pkcs1.VerificationError exception. It shows where in the
            # code the exception occurred, and thus leaks information about
            # the key.
            raise DigestSignatureError(bucket, key)

    def _create_string_to_sign(self, digest_data, inflated_digest):
        previous_signature = digest_data['previousDigestSignature']
        if previous_signature is None:
            # The value must be 'null' to match the Java implementation.
            previous_signature = 'null'
        string_to_sign = "%s\n%s/%s\n%s\n%s" % (
            digest_data['digestEndTime'],
            digest_data['digestS3Bucket'],
            digest_data['digestS3Object'],
            hashlib.sha256(inflated_digest).hexdigest(),
            previous_signature)
        LOG.debug('Digest string to sign: %s', string_to_sign)
        return string_to_sign.encode()


class CloudTrailValidateLogs(BasicCommand):
    """
    Validates log digests and log files, optionally saving them to disk.
    """
    NAME = 'validate-logs'
    DESCRIPTION = """
    Validates CloudTrail logs for a given period of time.

    This command uses the digest files delivered to your S3 bucket to
    perform the validation.

    The AWS CLI allows you to detect the following types of changes:

    - Modification or deletion of CloudTrail log files.
    - Modification or deletion of CloudTrail digest files.

    To validate log files with the AWS CLI, the following preconditions must
    be met:

    - You must have online connectivity to AWS.
    - You must have read access to the S3 bucket that contains the digest and
      log files.
    - The digest and log files must not have been moved from the original S3
      location where CloudTrail delivered them.
    - For organization trails you must have access to describe-organization
      to validate digest files

    When you disable Log File Validation, the chain of digest files is broken
    after one hour. CloudTrail will not digest log files that were delivered
    during a period in which the Log File Validation feature was disabled.
    For example, if you enable Log File Validation on January 1, disable it
    on January 2, and re-enable it on January 10, digest files will not be
    created for the log files delivered from January 3 to January 9. The same
    applies whenever you stop CloudTrail logging or delete a trail.

    .. note::
        Log files that have been downloaded to local disk cannot be validated
        with the AWS CLI. The CLI will download all log files each time this
        command is executed.

    .. note::
        This command requires that the role executing the command has
        permission to call ListObjects, GetObject, and GetBucketLocation for
        each bucket referenced by the trail.

    """
    ARG_TABLE = [
        {'name': 'trail-arn', 'required': True, 'cli_type_name': 'string',
         'help_text': 'Specifies the ARN of the trail to be validated'},
        {'name': 'start-time', 'required': True, 'cli_type_name': 'string',
         'help_text': ('Specifies that log files delivered on or after the '
                       'specified UTC timestamp value will be validated. '
                       'Example: "2015-01-08T05:21:42Z".')},
        {'name': 'end-time', 'cli_type_name': 'string',
         'help_text': ('Optionally specifies that log files delivered on or '
                       'before the specified UTC timestamp value will be '
                       'validated. The default value is the current time. '
                       'Example: "2015-01-08T12:31:41Z".')},
        {'name': 's3-bucket', 'cli_type_name': 'string',
         'help_text': ('Optionally specifies the S3 bucket where the digest '
                       'files are stored. If a bucket name is not specified, '
                       'the CLI will retrieve it by calling describe_trails')},
        {'name': 's3-prefix', 'cli_type_name': 'string',
         'help_text': ('Optionally specifies the optional S3 prefix where '
                       'the digest files are stored. If not specified, the '
                       'CLI will determine the prefix automatically by '
                       'calling describe_trails.')},
        {'name': 'account-id', 'cli_type_name': 'string',
         'help_text': ('Optionally specifies the account for validating '
                       'logs. This parameter is needed for organization '
                       'trails for validating logs for specific account '
                       'inside an organization')},
        {'name': 'verbose', 'cli_type_name': 'boolean',
         'action': 'store_true',
         'help_text': 'Display verbose log validation information'}
    ]

    def __init__(self, session):
        super(CloudTrailValidateLogs, self).__init__(session)
        self.trail_arn = None
        self.is_verbose = False
        self.start_time = None
        self.end_time = None
        self.s3_bucket = None
        self.s3_prefix = None
        self.s3_client_provider = None
        self.cloudtrail_client = None
        self.account_id = None
        self._source_region = None
        self._valid_digests = 0
        self._invalid_digests = 0
        self._valid_logs = 0
        self._invalid_logs = 0
        self._is_last_status_double_space = True
        self._found_start_time = None
        self._found_end_time = None

    def _run_main(self, args, parsed_globals):
        self.handle_args(args)
        self.setup_services(parsed_globals)
        self._call()
        if self._invalid_digests > 0 or self._invalid_logs > 0:
            return 1
        return 0

    def handle_args(self, args):
        self.trail_arn = args.trail_arn
        self.is_verbose = args.verbose
        self.s3_bucket = args.s3_bucket
        self.s3_prefix = args.s3_prefix
        self.account_id = args.account_id
        self.start_time = normalize_date(parse_date(args.start_time))
        if args.end_time:
            self.end_time = normalize_date(parse_date(args.end_time))
        else:
            self.end_time = normalize_date(datetime.utcnow())
        if self.start_time > self.end_time:
            raise ValueError(('Invalid time range specified: start-time must '
                              'occur before end-time'))
        # Found start time always defaults to the given start time. This
        # value may change if the earliest found digest is after the given
        # start time. Note that the summary output report of what date ranges
        # were actually found is only shown if a valid digest is encountered,
        # thereby setting self._found_end_time to a value.
        self._found_start_time = self.start_time

    def setup_services(self, parsed_globals):
        self._source_region = parsed_globals.region
        # Use the same region as the region of the CLI to get locations.
        self.s3_client_provider = S3ClientProvider(
            self._session, self._source_region)
        client_args = {'region_name': parsed_globals.region,
                       'verify': parsed_globals.verify_ssl}
        self.organization_client = self._session.create_client(
            'organizations', **client_args)
        # A custom endpoint URL only applies to the CloudTrail client.
        if parsed_globals.endpoint_url is not None:
            client_args['endpoint_url'] = parsed_globals.endpoint_url
        self.cloudtrail_client = self._session.create_client(
            'cloudtrail', **client_args)

    def _call(self):
        traverser = create_digest_traverser(
            trail_arn=self.trail_arn, cloudtrail_client=self.cloudtrail_client,
            organization_client=self.organization_client,
            trail_source_region=self._source_region,
            s3_client_provider=self.s3_client_provider,
            bucket=self.s3_bucket, prefix=self.s3_prefix,
            on_missing=self._on_missing_digest,
            on_invalid=self._on_invalid_digest, on_gap=self._on_digest_gap,
            account_id=self.account_id)
        self._write_startup_text()
        digests = traverser.traverse(self.start_time, self.end_time)
        for digest in digests:
            # Only valid digests are yielded and only valid digests can
            # adjust the found times that are reported in the CLI output
            # summary.
            self._track_found_times(digest)
            self._valid_digests += 1
            self._write_status(
                'Digest file\ts3://%s/%s\tvalid'
                % (digest['digestS3Bucket'], digest['digestS3Object']))
            if not digest['logFiles']:
                continue
            for log in digest['logFiles']:
                self._download_log(log)
        self._write_summary_text()

    def _track_found_times(self, digest):
        # Track the earliest found start time, but do not use a date before
        # the user supplied start date.
        digest_start_time = parse_date(digest['digestStartTime'])
        if digest_start_time > self.start_time:
            self._found_start_time = digest_start_time
        # Only use the last found end time if it is less than the
        # user supplied end time (or the current date).
        if not self._found_end_time:
            digest_end_time = parse_date(digest['digestEndTime'])
            self._found_end_time = min(digest_end_time, self.end_time)

    def _download_log(self, log):
        """ Download a log, decompress, and compare SHA256 checksums"""
        try:
            # Create a client that can work with this bucket.
            client = self.s3_client_provider.get_client(log['s3Bucket'])
            response = client.get_object(
                Bucket=log['s3Bucket'], Key=log['s3Object'])
            gzip_inflater = zlib.decompressobj(zlib.MAX_WBITS | 16)
            rolling_hash = hashlib.sha256()
            for chunk in iter(lambda: response['Body'].read(2048), b""):
                data = gzip_inflater.decompress(chunk)
                rolling_hash.update(data)
            remaining_data = gzip_inflater.flush()
            if remaining_data:
                rolling_hash.update(remaining_data)
            computed_hash = rolling_hash.hexdigest()
            if computed_hash != log['hashValue']:
                self._on_log_invalid(log)
            else:
                self._valid_logs += 1
                self._write_status(('Log file\ts3://%s/%s\tvalid'
                                    % (log['s3Bucket'], log['s3Object'])))
        except ClientError as e:
            if e.response['Error']['Code'] != 'NoSuchKey':
                raise
            self._on_missing_log(log)
        except Exception:
            self._on_invalid_log_format(log)

    def _write_status(self, message, is_error=False):
        if is_error:
            if self._is_last_status_double_space:
                sys.stderr.write("%s\n\n" % message)
            else:
                sys.stderr.write("\n%s\n\n" % message)
            self._is_last_status_double_space = True
        elif self.is_verbose:
            self._is_last_status_double_space = False
            sys.stdout.write("%s\n" % message)

    def _write_startup_text(self):
        sys.stdout.write(
            'Validating log files for trail %s between %s and %s\n\n'
            % (self.trail_arn, format_display_date(self.start_time),
               format_display_date(self.end_time)))

    def _write_summary_text(self):
        if not self._is_last_status_double_space:
            sys.stdout.write('\n')
        sys.stdout.write('Results requested for %s to %s\n'
                         % (format_display_date(self.start_time),
                            format_display_date(self.end_time)))
        if not self._valid_digests and not self._invalid_digests:
            sys.stdout.write('No digests found\n')
            return
        if not self._found_start_time or not self._found_end_time:
            sys.stdout.write('No valid digests found in range\n')
        else:
            sys.stdout.write('Results found for %s to %s:\n'
                             % (format_display_date(self._found_start_time),
                                format_display_date(self._found_end_time)))
        self._write_ratio(self._valid_digests, self._invalid_digests,
                          'digest')
        self._write_ratio(self._valid_logs, self._invalid_logs, 'log')
        sys.stdout.write('\n')

    def _write_ratio(self, valid, invalid, name):
        total = valid + invalid
        if total > 0:
            sys.stdout.write('\n%d/%d %s files valid'
                             % (valid, total, name))
            if invalid > 0:
                sys.stdout.write(', %d/%d %s files INVALID'
                                 % (invalid, total, name))

    def _on_missing_digest(self, bucket, last_key, **kwargs):
        self._invalid_digests += 1
        self._write_status('Digest file\ts3://%s/%s\tINVALID: not found'
                           % (bucket, last_key), True)

    def _on_digest_gap(self, **kwargs):
        self._write_status(
            'No log files were delivered by CloudTrail between %s and %s'
            % (format_display_date(kwargs['next_end_date']),
               format_display_date(kwargs['last_start_date'])), True)

    def _on_invalid_digest(self, message, **kwargs):
        self._invalid_digests += 1
        self._write_status(message, True)

    def _on_invalid_log_format(self, log_data):
        self._invalid_logs += 1
        self._write_status(
            ('Log file\ts3://%s/%s\tINVALID: invalid format'
             % (log_data['s3Bucket'], log_data['s3Object'])), True)

    def _on_log_invalid(self, log_data):
        self._invalid_logs += 1
        self._write_status(
            "Log file\ts3://%s/%s\tINVALID: hash value doesn't match"
            % (log_data['s3Bucket'], log_data['s3Object']), True)

    def _on_missing_log(self, log_data):
        self._invalid_logs += 1
        self._write_status(
            'Log file\ts3://%s/%s\tINVALID: not found'
            % (log_data['s3Bucket'], log_data['s3Object']), True)
def get_relative_expiration_time(remaining):
    """Build a short human-readable duration string.

    ``remaining`` is a ``dateutil.relativedelta``-like object.  Only the
    largest non-zero unit and the unit immediately following it are
    reported, e.g. ``'11 hours and 59 minutes'`` or ``'1 year'``; an
    all-zero delta yields the empty string.
    """
    units = ("years", "months", "days", "hours", "minutes")
    amounts = [getattr(remaining, unit) for unit in units]
    for index, amount in enumerate(amounts):
        if amount <= 0:
            continue
        # Singularize the unit name when the amount is exactly one.
        words = [str(amount), units[index][:-1] if amount == 1 else units[index]]
        follow_index = index + 1
        if follow_index < len(units) and amounts[follow_index] > 0:
            follow_amount = amounts[follow_index]
            words.append("and")
            words.append(str(follow_amount))
            words.append(
                units[follow_index][:-1]
                if follow_amount == 1 else units[follow_index])
        return " ".join(words)
    return ""
Please verify installation.' def __init__(self, auth_token, expiration, repository_endpoint, domain, repository, subprocess_utils, namespace=None): self.auth_token = auth_token self.expiration = expiration self.repository_endpoint = repository_endpoint self.domain = domain self.repository = repository self.subprocess_utils = subprocess_utils self.namespace = namespace def login(self, dry_run=False): raise NotImplementedError('login()') def _dry_run_commands(self, tool, commands): for command in commands: sys.stdout.write(' '.join(command)) sys.stdout.write(os.linesep) sys.stdout.write(os.linesep) def _write_success_message(self, tool): # add extra 30 seconds make expiration more reasonable # for some corner case # e.g. 11 hours 59 minutes 31 seconds should output --> 12 hours. remaining = relativedelta( self.expiration, datetime.now(tzutc())) + relativedelta(seconds=30) expiration_message = get_relative_expiration_time(remaining) sys.stdout.write('Successfully configured {} to use ' 'AWS CodeArtifact repository {} ' .format(tool, self.repository_endpoint)) sys.stdout.write(os.linesep) sys.stdout.write('Login expires in {} at {}'.format( expiration_message, self.expiration)) sys.stdout.write(os.linesep) def _run_commands(self, tool, commands, dry_run=False): if dry_run: self._dry_run_commands(tool, commands) return for command in commands: try: self.subprocess_utils.check_call( command, stdout=self.subprocess_utils.PIPE, stderr=self.subprocess_utils.PIPE, ) except OSError as ex: if ex.errno == errno.ENOENT: raise ValueError( self._TOOL_NOT_FOUND_MESSAGE % tool ) raise ex self._write_success_message(tool) @classmethod def get_commands(cls, endpoint, auth_token, **kwargs): raise NotImplementedError('get_commands()') class NuGetBaseLogin(BaseLogin): _NUGET_INDEX_URL_FMT = '{endpoint}v3/index.json' # When adding new sources we can specify that we added the source to the # user level NuGet.Config file. 
    def login(self, dry_run=False):
        """Register (or update) the CodeArtifact endpoint as a NuGet source.

        Lists the currently configured sources, decides whether the
        CodeArtifact index URL needs an 'add' or an 'update' operation,
        and runs the corresponding nuget/dotnet command.  With
        ``dry_run`` the command is printed instead of executed.

        Raises ValueError when the nuget/dotnet binary is not installed.
        """
        try:
            source_to_url_dict = self._get_source_to_url_dict()
        except OSError as ex:
            # ENOENT here means the tool executable itself is missing.
            if ex.errno == errno.ENOENT:
                raise ValueError(
                    self._TOOL_NOT_FOUND_MESSAGE % self._get_tool_name()
                )
            raise ex

        nuget_index_url = self._NUGET_INDEX_URL_FMT.format(
            endpoint=self.repository_endpoint
        )
        # already_exists tells us whether to 'update' an existing source
        # entry or 'add' a brand-new one.
        source_name, already_exists = self._get_source_name(
            nuget_index_url, source_to_url_dict
        )

        if already_exists:
            command = self._get_configure_command(
                'update', nuget_index_url, source_name
            )
            source_configured_message = self._SOURCE_UPDATED_MESSAGE
        else:
            command = self._get_configure_command('add', nuget_index_url,
                                                  source_name)
            source_configured_message = self._SOURCE_ADDED_MESSAGE

        if dry_run:
            # Print the single configure command and do nothing else.
            dry_run_command = ' '.join([str(cd) for cd in command])
            uni_print(dry_run_command)
            uni_print('\n')
            return

        try:
            self.subprocess_utils.check_output(
                command,
                stderr=self.subprocess_utils.PIPE
            )
        except subprocess.CalledProcessError as e:
            uni_print('Failed to update the NuGet.Config\n')
            raise e

        uni_print(source_configured_message % source_name)
        self._write_success_message('nuget')
Source Name 100 # https://source100.com/index.json # Or it can be (blank line after Registered Sources:) # Registered Sources: # 1. Source Name 1 [Enabled] # https://source1.com/index.json # 2. Source Name 2 [Disabled] # https://source2.com/index.json # ... # 100. Source Name 100 # https://source100.com/index.json response = self.subprocess_utils.check_output( self._get_list_command(), stderr=self.subprocess_utils.PIPE ) lines = response.decode("utf-8").splitlines() lines = [line for line in lines if line.strip() != ''] source_to_url_dict = {} for i in range(1, len(lines), 2): source_to_url_dict[self._parse_source_name(lines[i])] = \ self._parse_source_url(lines[i + 1]) return source_to_url_dict def _parse_source_name(self, line): # A source name line takes the following form: # 1. NuGet Source [Enabled] # Remove the Enabled/Disabled tag. line_without_tag = line.strip().rsplit(' [', 1)[0] # Remove the leading number. return line_without_tag.split(None, 1)[1] def _parse_source_url(self, line): # A source url line takes the following form: # https://source.com/index.json return line.strip() def _get_source_name(self, codeartifact_url, source_dict): default_name = '{}/{}'.format(self.domain, self.repository) # Check if the CodeArtifact URL is already present in the # NuGet.Config file. If the URL already exists, use the source name # already assigned to the CodeArtifact URL. for source_name, source_url in source_dict.items(): if source_url == codeartifact_url: return source_name, True # If the CodeArtifact URL is not present in the NuGet.Config file, # check if the default source name already exists so we can know # whether we need to add a new entry or update the existing entry. for source_name in source_dict.keys(): if source_name == default_name: return source_name, True # If neither the source url nor the source name already exist in the # NuGet.Config file, use the default source name. 
class DotNetLogin(NuGetBaseLogin):
    """NuGet source configuration performed through the ``dotnet`` CLI."""

    def _get_tool_name(self):
        return 'dotnet'

    def _get_list_command(self):
        # Lists every configured source together with its URL.
        return ['dotnet', 'nuget', 'list', 'source', '--format', 'detailed']

    def _get_configure_command(self, operation, nuget_index_url, source_name):
        # 'add' takes the URL positionally and the name as an option;
        # 'update' is the reverse: the name positionally, URL as an option.
        if operation == 'add':
            positional = nuget_index_url
            named = ['--name', source_name]
        else:
            positional = source_name
            named = ['--source', nuget_index_url]
        command = ['dotnet', 'nuget', operation, 'source', positional]
        command.extend(named)
        command.extend(['--username', 'aws', '--password', self.auth_token])
        # Encryption is not supported on non-Windows platforms.
        if not is_windows:
            command.append('--store-password-in-clear-text')
        return command
NPM_CMD = 'npm.cmd' if platform.system().lower() == 'windows' else 'npm' def login(self, dry_run=False): scope = self.get_scope( self.namespace ) commands = self.get_commands( self.repository_endpoint, self.auth_token, scope=scope ) self._run_commands('npm', commands, dry_run) @classmethod def get_scope(cls, namespace): # Regex for valid scope name valid_scope_name = re.compile('^(@[a-z0-9-~][a-z0-9-._~]*)') if namespace is None: return namespace # Add @ prefix to scope if it doesn't exist if namespace.startswith('@'): scope = namespace else: scope = '@{}'.format(namespace) if not valid_scope_name.match(scope): raise ValueError( 'Invalid scope name, scope must contain URL-safe ' 'characters, no leading dots or underscores' ) return scope @classmethod def get_commands(cls, endpoint, auth_token, **kwargs): commands = [] scope = kwargs.get('scope') # prepend scope if it exists registry = '{}:registry'.format(scope) if scope else 'registry' # set up the codeartifact repository as the npm registry. commands.append( [cls.NPM_CMD, 'config', 'set', registry, endpoint] ) repo_uri = urlparse.urlsplit(endpoint) # configure npm to always require auth for the repository. always_auth_config = '//{}{}:always-auth'.format( repo_uri.netloc, repo_uri.path ) commands.append( [cls.NPM_CMD, 'config', 'set', always_auth_config, 'true'] ) # set auth info for the repository. 
auth_token_config = '//{}{}:_authToken'.format( repo_uri.netloc, repo_uri.path ) commands.append( [cls.NPM_CMD, 'config', 'set', auth_token_config, auth_token] ) return commands class PipLogin(BaseLogin): PIP_INDEX_URL_FMT = '{scheme}://aws:{auth_token}@{netloc}{path}simple/' def login(self, dry_run=False): commands = self.get_commands( self.repository_endpoint, self.auth_token ) self._run_commands('pip', commands, dry_run) @classmethod def get_commands(cls, endpoint, auth_token, **kwargs): repo_uri = urlparse.urlsplit(endpoint) pip_index_url = cls.PIP_INDEX_URL_FMT.format( scheme=repo_uri.scheme, auth_token=auth_token, netloc=repo_uri.netloc, path=repo_uri.path ) return [['pip', 'config', 'set', 'global.index-url', pip_index_url]] class TwineLogin(BaseLogin): DEFAULT_PYPI_RC_FMT = u'''\ [distutils] index-servers= pypi codeartifact [codeartifact] repository: {repository_endpoint} username: aws password: {auth_token}''' def __init__( self, auth_token, expiration, repository_endpoint, domain, repository, subprocess_utils, pypi_rc_path=None ): if pypi_rc_path is None: pypi_rc_path = self.get_pypi_rc_path() self.pypi_rc_path = pypi_rc_path super(TwineLogin, self).__init__( auth_token, expiration, repository_endpoint, domain, repository, subprocess_utils) @classmethod def get_commands(cls, endpoint, auth_token, **kwargs): # TODO(ujjwalpa@): We don't really have a command to execute for Twine # as we directly write to the pypirc file (or to stdout for dryrun) # with python itself instead. Nevertheless, we're using this method for # testing so we'll keep the interface for now but return a string with # the expected pypirc content instead of a list of commands to # execute. This definitely reeks of code smell and there is probably # room for rethinking and refactoring the interfaces of these adapter # helper classes in the future. assert 'pypi_rc_path' in kwargs, 'pypi_rc_path must be provided.' 
pypi_rc_path = kwargs['pypi_rc_path'] default_pypi_rc = cls.DEFAULT_PYPI_RC_FMT.format( repository_endpoint=endpoint, auth_token=auth_token ) pypi_rc = RawConfigParser() if os.path.exists(pypi_rc_path): try: pypi_rc.read(pypi_rc_path) index_servers = pypi_rc.get('distutils', 'index-servers') servers = [ server.strip() for server in index_servers.split('\n') if server.strip() != '' ] if 'codeartifact' not in servers: servers.append('codeartifact') pypi_rc.set( 'distutils', 'index-servers', '\n' + '\n'.join(servers) ) if 'codeartifact' not in pypi_rc.sections(): pypi_rc.add_section('codeartifact') pypi_rc.set('codeartifact', 'repository', endpoint) pypi_rc.set('codeartifact', 'username', 'aws') pypi_rc.set('codeartifact', 'password', auth_token) except Exception as e: # invalid .pypirc file sys.stdout.write('%s is in an invalid state.' % pypi_rc_path) sys.stdout.write(os.linesep) raise e else: pypi_rc.readfp(StringIO(default_pypi_rc)) pypi_rc_stream = StringIO() pypi_rc.write(pypi_rc_stream) pypi_rc_str = pypi_rc_stream.getvalue() pypi_rc_stream.close() return pypi_rc_str def login(self, dry_run=False): # No command to execute for Twine, we get the expected pypirc content # instead. 
pypi_rc_str = self.get_commands( self.repository_endpoint, self.auth_token, pypi_rc_path=self.pypi_rc_path ) if dry_run: sys.stdout.write('Dryrun mode is enabled, not writing to pypirc.') sys.stdout.write(os.linesep) sys.stdout.write( '%s would have been set to the following:' % self.pypi_rc_path ) sys.stdout.write(os.linesep) sys.stdout.write(os.linesep) sys.stdout.write(pypi_rc_str) sys.stdout.write(os.linesep) else: with open(self.pypi_rc_path, 'w+') as fp: fp.write(pypi_rc_str) self._write_success_message('twine') @classmethod def get_pypi_rc_path(cls): return os.path.join(os.path.expanduser("~"), ".pypirc") class CodeArtifactLogin(BasicCommand): '''Log in to the idiomatic tool for the requested package format.''' TOOL_MAP = { 'nuget': { 'package_format': 'nuget', 'login_cls': NuGetLogin, 'namespace_support': False, }, 'dotnet': { 'package_format': 'nuget', 'login_cls': DotNetLogin, 'namespace_support': False, }, 'npm': { 'package_format': 'npm', 'login_cls': NpmLogin, 'namespace_support': True, }, 'pip': { 'package_format': 'pypi', 'login_cls': PipLogin, 'namespace_support': False, }, 'twine': { 'package_format': 'pypi', 'login_cls': TwineLogin, 'namespace_support': False, } } NAME = 'login' DESCRIPTION = ( 'Sets up the idiomatic tool for your package format to use your ' 'CodeArtifact repository. Your login information is valid for up ' 'to 12 hours after which you must login again.' 
) ARG_TABLE = [ { 'name': 'tool', 'help_text': 'The tool you want to connect with your repository', 'choices': list(TOOL_MAP.keys()), 'required': True, }, { 'name': 'domain', 'help_text': 'Your CodeArtifact domain name', 'required': True, }, { 'name': 'domain-owner', 'help_text': 'The AWS account ID that owns your CodeArtifact ' 'domain', 'required': False, }, { 'name': 'namespace', 'help_text': 'Associates a namespace with your repository tool', 'required': False, }, { 'name': 'duration-seconds', 'cli_type_name': 'integer', 'help_text': 'The time, in seconds, that the login information ' 'is valid', 'required': False, }, { 'name': 'repository', 'help_text': 'Your CodeArtifact repository name', 'required': True, }, { 'name': 'dry-run', 'action': 'store_true', 'help_text': 'Only print the commands that would be executed ' 'to connect your tool with your repository without ' 'making any changes to your configuration', 'required': False, 'default': False }, ] def _get_namespace(self, tool, parsed_args): namespace_compatible = self.TOOL_MAP[tool]['namespace_support'] if not namespace_compatible and parsed_args.namespace: raise ValueError( 'Argument --namespace is not supported for {}'.format(tool) ) else: return parsed_args.namespace def _get_repository_endpoint( self, codeartifact_client, parsed_args, package_format ): kwargs = { 'domain': parsed_args.domain, 'repository': parsed_args.repository, 'format': package_format } if parsed_args.domain_owner: kwargs['domainOwner'] = parsed_args.domain_owner get_repository_endpoint_response = \ codeartifact_client.get_repository_endpoint(**kwargs) return get_repository_endpoint_response['repositoryEndpoint'] def _get_authorization_token(self, codeartifact_client, parsed_args): kwargs = { 'domain': parsed_args.domain } if parsed_args.domain_owner: kwargs['domainOwner'] = parsed_args.domain_owner if parsed_args.duration_seconds: kwargs['durationSeconds'] = parsed_args.duration_seconds get_authorization_token_response = \ 
    def _run_main(self, parsed_args, parsed_globals):
        """Entry point: fetch a token and endpoint, then delegate to the
        tool-specific login class from TOOL_MAP.

        Returns 0 on success (errors propagate as exceptions).
        """
        tool = parsed_args.tool.lower()

        package_format = self.TOOL_MAP[tool]['package_format']

        codeartifact_client = cli_utils.create_client_from_parsed_globals(
            self._session, 'codeartifact', parsed_globals
        )

        # The token must be fetched before computing the expiration shown
        # to the user; the endpoint lookup is format-specific.
        auth_token_res = self._get_authorization_token(
            codeartifact_client, parsed_args
        )

        repository_endpoint = self._get_repository_endpoint(
            codeartifact_client, parsed_args, package_format
        )
        domain = parsed_args.domain
        repository = parsed_args.repository
        # Raises for tools that do not support --namespace.
        namespace = self._get_namespace(tool, parsed_args)

        auth_token = auth_token_res['authorizationToken']
        expiration = parse_timestamp(auth_token_res['expiration'])

        login = self.TOOL_MAP[tool]['login_cls'](
            auth_token, expiration, repository_endpoint, domain, repository,
            subprocess, namespace
        )

        login.login(parsed_args.dry_run)

        return 0
def initialize(cli):
    """
    The entry point for the credential helper
    """
    event_name = 'building-command-table.codecommit'
    cli.register(event_name, inject_commands)
) } ] def __init__(self, session): super(CodeCommitGetCommand, self).__init__(session) def _run_main(self, args, parsed_globals): git_parameters = self.read_git_parameters() if ('amazon.com' in git_parameters['host'] or 'amazonaws.com' in git_parameters['host'] or args.ignore_host_check): theUrl = self.extract_url(git_parameters) region = self.extract_region(git_parameters, parsed_globals) signature = self.sign_request(region, theUrl) self.write_git_parameters(signature) return 0 def write_git_parameters(self, signature): username = self._session.get_credentials().access_key if self._session.get_credentials().token is not None: username += "%" + self._session.get_credentials().token # Python will add a \r to the line ending for a text stdout in Windows. # Git does not like the \r, so switch to binary with NonTranslatedStdout() as binary_stdout: binary_stdout.write('username={0}\n'.format(username)) logger.debug('username\n%s', username) binary_stdout.write('password={0}\n'.format(signature)) # need to explicitly flush the buffer here, # before we turn the stream back to text for windows binary_stdout.flush() logger.debug('signature\n%s', signature) def read_git_parameters(self): parsed = {} for line in sys.stdin: line = line.strip() if line: key, value = line.split('=', 1) parsed[key] = value return parsed def extract_url(self, parameters): url = '{0}://{1}/{2}'.format(parameters['protocol'], parameters['host'], parameters['path']) return url def extract_region(self, parameters, parsed_globals): match = re.match(r'(vpce-.+\.)?git-codecommit(-fips)?\.([^.]+)\.(vpce\.)?amazonaws\.com', parameters['host']) if match is not None: return match.group(3) elif parsed_globals.region is not None: return parsed_globals.region else: return self._session.get_config_variable('region') def sign_request(self, region, url_to_sign): credentials = self._session.get_credentials() signer = SigV4Auth(credentials, 'codecommit', region) request = AWSRequest() request.url = url_to_sign 
request.method = 'GIT' now = datetime.datetime.utcnow() request.context['timestamp'] = now.strftime('%Y%m%dT%H%M%S') split = urlsplit(request.url) # we don't want to include the port number in the signature hostname = split.netloc.split(':')[0] canonical_request = '{0}\n{1}\n\nhost:{2}\n\nhost\n'.format( request.method, split.path, hostname) logger.debug("Calculating signature using v4 auth.") logger.debug('CanonicalRequest:\n%s', canonical_request) string_to_sign = signer.string_to_sign(request, canonical_request) logger.debug('StringToSign:\n%s', string_to_sign) signature = signer.signature(string_to_sign, request) logger.debug('Signature:\n%s', signature) return '{0}Z{1}'.format(request.context['timestamp'], signature) class CodeCommitCommand(BasicCommand): NAME = 'credential-helper' SYNOPSIS = ('aws codecommit credential-helper') EXAMPLES = '' SUBCOMMANDS = [ {'name': 'get', 'command_class': CodeCommitGetCommand}, {'name': 'store', 'command_class': CodeCommitNoOpStoreCommand}, {'name': 'erase', 'command_class': CodeCommitNoOpEraseCommand}, ] DESCRIPTION = ('Provide a SigV4 compatible user name and' ' password for git smart HTTP ' ' These commands are consumed by git and' ' should not used directly. Erase and Store' ' are no-ops. Get is operation to generate' ' credentials to authenticate AWS CodeCommit.' 
' Run \"aws codecommit credential-helper help\"' ' for details') def _run_main(self, args, parsed_globals): raise ValueError('usage: aws [options] codecommit' ' credential-helper ' '[parameters]\naws: error: too few arguments') ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1642014456.0892723 awscli-1.22.34/awscli/customizations/codedeploy/0000755000000000000000000000000000000000000021610 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/codedeploy/__init__.py0000644000000000000000000000106500000000000023723 0ustar00rootroot00000000000000# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/codedeploy/codedeploy.py0000644000000000000000000000424400000000000024315 0ustar00rootroot00000000000000# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. 
def inject_commands(command_table, session, **kwargs):
    """
    Inject custom 'aws deploy' commands.
    """
    custom_commands = (
        ('push', Push),
        ('register', Register),
        ('deregister', Deregister),
        ('install', Install),
        ('uninstall', Uninstall),
    )
    for command_name, command_cls in custom_commands:
        command_table[command_name] = command_cls(session)
You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import sys from botocore.exceptions import ClientError from awscli.customizations.commands import BasicCommand from awscli.customizations.codedeploy.utils import \ validate_region, validate_instance_name, INSTANCE_NAME_ARG class Deregister(BasicCommand): NAME = 'deregister' DESCRIPTION = ( 'Removes any tags from the on-premises instance; deregisters the ' 'on-premises instance from AWS CodeDeploy; and, unless requested ' 'otherwise, deletes the IAM user for the on-premises instance.' ) ARG_TABLE = [ INSTANCE_NAME_ARG, { 'name': 'no-delete-iam-user', 'action': 'store_true', 'default': False, 'help_text': ( 'Optional. Do not delete the IAM user for the registered ' 'on-premises instance.' 
    def _get_instance_info(self, params):
        """Fetch the registered on-premises instance and cache its IAM info.

        Populates ``params.iam_user_arn``, ``params.user_name`` and
        ``params.tags`` for the later deregistration/cleanup steps.
        """
        sys.stdout.write('Retrieving on-premises instance information... ')
        response = self.codedeploy.get_on_premises_instance(
            instanceName=params.instance_name
        )
        params.iam_user_arn = response['instanceInfo']['iamUserArn']
        # The IAM user name is the final path segment of the user ARN.
        start = params.iam_user_arn.rfind('/') + 1
        params.user_name = params.iam_user_arn[start:]
        params.tags = response['instanceInfo']['tags']
        sys.stdout.write(
            'DONE\n'
            'IamUserArn: {0}\n'.format(
                params.iam_user_arn
            )
        )
        if params.tags:
            sys.stdout.write('Tags:')
            for tag in params.tags:
                sys.stdout.write(
                    ' Key={0},Value={1}'.format(tag['Key'], tag['Value'])
                )
            sys.stdout.write('\n')
') self.codedeploy.remove_tags_from_on_premises_instances( tags=params.tags, instanceNames=[params.instance_name] ) sys.stdout.write('DONE\n') def _deregister_instance(self, params): sys.stdout.write('Deregistering the on-premises instance... ') self.codedeploy.deregister_on_premises_instance( instanceName=params.instance_name ) sys.stdout.write('DONE\n') def _delete_user_policy(self, params): sys.stdout.write('Deleting the IAM user policies... ') list_user_policies = self.iam.get_paginator('list_user_policies') try: for response in list_user_policies.paginate( UserName=params.user_name): for policy_name in response['PolicyNames']: self.iam.delete_user_policy( UserName=params.user_name, PolicyName=policy_name ) except ClientError as e: if e.response.get('Error', {}).get('Code') != 'NoSuchEntity': raise e sys.stdout.write('DONE\n') def _delete_access_key(self, params): sys.stdout.write('Deleting the IAM user access keys... ') list_access_keys = self.iam.get_paginator('list_access_keys') try: for response in list_access_keys.paginate( UserName=params.user_name): for access_key in response['AccessKeyMetadata']: self.iam.delete_access_key( UserName=params.user_name, AccessKeyId=access_key['AccessKeyId'] ) except ClientError as e: if e.response.get('Error', {}).get('Code') != 'NoSuchEntity': raise e sys.stdout.write('DONE\n') def _delete_iam_user(self, params): sys.stdout.write('Deleting the IAM user ({0})... '.format( params.user_name )) try: self.iam.delete_user(UserName=params.user_name) except ClientError as e: if e.response.get('Error', {}).get('Code') != 'NoSuchEntity': raise e sys.stdout.write('DONE\n') ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/codedeploy/install.py0000644000000000000000000001015500000000000023632 0ustar00rootroot00000000000000# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
class Install(BasicCommand):
    """`aws deploy install`: configure and install the AWS CodeDeploy Agent
    on the on-premises instance the command runs on."""

    NAME = 'install'
    DESCRIPTION = (
        'Configures and installs the AWS CodeDeploy Agent on the on-premises '
        'instance.'
    )
    ARG_TABLE = [
        {
            'name': 'config-file',
            'synopsis': '--config-file <path>',
            'required': True,
            'help_text': (
                'Required. The path to the on-premises instance configuration '
                'file.'
            )
        },
        {
            'name': 'override-config',
            'action': 'store_true',
            'default': False,
            'help_text': (
                'Optional. Overrides the on-premises instance configuration '
                'file.'
            )
        },
        {
            'name': 'agent-installer',
            'synopsis': '--agent-installer <s3-location>',
            'required': False,
            'help_text': (
                'Optional. The AWS CodeDeploy Agent installer file.'
            )
        }
    ]

    def _run_main(self, parsed_args, parsed_globals):
        """Validate the environment and arguments, then write the config
        file and install the agent.

        Failures are reported on stderr with a pointer to the manual
        installation instructions; the exception is not re-raised.
        """
        params = parsed_args
        params.session = self._session
        validate_region(params, parsed_globals)
        # Detects the OS and sets params.system (Ubuntu/RHEL/Windows).
        validate_instance(params)
        params.system.validate_administrator()
        self._validate_override_config(params)
        self._validate_agent_installer(params)
        try:
            self._create_config(params)
            self._install_agent(params)
        except Exception as e:
            sys.stdout.flush()
            sys.stderr.write(
                'ERROR\n'
                '{0}\n'
                'Install the AWS CodeDeploy Agent on the on-premises instance '
                'by following the instructions in "Configure Existing '
                'On-Premises Instances by Using AWS CodeDeploy" in the AWS '
                'CodeDeploy User Guide.\n'.format(e)
            )

    def _validate_override_config(self, params):
        """Refuse to clobber an existing config file unless --override-config
        was supplied."""
        if os.path.isfile(params.system.CONFIG_PATH) and \
                not params.override_config:
            raise RuntimeError(
                'The on-premises instance configuration file already exists. '
                'Specify --override-config to update the existing on-premises '
                'instance configuration file.'
            )

    def _validate_agent_installer(self, params):
        """Resolve the installer's S3 bucket/key, defaulting to the public
        regional AWS CodeDeploy bucket when --agent-installer is omitted."""
        validate_s3_location(params, 'agent_installer')
        if 'bucket' not in params:
            params.bucket = 'aws-codedeploy-{0}'.format(params.region)
        if 'key' not in params:
            params.key = 'latest/{0}'.format(params.system.INSTALLER)
            params.installer = params.system.INSTALLER
        else:
            # The installer file name is the last path component of the key.
            start = params.key.rfind('/') + 1
            params.installer = params.key[start:]

    def _create_config(self, params):
        """Copy the user-supplied configuration file into the agent's
        well-known config location, creating the directory if needed."""
        sys.stdout.write(
            'Creating the on-premises instance configuration file... '
        )
        try:
            os.makedirs(params.system.CONFIG_DIR)
        except OSError as e:
            # The directory already existing is fine; anything else is not.
            if e.errno != errno.EEXIST:
                raise
        if params.config_file != params.system.CONFIG_PATH:
            shutil.copyfile(params.config_file, params.system.CONFIG_PATH)
        sys.stdout.write('DONE\n')

    def _install_agent(self, params):
        """Delegate the actual download/install to the detected OS helper."""
        sys.stdout.write('Installing the AWS CodeDeploy Agent... ')
        params.system.install(params)
        sys.stdout.write('DONE\n')
# Argument descriptions and schemas for the --s3-location / --github-location
# revision arguments, plus the CustomArgument subclasses that repack their
# values into the 'revision' request parameter.

S3_LOCATION_ARG_DESCRIPTION = {
    'name': 's3-location',
    'required': False,
    'help_text': (
        'Information about the location of the application revision in Amazon '
        'S3. You must specify the bucket, the key, and bundleType. '
        'Optionally, you can also specify an eTag and version.'
    )
}

S3_LOCATION_SCHEMA = {
    "type": "object",
    "properties": {
        "bucket": {
            "type": "string",
            "description": "The Amazon S3 bucket name.",
            "required": True
        },
        "key": {
            "type": "string",
            "description": "The Amazon S3 object key name.",
            "required": True
        },
        "bundleType": {
            "type": "string",
            "description": "The format of the bundle stored in Amazon S3.",
            "enum": ["tar", "tgz", "zip"],
            "required": True
        },
        "eTag": {
            "type": "string",
            "description": "The Amazon S3 object eTag.",
            "required": False
        },
        "version": {
            "type": "string",
            "description": "The Amazon S3 object version.",
            "required": False
        }
    }
}

GITHUB_LOCATION_ARG_DESCRIPTION = {
    'name': 'github-location',
    'required': False,
    'help_text': (
        'Information about the location of the application revision in '
        'GitHub. You must specify the repository and commit ID that '
        'references the application revision. For the repository, use the '
        'format GitHub-account/repository-name or GitHub-org/repository-name. '
        'For the commit ID, use the SHA1 Git commit reference.'
    )
}

GITHUB_LOCATION_SCHEMA = {
    "type": "object",
    "properties": {
        "repository": {
            "type": "string",
            "description": (
                "The GitHub account or organization and repository. Specify "
                "as GitHub-account/repository or GitHub-org/repository."
            ),
            "required": True
        },
        "commitId": {
            "type": "string",
            "description": "The SHA1 Git commit reference.",
            "required": True
        }
    }
}


def modify_revision_arguments(argument_table, session, **kwargs):
    """Event handler: add --s3-location / --github-location to the operation's
    argument table and make the raw --revision argument optional."""
    s3_model = create_argument_model_from_schema(S3_LOCATION_SCHEMA)
    github_model = create_argument_model_from_schema(GITHUB_LOCATION_SCHEMA)
    s3_arg = S3LocationArgument(
        argument_model=s3_model,
        session=session,
        **S3_LOCATION_ARG_DESCRIPTION
    )
    github_arg = GitHubLocationArgument(
        argument_model=github_model,
        session=session,
        **GITHUB_LOCATION_ARG_DESCRIPTION
    )
    argument_table[S3_LOCATION_ARG_DESCRIPTION['name']] = s3_arg
    argument_table[GITHUB_LOCATION_ARG_DESCRIPTION['name']] = github_arg
    argument_table['revision'].required = False


class LocationArgument(CustomArgument):
    """Base class for revision-location arguments; parses the CLI value and
    stores the repacked structure under parameters['revision']."""

    def __init__(self, session, *args, **kwargs):
        super(LocationArgument, self).__init__(*args, **kwargs)
        self._session = session

    def add_to_params(self, parameters, value):
        if value is None:
            return
        # Give plugins first crack at parsing the raw CLI value.
        parsed = self._session.emit_first_non_none_response(
            'process-cli-arg.codedeploy.%s' % self.name,
            param=self.argument_model,
            cli_argument=self,
            value=value,
            operation=None
        )
        if parsed is None:
            parsed = unpack_cli_arg(self, value)
        parameters['revision'] = self.build_revision_location(parsed)

    def build_revision_location(self, value_dict):
        """
        Repack the input structure into a revisionLocation.
        """
        raise NotImplementedError("build_revision_location")


class S3LocationArgument(LocationArgument):
    def build_revision_location(self, value_dict):
        """Build an S3 revisionLocation; bucket, key and bundleType are
        mandatory, eTag and version are carried through when present."""
        required_keys = ('bucket', 'key', 'bundleType')
        if not all(value_dict.get(k) for k in required_keys):
            raise RuntimeError(
                '--s3-location must specify bucket, key and bundleType.'
            )
        s3_location = {k: value_dict[k] for k in required_keys}
        for optional_key in ('eTag', 'version'):
            if optional_key in value_dict:
                s3_location[optional_key] = value_dict[optional_key]
        return {
            "revisionType": "S3",
            "s3Location": s3_location
        }


class GitHubLocationArgument(LocationArgument):
    def build_revision_location(self, value_dict):
        """Build a GitHub revisionLocation; repository and commitId are
        mandatory."""
        if not all(value_dict.get(k) for k in ('repository', 'commitId')):
            raise RuntimeError(
                '--github-location must specify repository and commitId.'
            )
        return {
            "revisionType": "GitHub",
            "gitHubLocation": {
                "repository": value_dict['repository'],
                "commitId": value_dict['commitId']
            }
        }
class Push(BasicCommand):
    """`aws deploy push`: zip a local directory (which must contain an
    appspec.yml), upload it to S3 — multipart above 6 MB — and register it
    with AWS CodeDeploy as an application revision."""

    NAME = 'push'
    DESCRIPTION = (
        'Bundles and uploads to Amazon Simple Storage Service (Amazon S3) an '
        'application revision, which is a zip archive file that contains '
        'deployable content and an accompanying Application Specification '
        'file (AppSpec file). If the upload is successful, a message is '
        'returned that describes how to call the create-deployment command to '
        'deploy the application revision from Amazon S3 to target Amazon '
        'Elastic Compute Cloud (Amazon EC2) instances.'
    )
    ARG_TABLE = [
        {
            'name': 'application-name',
            'synopsis': '--application-name <app-name>',
            'required': True,
            'help_text': (
                'Required. The name of the AWS CodeDeploy application to be '
                'associated with the application revision.'
            )
        },
        {
            'name': 's3-location',
            'synopsis': '--s3-location s3://<bucket>/<key>',
            'required': True,
            'help_text': (
                'Required. Information about the location of the application '
                'revision to be uploaded to Amazon S3. You must specify both '
                'a bucket and a key that represent the Amazon S3 bucket name '
                'and the object key name. Content will be zipped before '
                'uploading. Use the format s3://\<bucket\>/\<key\>'
            )
        },
        {
            'name': 'ignore-hidden-files',
            'action': 'store_true',
            'default': False,
            'group_name': 'ignore-hidden-files',
            'help_text': (
                'Optional. Set the --ignore-hidden-files flag to not bundle '
                'and upload hidden files to Amazon S3; otherwise, set the '
                '--no-ignore-hidden-files flag (the default) to bundle and '
                'upload hidden files to Amazon S3.'
            )
        },
        {
            'name': 'no-ignore-hidden-files',
            'action': 'store_true',
            'default': False,
            'group_name': 'ignore-hidden-files'
        },
        {
            'name': 'source',
            'synopsis': '--source <path>',
            'default': '.',
            'help_text': (
                'Optional. The location of the deployable content and the '
                'accompanying AppSpec file on the development machine to be '
                'zipped and uploaded to Amazon S3. If not specified, the '
                'current directory is used.'
            )
        },
        {
            'name': 'description',
            'synopsis': '--description <description>',
            'help_text': (
                'Optional. A comment that summarizes the application '
                'revision. If not specified, the default string "Uploaded by '
                'AWS CLI \'time\' UTC" is used, where \'time\' is the current '
                'system time in Coordinated Universal Time (UTC).'
            )
        }
    ]

    def _run_main(self, parsed_args, parsed_globals):
        """Validate arguments, create clients, then bundle/upload/register."""
        self._validate_args(parsed_args)
        self.codedeploy = self._session.create_client(
            'codedeploy',
            region_name=parsed_globals.region,
            endpoint_url=parsed_globals.endpoint_url,
            verify=parsed_globals.verify_ssl
        )
        self.s3 = self._session.create_client(
            's3',
            region_name=parsed_globals.region
        )
        self._push(parsed_args)

    def _validate_args(self, parsed_args):
        """Parse the s3 location, reject conflicting hidden-file flags and
        fill in the default description."""
        validate_s3_location(parsed_args, 's3_location')
        if parsed_args.ignore_hidden_files \
                and parsed_args.no_ignore_hidden_files:
            raise RuntimeError(
                'You cannot specify both --ignore-hidden-files and '
                '--no-ignore-hidden-files.'
            )
        if not parsed_args.description:
            parsed_args.description = (
                'Uploaded by AWS CLI {0} UTC'.format(
                    datetime.utcnow().isoformat()
                )
            )

    def _push(self, params):
        """Bundle the source, upload it, register the revision and print the
        create-deployment invocation for the caller to copy."""
        with self._compress(
            params.source,
            params.ignore_hidden_files
        ) as bundle:
            try:
                upload_response = self._upload_to_s3(params, bundle)
                # S3 returns the ETag wrapped in double quotes; strip them.
                params.eTag = upload_response['ETag'].replace('"', "")
                if 'VersionId' in upload_response:
                    params.version = upload_response['VersionId']
            except Exception as e:
                raise RuntimeError(
                    'Failed to upload \'%s\' to \'%s\': %s' %
                    (params.source,
                     params.s3_location,
                     str(e))
                )
        self._register_revision(params)

        if 'version' in params:
            version_string = ',version={0}'.format(params.version)
        else:
            version_string = ''
        s3location_string = (
            '--s3-location bucket={0},key={1},'
            'bundleType=zip,eTag={2}{3}'.format(
                params.bucket,
                params.key,
                params.eTag,
                version_string
            )
        )
        sys.stdout.write(
            'To deploy with this revision, run:\n'
            'aws deploy create-deployment '
            '--application-name {0} {1} '
            '--deployment-group-name <deployment-group-name> '
            '--deployment-config-name <deployment-config-name> '
            '--description <description>\n'.format(
                params.application_name,
                s3location_string
            )
        )

    @contextlib.contextmanager
    def _compress(self, source, ignore_hidden_files=False):
        """Yield an open temporary file holding a zip of *source*.

        Raises RuntimeError if the tree does not contain an appspec.yml at
        its root. Hidden files/directories are skipped when requested.
        """
        source_path = os.path.abspath(source)
        appspec_path = os.path.sep.join([source_path, 'appspec.yml'])
        with tempfile.TemporaryFile('w+b') as tf:
            # ZipFile is a context manager on all supported Python versions
            # (>= 3.6); closing it here finalizes the archive's central
            # directory before the file object is yielded.
            with zipfile.ZipFile(tf, 'w', allowZip64=True) as zf:
                contains_appspec = False
                for root, dirs, files in os.walk(source, topdown=True):
                    if ignore_hidden_files:
                        files = [fn for fn in files
                                 if not fn.startswith('.')]
                        dirs[:] = [dn for dn in dirs
                                   if not dn.startswith('.')]
                    for fn in files:
                        filename = os.path.join(root, fn)
                        filename = os.path.abspath(filename)
                        # Archive names are relative to the source root.
                        arcname = filename[len(source_path) + 1:]
                        if filename == appspec_path:
                            contains_appspec = True
                        zf.write(filename, arcname, ZIP_COMPRESSION_MODE)
                if not contains_appspec:
                    raise RuntimeError(
                        '{0} was not found'.format(appspec_path)
                    )
            yield tf

    def _upload_to_s3(self, params, bundle):
        """Upload the bundle with put_object, or multipart when >= 6 MB."""
        size_remaining = self._bundle_size(bundle)
        if size_remaining < MULTIPART_LIMIT:
            return self.s3.put_object(
                Bucket=params.bucket,
                Key=params.key,
                Body=bundle
            )
        else:
            return self._multipart_upload_to_s3(
                params,
                bundle,
                size_remaining
            )

    def _bundle_size(self, bundle):
        """Return the bundle's size in bytes, leaving it rewound to 0."""
        bundle.seek(0, 2)
        size = bundle.tell()
        bundle.seek(0)
        return size

    def _multipart_upload_to_s3(self, params, bundle, size_remaining):
        """Upload the bundle in 6 MB parts, aborting the multipart upload on
        any client error so no orphaned parts are left behind."""
        create_response = self.s3.create_multipart_upload(
            Bucket=params.bucket,
            Key=params.key
        )
        upload_id = create_response['UploadId']
        try:
            part_num = 1
            multipart_list = []
            bundle.seek(0)
            while size_remaining > 0:
                data = bundle.read(MULTIPART_LIMIT)
                upload_response = self.s3.upload_part(
                    Bucket=params.bucket,
                    Key=params.key,
                    UploadId=upload_id,
                    PartNumber=part_num,
                    Body=six.BytesIO(data)
                )
                multipart_list.append({
                    'PartNumber': part_num,
                    'ETag': upload_response['ETag']
                })
                part_num += 1
                size_remaining -= len(data)
            return self.s3.complete_multipart_upload(
                Bucket=params.bucket,
                Key=params.key,
                UploadId=upload_id,
                MultipartUpload={'Parts': multipart_list}
            )
        except ClientError:
            self.s3.abort_multipart_upload(
                Bucket=params.bucket,
                Key=params.key,
                UploadId=upload_id
            )
            # Bare raise preserves the original traceback.
            raise

    def _register_revision(self, params):
        """Register the uploaded S3 object as an application revision."""
        revision = {
            'revisionType': 'S3',
            's3Location': {
                'bucket': params.bucket,
                'key': params.key,
                'bundleType': 'zip',
                'eTag': params.eTag
            }
        }
        if 'version' in params:
            revision['s3Location']['version'] = params.version
        self.codedeploy.register_application_revision(
            applicationName=params.application_name,
            revision=revision,
            description=params.description
        )
class Register(BasicCommand):
    """`aws deploy register`: create (or reuse) an IAM user for an
    on-premises instance, register the instance with AWS CodeDeploy, write
    its credentials to a local config file, and optionally tag it."""

    NAME = 'register'
    DESCRIPTION = (
        "Creates an IAM user for the on-premises instance, if not provided, "
        "and saves the user's credentials to an on-premises instance "
        "configuration file; registers the on-premises instance with AWS "
        "CodeDeploy; and optionally adds tags to the on-premises instance."
    )
    TAGS_SCHEMA = {
        "type": "array",
        "items": {
            "type": "object",
            "properties": {
                "Key": {
                    "description": "The tag key.",
                    "type": "string",
                    "required": True
                },
                "Value": {
                    "description": "The tag value.",
                    "type": "string",
                    "required": True
                }
            }
        }
    }
    ARG_TABLE = [
        INSTANCE_NAME_ARG,
        {
            'name': 'tags',
            'synopsis': '--tags <value>',
            'required': False,
            'nargs': '+',
            'schema': TAGS_SCHEMA,
            'help_text': (
                'Optional. The list of key/value pairs to tag the on-premises '
                'instance.'
            )
        },
        IAM_USER_ARN_ARG
    ]

    def _run_main(self, parsed_args, parsed_globals):
        """Validate input, create clients, then run the registration steps.

        Failures are reported on stderr with a pointer to the manual
        registration instructions; the exception is not re-raised.
        """
        params = parsed_args
        params.session = self._session
        validate_region(params, parsed_globals)
        validate_instance_name(params)
        validate_tags(params)
        validate_iam_user_arn(params)

        self.codedeploy = self._session.create_client(
            'codedeploy',
            region_name=params.region,
            endpoint_url=parsed_globals.endpoint_url,
            verify=parsed_globals.verify_ssl
        )
        self.iam = self._session.create_client(
            'iam',
            region_name=params.region
        )

        try:
            # Only create a new IAM user when the caller did not supply one.
            if not params.iam_user_arn:
                self._create_iam_user(params)
                self._create_access_key(params)
                self._create_user_policy(params)
                self._create_config(params)
            self._register_instance(params)
            if params.tags:
                self._add_tags(params)
            sys.stdout.write(
                'Copy the on-premises configuration file named {0} to the '
                'on-premises instance, and run the following command on the '
                'on-premises instance to install and configure the AWS '
                'CodeDeploy Agent:\n'
                'aws deploy install --config-file {0}\n'.format(
                    DEFAULT_CONFIG_FILE
                )
            )
        except Exception as e:
            sys.stdout.flush()
            sys.stderr.write(
                'ERROR\n'
                '{0}\n'
                'Register the on-premises instance by following the '
                'instructions in "Configure Existing On-Premises Instances by '
                'Using AWS CodeDeploy" in the AWS CodeDeploy User '
                'Guide.\n'.format(e)
            )

    def _create_iam_user(self, params):
        """Create an IAM user named after the instance and record its ARN."""
        sys.stdout.write('Creating the IAM user... ')
        params.user_name = params.instance_name
        response = self.iam.create_user(
            Path='/AWS/CodeDeploy/',
            UserName=params.user_name
        )
        params.iam_user_arn = response['User']['Arn']
        sys.stdout.write(
            'DONE\n'
            'IamUserArn: {0}\n'.format(
                params.iam_user_arn
            )
        )

    def _create_access_key(self, params):
        """Create an access key for the new user and record both halves."""
        sys.stdout.write('Creating the IAM user access key... ')
        response = self.iam.create_access_key(
            UserName=params.user_name
        )
        params.access_key_id = response['AccessKey']['AccessKeyId']
        params.secret_access_key = response['AccessKey']['SecretAccessKey']
        sys.stdout.write(
            'DONE\n'
            'AccessKeyId: {0}\n'
            'SecretAccessKey: {1}\n'.format(
                params.access_key_id,
                params.secret_access_key
            )
        )

    def _create_user_policy(self, params):
        """Attach an inline policy granting the read-only S3 access the
        agent needs to fetch revisions."""
        sys.stdout.write('Creating the IAM user policy... ')
        params.policy_name = 'codedeploy-agent'
        params.policy_document = (
            '{\n'
            '    "Version": "2012-10-17",\n'
            '    "Statement": [ {\n'
            '        "Action": [ "s3:Get*", "s3:List*" ],\n'
            '        "Effect": "Allow",\n'
            '        "Resource": "*"\n'
            '    } ]\n'
            '}'
        )
        self.iam.put_user_policy(
            UserName=params.user_name,
            PolicyName=params.policy_name,
            PolicyDocument=params.policy_document
        )
        sys.stdout.write(
            'DONE\n'
            'PolicyName: {0}\n'
            'PolicyDocument: {1}\n'.format(
                params.policy_name,
                params.policy_document
            )
        )

    def _create_config(self, params):
        """Write the on-premises configuration file with the new credentials.

        The file contains a secret access key, so it is created with
        owner-only permissions (0600) rather than the umask default; note
        the mode only applies when the file is newly created.
        """
        import os  # function-scope: this module does not otherwise use os
        sys.stdout.write(
            'Creating the on-premises instance configuration file named {0}'
            '...'.format(DEFAULT_CONFIG_FILE)
        )
        fd = os.open(
            DEFAULT_CONFIG_FILE,
            os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
            0o600
        )
        with os.fdopen(fd, 'w') as f:
            f.write(
                '---\n'
                'region: {0}\n'
                'iam_user_arn: {1}\n'
                'aws_access_key_id: {2}\n'
                'aws_secret_access_key: {3}\n'.format(
                    params.region,
                    params.iam_user_arn,
                    params.access_key_id,
                    params.secret_access_key
                )
            )
        sys.stdout.write('DONE\n')

    def _register_instance(self, params):
        """Register the instance name/IAM user pair with AWS CodeDeploy."""
        sys.stdout.write('Registering the on-premises instance... ')
        self.codedeploy.register_on_premises_instance(
            instanceName=params.instance_name,
            iamUserArn=params.iam_user_arn
        )
        sys.stdout.write('DONE\n')

    def _add_tags(self, params):
        """Attach the requested tags to the registered instance."""
        sys.stdout.write('Adding tags to the on-premises instance... ')
        self.codedeploy.add_tags_to_on_premises_instances(
            tags=params.tags,
            instanceNames=[params.instance_name]
        )
        sys.stdout.write('DONE\n')
DEFAULT_CONFIG_FILE = 'codedeploy.onpremises.yml'


class System:
    """Base class describing per-OS install/uninstall behaviour for the
    AWS CodeDeploy Agent."""

    UNSUPPORTED_SYSTEM_MSG = (
        'Only Ubuntu Server, Red Hat Enterprise Linux Server and '
        'Windows Server operating systems are supported.'
    )

    def __init__(self, params):
        self.session = params.session
        self.s3 = self.session.create_client(
            's3',
            region_name=params.region
        )

    def validate_administrator(self):
        raise NotImplementedError('validate_administrator')

    def install(self, params):
        raise NotImplementedError('install')

    def uninstall(self, params):
        raise NotImplementedError('uninstall')


class Windows(System):
    CONFIG_DIR = r'C:\ProgramData\Amazon\CodeDeploy'
    CONFIG_FILE = 'conf.onpremises.yml'
    CONFIG_PATH = r'{0}\{1}'.format(CONFIG_DIR, CONFIG_FILE)
    INSTALLER = 'codedeploy-agent.msi'

    def validate_administrator(self):
        if not ctypes.windll.shell32.IsUserAnAdmin():
            raise RuntimeError(
                'You must run this command as an Administrator.'
            )

    def install(self, params):
        """Stop any running agent, download the MSI from S3, install it
        quietly and restart the service, verifying it comes back up."""
        if 'installer' in params:
            self.INSTALLER = params.installer

        # universal_newlines=True decodes the pipes to str; without it
        # communicate() returns bytes on Python 3 and the substring checks
        # below ('x in error' / 'x in output') raise TypeError.
        process = subprocess.Popen(
            [
                'powershell.exe',
                '-Command', 'Stop-Service',
                '-Name', 'codedeployagent'
            ],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True
        )
        (output, error) = process.communicate()
        not_found = (
            "Cannot find any service with service name 'codedeployagent'"
        )
        if process.returncode != 0 and not_found not in error:
            raise RuntimeError(
                'Failed to stop the AWS CodeDeploy Agent:\n{0}'.format(error)
            )

        response = self.s3.get_object(Bucket=params.bucket, Key=params.key)
        with open(self.INSTALLER, 'wb') as f:
            f.write(response['Body'].read())

        subprocess.check_call(
            [
                r'.\{0}'.format(self.INSTALLER),
                '/quiet',
                '/l', r'.\codedeploy-agent-install-log.txt'
            ],
            shell=True
        )
        subprocess.check_call([
            'powershell.exe',
            '-Command', 'Restart-Service',
            '-Name', 'codedeployagent'
        ])

        process = subprocess.Popen(
            [
                'powershell.exe',
                '-Command', 'Get-Service',
                '-Name', 'codedeployagent'
            ],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True
        )
        (output, error) = process.communicate()
        if "Running" not in output:
            raise RuntimeError(
                'The AWS CodeDeploy Agent did not start after installation.'
            )

    def uninstall(self, params):
        """Stop the service (tolerating a not-installed agent) and remove
        the MSI product."""
        process = subprocess.Popen(
            [
                'powershell.exe',
                '-Command', 'Stop-Service',
                '-Name', 'codedeployagent'
            ],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True
        )
        (output, error) = process.communicate()
        not_found = (
            "Cannot find any service with service name 'codedeployagent'"
        )
        if process.returncode == 0:
            self._remove_agent()
        elif not_found not in error:
            raise RuntimeError(
                'Failed to stop the AWS CodeDeploy Agent:\n{0}'.format(error)
            )

    def _remove_agent(self):
        process = subprocess.Popen(
            [
                'wmic',
                'product', 'where', 'name="CodeDeploy Host Agent"',
                'call', 'uninstall', '/nointeractive'
            ],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True
        )
        (output, error) = process.communicate()
        if process.returncode != 0:
            raise RuntimeError(
                'Failed to uninstall the AWS CodeDeploy Agent:\n{0}'.format(
                    error
                )
            )


class Linux(System):
    CONFIG_DIR = '/etc/codedeploy-agent/conf'
    CONFIG_FILE = DEFAULT_CONFIG_FILE
    CONFIG_PATH = '{0}/{1}'.format(CONFIG_DIR, CONFIG_FILE)
    INSTALLER = 'install'

    def validate_administrator(self):
        if os.geteuid() != 0:
            raise RuntimeError('You must run this command as sudo.')

    def install(self, params):
        """Update packages, stop the agent, download the installer from S3
        and run it with the session's credentials in the environment."""
        if 'installer' in params:
            self.INSTALLER = params.installer

        self._update_system(params)
        self._stop_agent(params)

        response = self.s3.get_object(Bucket=params.bucket, Key=params.key)
        with open(self.INSTALLER, 'wb') as f:
            f.write(response['Body'].read())

        subprocess.check_call(
            ['chmod', '+x', './{0}'.format(self.INSTALLER)]
        )

        # The installer authenticates with the caller's current credentials.
        credentials = self.session.get_credentials()
        environment = os.environ.copy()
        environment['AWS_REGION'] = params.region
        environment['AWS_ACCESS_KEY_ID'] = credentials.access_key
        environment['AWS_SECRET_ACCESS_KEY'] = credentials.secret_key
        if credentials.token is not None:
            environment['AWS_SESSION_TOKEN'] = credentials.token
        subprocess.check_call(
            ['./{0}'.format(self.INSTALLER), 'auto'],
            env=environment
        )

    def uninstall(self, params):
        process = self._stop_agent(params)
        if process.returncode == 0:
            self._remove_agent(params)

    def _update_system(self, params):
        raise NotImplementedError('preinstall')

    def _remove_agent(self, params):
        raise NotImplementedError('remove_agent')

    def _stop_agent(self, params):
        """Stop the agent service; a distro-specific "not installed" message
        (params.not_found_msg) is tolerated."""
        # universal_newlines=True: see Windows.install — the substring check
        # below needs str, not bytes, on Python 3.
        process = subprocess.Popen(
            ['service', 'codedeploy-agent', 'stop'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True
        )
        (output, error) = process.communicate()
        if process.returncode != 0 and params.not_found_msg not in error:
            raise RuntimeError(
                'Failed to stop the AWS CodeDeploy Agent:\n{0}'.format(error)
            )
        return process


class Ubuntu(Linux):
    def _update_system(self, params):
        subprocess.check_call(['apt-get', '-y', 'update'])
        subprocess.check_call(['apt-get', '-y', 'install', 'ruby2.0'])

    def _remove_agent(self, params):
        subprocess.check_call(['dpkg', '-r', 'codedeploy-agent'])

    def _stop_agent(self, params):
        params.not_found_msg = 'codedeploy-agent: unrecognized service'
        return Linux._stop_agent(self, params)


class RHEL(Linux):
    def _update_system(self, params):
        subprocess.check_call(['yum', '-y', 'install', 'ruby'])

    def _remove_agent(self, params):
        subprocess.check_call(['yum', '-y', 'erase', 'codedeploy-agent'])

    def _stop_agent(self, params):
        params.not_found_msg = (
            'Redirecting to /bin/systemctl stop codedeploy-agent.service'
        )
        return Linux._stop_agent(self, params)
class Uninstall(BasicCommand):
    """`aws deploy uninstall`: remove the AWS CodeDeploy Agent and its
    on-premises configuration file from the instance the command runs on."""

    NAME = 'uninstall'
    DESCRIPTION = (
        'Uninstalls the AWS CodeDeploy Agent from the on-premises instance.'
    )

    def _run_main(self, parsed_args, parsed_globals):
        """Validate the environment, then uninstall the agent and delete its
        config file.

        Failures are reported on stderr with a pointer to the manual
        instructions; the exception is not re-raised.
        """
        params = parsed_args
        params.session = self._session
        validate_region(params, parsed_globals)
        # Detects the OS and sets params.system (Ubuntu/RHEL/Windows).
        validate_instance(params)
        params.system.validate_administrator()
        try:
            self._uninstall_agent(params)
            self._delete_config_file(params)
        except Exception as e:
            sys.stdout.flush()
            sys.stderr.write(
                'ERROR\n'
                '{0}\n'
                'Uninstall the AWS CodeDeploy Agent on the on-premises '
                'instance by following the instructions in "Configure '
                'Existing On-Premises Instances by Using AWS CodeDeploy" in '
                'the AWS CodeDeploy User Guide.\n'.format(e)
            )

    def _uninstall_agent(self, params):
        """Delegate the uninstall to the detected OS helper."""
        sys.stdout.write('Uninstalling the AWS CodeDeploy Agent... ')
        params.system.uninstall(params)
        sys.stdout.write('DONE\n')

    def _delete_config_file(self, params):
        """Delete the agent's configuration file; a missing file is fine."""
        sys.stdout.write('Deleting the on-premises instance configuration... ')
        try:
            os.remove(params.system.CONFIG_PATH)
        except FileNotFoundError:
            # Already gone — nothing to delete.  (FileNotFoundError is the
            # OSError subclass for errno.ENOENT.)
            pass
        sys.stdout.write('DONE\n')
# Shared validation helpers and argument descriptions for the CodeDeploy
# on-premises commands.  The ``params`` object passed around is the parsed
# argparse Namespace; validators raise ValueError/RuntimeError on bad input
# and may attach derived attributes (region, system, bucket, key) to it.

MAX_INSTANCE_NAME_LENGTH = 100
MAX_TAGS_PER_INSTANCE = 10
MAX_TAG_KEY_LENGTH = 128
MAX_TAG_VALUE_LENGTH = 256

INSTANCE_NAME_PATTERN = r'^[A-Za-z0-9+=,.@_-]+$'
IAM_USER_ARN_PATTERN = r'^arn:aws:iam::[0-9]{12}:user/[A-Za-z0-9/+=,.@_-]+$'

INSTANCE_NAME_ARG = {
    'name': 'instance-name',
    'synopsis': '--instance-name <instance-name>',
    'required': True,
    'help_text': (
        'Required. The name of the on-premises instance.'
    )
}

IAM_USER_ARN_ARG = {
    'name': 'iam-user-arn',
    'synopsis': '--iam-user-arn <iam-user-arn>',
    'required': False,
    'help_text': (
        'Optional. The IAM user associated with the on-premises instance.'
    )
}


def validate_region(params, parsed_globals):
    """Resolve the region from --region or the profile config onto
    params.region; raise if neither is set."""
    if parsed_globals.region:
        params.region = parsed_globals.region
    else:
        params.region = params.session.get_config_variable('region')
    if not params.region:
        raise RuntimeError('Region not specified.')


def validate_instance_name(params):
    """Validate character set, 'i-' prefix (reserved for EC2 instance IDs)
    and length of params.instance_name."""
    if params.instance_name:
        if not re.match(INSTANCE_NAME_PATTERN, params.instance_name):
            raise ValueError('Instance name contains invalid characters.')
        if params.instance_name.startswith('i-'):
            raise ValueError('Instance name cannot start with \'i-\'.')
        if len(params.instance_name) > MAX_INSTANCE_NAME_LENGTH:
            raise ValueError(
                'Instance name cannot be longer than {0} characters.'.format(
                    MAX_INSTANCE_NAME_LENGTH
                )
            )


def validate_tags(params):
    """Validate tag count and per-tag key/value lengths in params.tags."""
    if params.tags:
        if len(params.tags) > MAX_TAGS_PER_INSTANCE:
            raise ValueError(
                'Instances can only have a maximum of {0} tags.'.format(
                    MAX_TAGS_PER_INSTANCE
                )
            )
        for tag in params.tags:
            if len(tag['Key']) > MAX_TAG_KEY_LENGTH:
                raise ValueError(
                    'Tag Key cannot be longer than {0} characters.'.format(
                        MAX_TAG_KEY_LENGTH
                    )
                )
            if len(tag['Value']) > MAX_TAG_VALUE_LENGTH:
                raise ValueError(
                    'Tag Value cannot be longer than {0} characters.'.format(
                        MAX_TAG_VALUE_LENGTH
                    )
                )


def validate_iam_user_arn(params):
    """Validate params.iam_user_arn against the IAM user ARN format."""
    if params.iam_user_arn and \
            not re.match(IAM_USER_ARN_PATTERN, params.iam_user_arn):
        raise ValueError('Invalid IAM user ARN.')


def validate_instance(params):
    """Detect the host OS, set params.system to the matching helper, and
    refuse to run on an EC2 instance (detected via the metadata endpoint)."""
    if platform.system() == 'Linux':
        distribution = awscli.compat.linux_distribution()[0]
        if 'Ubuntu' in distribution:
            params.system = Ubuntu(params)
        if 'Red Hat Enterprise Linux Server' in distribution:
            params.system = RHEL(params)
    elif platform.system() == 'Windows':
        params.system = Windows(params)
    if 'system' not in params:
        raise RuntimeError(
            System.UNSUPPORTED_SYSTEM_MSG
        )
    try:
        # Reaching the EC2 metadata service means we are on EC2, which is
        # not supported for on-premises registration; timing out or failing
        # to connect is the expected (non-EC2) case.
        urlopen('http://169.254.169.254/latest/meta-data/', timeout=1)
        raise RuntimeError('Amazon EC2 instances are not supported.')
    except (URLError, timeout):
        pass


def validate_s3_location(params, arg_name):
    """Parse an s3://<bucket>/<key> argument (named by *arg_name*, with
    dashes or underscores) into params.bucket and params.key."""
    arg_name = arg_name.replace('-', '_')
    if arg_name in params:
        s3_location = getattr(params, arg_name)
        if s3_location:
            matcher = re.match('s3://(.+?)/(.+)', str(s3_location))
            if matcher:
                params.bucket = matcher.group(1)
                params.key = matcher.group(2)
            else:
                raise ValueError(
                    '--{0} must specify the Amazon S3 URL format as '
                    's3://<bucket>/<key>.'.format(
                        arg_name.replace('_', '-')
                    )
                )
    # command, then NAME would be 'mycommand'.
    NAME = 'commandname'

    # Description used when generating the 'help' output for the command.
    DESCRIPTION = 'describe the command'

    # Optional.  Leave empty to use the default synopsis (the same style
    # used to document all of the built in operations).
    SYNOPSIS = ''

    # Optional hand written examples, in RST format.  You don't have to
    # provide any examples, though it is highly encouraged!
    EXAMPLES = ''

    # The command's arguments: a list of dicts whose keys match the
    # kwargs of CustomArgument.__init__.  For example, to add an
    # '--argument-one' and an '--argument-two' option:
    #
    # ARG_TABLE = [
    #     {'name': 'argument-one', 'help_text': 'This argument does foo bar.',
    #      'action': 'store', 'required': False, 'cli_type_name': 'string'},
    #     {'name': 'argument-two',
    #      'help_text': 'This argument does some other thing.',
    #      'action': 'store', 'choices': ['a', 'b', 'c']},
    # ]
    #
    # A 'schema' parameter option is available to accept a custom JSON
    # structure as input.  See awscli/schema.py for more info.
    ARG_TABLE = []

    # Optional subcommands, as a list of dicts.  A list (not a dict) is
    # used so callers control the order the subcommands appear in:
    #
    # SUBCOMMANDS = [
    #     {'name': 'subcommand1', 'command_class': SubcommandClass},
    #     {'name': 'subcommand2', 'command_class': SubcommandClass2},
    # ]
    #
    # Each command_class must subclass from ``BasicCommand``.
    SUBCOMMANDS = []

    FROM_FILE = _FromFile
    # DESCRIPTION, SYNOPSIS, and EXAMPLES may each be set to FROM_FILE
    # (or a FROM_FILE(...) instance) to have the text read from a file
    # instead of being embedded here.  This is useful when you have a lot
    # of content and would prefer to keep the docs out of the class
    # definition.
For example: # # DESCRIPTION = FROM_FILE # # will set the DESCRIPTION value to the contents of # awscli/examples//_description.rst # The naming conventions for these attributes are: # # DESCRIPTION = awscli/examples//_description.rst # SYNOPSIS = awscli/examples//_synopsis.rst # EXAMPLES = awscli/examples//_examples.rst # # You can also provide a relative path and we'll load the file # from the specified location: # # DESCRIPTION = awscli/examples/ # # For example: # # DESCRIPTION = FROM_FILE('command, 'subcommand, '_description.rst') # DESCRIPTION = 'awscli/examples/command/subcommand/_description.rst' # # At this point, the only other thing you have to implement is a _run_main # method (see the method for more information). def __init__(self, session): self._session = session self._arg_table = None self._subcommand_table = None self._lineage = [self] def __call__(self, args, parsed_globals): # args is the remaining unparsed args. # We might be able to parse these args so we need to create # an arg parser and parse them. self._subcommand_table = self._build_subcommand_table() self._arg_table = self._build_arg_table() event = 'before-building-argument-table-parser.%s' % \ ".".join(self.lineage_names) self._session.emit(event, argument_table=self._arg_table, args=args, session=self._session) parser = ArgTableArgParser(self.arg_table, self.subcommand_table) parsed_args, remaining = parser.parse_known_args(args) # Unpack arguments for key, value in vars(parsed_args).items(): cli_argument = None # Convert the name to use dashes instead of underscore # as these are how the parameters are stored in the # `arg_table`. xformed = key.replace('_', '-') if xformed in self.arg_table: cli_argument = self.arg_table[xformed] value = unpack_argument( self._session, 'custom', self.name, cli_argument, value ) # If this parameter has a schema defined, then allow plugins # a chance to process and override its value. 
if self._should_allow_plugins_override(cli_argument, value): override = self._session\ .emit_first_non_none_response( 'process-cli-arg.%s.%s' % ('custom', self.name), cli_argument=cli_argument, value=value, operation=None) if override is not None: # A plugin supplied a conversion value = override else: # Unpack the argument, which is a string, into the # correct Python type (dict, list, etc) value = unpack_cli_arg(cli_argument, value) self._validate_value_against_schema( cli_argument.argument_model, value) setattr(parsed_args, key, value) if hasattr(parsed_args, 'help'): self._display_help(parsed_args, parsed_globals) elif getattr(parsed_args, 'subcommand', None) is None: # No subcommand was specified so call the main # function for this top level command. if remaining: raise ValueError("Unknown options: %s" % ','.join(remaining)) return self._run_main(parsed_args, parsed_globals) else: return self.subcommand_table[parsed_args.subcommand](remaining, parsed_globals) def _validate_value_against_schema(self, model, value): validate_parameters(value, model) def _should_allow_plugins_override(self, param, value): if (param and param.argument_model is not None and value is not None): return True return False def _run_main(self, parsed_args, parsed_globals): # Subclasses should implement this method. # parsed_globals are the parsed global args (things like region, # profile, output, etc.) # parsed_args are any arguments you've defined in your ARG_TABLE # that are parsed. These will come through as whatever you've # provided as the 'dest' key. Otherwise they default to the # 'name' key. For example: ARG_TABLE[0] = {"name": "foo-arg", ...} # can be accessed by ``parsed_args.foo_arg``. 
        # Subclasses must implement _run_main; the base class only raises.
        raise NotImplementedError("_run_main")

    def _build_subcommand_table(self):
        """Instantiate SUBCOMMANDS into an ordered name -> command mapping.

        Emits 'building-command-table.<NAME>' so plugins can add or
        replace subcommands, then fixes up each child's lineage.
        """
        subcommand_table = OrderedDict()
        for subcommand in self.SUBCOMMANDS:
            subcommand_name = subcommand['name']
            subcommand_class = subcommand['command_class']
            subcommand_table[subcommand_name] = subcommand_class(self._session)
        self._session.emit('building-command-table.%s' % self.NAME,
                           command_table=subcommand_table,
                           session=self._session,
                           command_object=self)
        self._add_lineage(subcommand_table)
        return subcommand_table

    def _display_help(self, parsed_args, parsed_globals):
        # Delegate rendering to the generated help command for this command.
        help_command = self.create_help_command()
        help_command(parsed_args, parsed_globals)

    def create_help_command(self):
        """Build the BasicHelp command used to render this command's docs."""
        command_help_table = {}
        if self.SUBCOMMANDS:
            command_help_table = self.create_help_command_table()
        return BasicHelp(self._session, self,
                         command_table=command_help_table,
                         arg_table=self.arg_table)

    def create_help_command_table(self):
        """
        Create the command table into a form that can be handled by the
        BasicDocHandler.
        """
        commands = {}
        for command in self.SUBCOMMANDS:
            commands[command['name']] = command['command_class'](self._session)
        self._add_lineage(commands)
        return commands

    def _build_arg_table(self):
        """Instantiate ARG_TABLE entries into CustomArgument objects."""
        arg_table = OrderedDict()
        # Let plugins modify ARG_TABLE before the table is materialized.
        self._session.emit('building-arg-table.%s' % self.NAME,
                           arg_table=self.ARG_TABLE)
        for arg_data in self.ARG_TABLE:
            # If a custom schema was passed in, create the argument_model
            # so that it can be validated and docs can be generated.
if 'schema' in arg_data: argument_model = create_argument_model_from_schema( arg_data.pop('schema')) arg_data['argument_model'] = argument_model custom_argument = CustomArgument(**arg_data) arg_table[arg_data['name']] = custom_argument return arg_table def _add_lineage(self, command_table): for command in command_table: command_obj = command_table[command] command_obj.lineage = self.lineage + [command_obj] @property def arg_table(self): if self._arg_table is None: self._arg_table = self._build_arg_table() return self._arg_table @property def subcommand_table(self): if self._subcommand_table is None: self._subcommand_table = self._build_subcommand_table() return self._subcommand_table @classmethod def add_command(cls, command_table, session, **kwargs): command_table[cls.NAME] = cls(session) @property def name(self): return self.NAME @property def lineage(self): return self._lineage @lineage.setter def lineage(self, value): self._lineage = value class BasicHelp(HelpCommand): def __init__(self, session, command_object, command_table, arg_table, event_handler_class=None): super(BasicHelp, self).__init__(session, command_object, command_table, arg_table) # This is defined in HelpCommand so we're matching the # casing here. if event_handler_class is None: event_handler_class = BasicDocHandler self.EventHandlerClass = event_handler_class # These are public attributes that are mapped from the command # object. These are used by the BasicDocHandler below. 
self._description = command_object.DESCRIPTION self._synopsis = command_object.SYNOPSIS self._examples = command_object.EXAMPLES @property def name(self): return self.obj.NAME @property def description(self): return self._get_doc_contents('_description') @property def synopsis(self): return self._get_doc_contents('_synopsis') @property def examples(self): return self._get_doc_contents('_examples') @property def event_class(self): return '.'.join(self.obj.lineage_names) def _get_doc_contents(self, attr_name): value = getattr(self, attr_name) if isinstance(value, BasicCommand.FROM_FILE): if value.filename is not None: trailing_path = value.filename else: trailing_path = os.path.join(self.name, attr_name + '.rst') root_module = value.root_module doc_path = os.path.join( os.path.abspath(os.path.dirname(root_module.__file__)), 'examples', trailing_path) with _open(doc_path) as f: return f.read() else: return value def __call__(self, args, parsed_globals): # Create an event handler for a Provider Document instance = self.EventHandlerClass(self) # Now generate all of the events for a Provider document. # We pass ourselves along so that we can, in turn, get passed # to all event handlers. 
        # Generate all of the events for a Provider document; the handler
        # created above writes into self.doc as each event fires.
        docevents.generate_events(self.session, self)
        self.renderer.render(self.doc.getvalue())
        instance.unregister()


class BasicDocHandler(OperationDocumentEventHandler):
    """Doc event handler that renders help content for a BasicCommand."""

    def __init__(self, help_command):
        super(BasicDocHandler, self).__init__(help_command)
        self.doc = help_command.doc

    def doc_description(self, help_command, **kwargs):
        self.doc.style.h2('Description')
        self.doc.write(help_command.description)
        self.doc.style.new_paragraph()
        self._add_top_level_args_reference(help_command)

    def doc_synopsis_start(self, help_command, **kwargs):
        # Use the default synopsis rendering unless the command supplied
        # a hand written synopsis, in which case open a code block for it.
        if not help_command.synopsis:
            super(BasicDocHandler, self).doc_synopsis_start(
                help_command=help_command, **kwargs)
        else:
            self.doc.style.h2('Synopsis')
            self.doc.style.start_codeblock()
            self.doc.writeln(help_command.synopsis)

    def doc_synopsis_option(self, arg_name, help_command, **kwargs):
        # NOTE(review): placeholder text (e.g. '<value>') appears to have
        # been stripped from some format strings in this copy of the file
        # -- confirm the literals below against the upstream source.
        if not help_command.synopsis:
            doc = help_command.doc
            argument = help_command.arg_table[arg_name]
            if argument.synopsis:
                option_str = argument.synopsis
            elif argument.group_name in self._arg_groups:
                if argument.group_name in self._documented_arg_groups:
                    # This arg is already documented so we can move on.
                    return
                option_str = ' | '.join(
                    [a.cli_name for a
                     in self._arg_groups[argument.group_name]])
                self._documented_arg_groups.append(argument.group_name)
            elif argument.cli_type_name == 'boolean':
                option_str = '%s' % argument.cli_name
            elif argument.nargs == '+':
                option_str = "%s [...]" % argument.cli_name
            else:
                option_str = '%s ' % argument.cli_name
            if not (argument.required or argument.positional_arg):
                # Optional arguments are wrapped in brackets.
                option_str = '[%s]' % option_str
            doc.writeln('%s' % option_str)
        else:
            # A synopsis has been provided so we don't need to write
            # anything here.
            pass

    def doc_synopsis_end(self, help_command, **kwargs):
        # Close the default synopsis section, or the hand written codeblock.
        if not help_command.synopsis:
            super(BasicDocHandler, self).doc_synopsis_end(
                help_command=help_command, **kwargs)
        else:
            self.doc.style.end_codeblock()

    def doc_examples(self, help_command, **kwargs):
        # Only emit an Examples section when the command provided examples.
        if help_command.examples:
            self.doc.style.h2('Examples')
            self.doc.write(help_command.examples)

    def doc_subitems_start(self, help_command, **kwargs):
        if help_command.command_table:
            doc = help_command.doc
            doc.style.h2('Available Commands')
            doc.style.toctree()

    def doc_subitem(self, command_name, help_command, **kwargs):
        if help_command.command_table:
            doc = help_command.doc
            doc.style.tocitem(command_name)

    def doc_subitems_end(self, help_command, **kwargs):
        pass

    def doc_output(self, help_command, event_name, **kwargs):
        # BasicCommands have no modeled output shape to document.
        pass

    def doc_options_end(self, help_command, **kwargs):
        self._add_top_level_args_reference(help_command)
././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1642014456.0892723 awscli-1.22.34/awscli/customizations/configservice/0000755000000000000000000000000000000000000022307 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/configservice/__init__.py0000644000000000000000000000106500000000000024422 0ustar00rootroot00000000000000
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/configservice/getstatus.py0000644000000000000000000001024200000000000024703 0ustar00rootroot00000000000000# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import sys from awscli.customizations.commands import BasicCommand def register_get_status(cli): cli.register('building-command-table.configservice', add_get_status) def add_get_status(command_table, session, **kwargs): command_table['get-status'] = GetStatusCommand(session) class GetStatusCommand(BasicCommand): NAME = 'get-status' DESCRIPTION = ('Reports the status of all of configuration ' 'recorders and delivery channels.') def __init__(self, session): self._config_client = None super(GetStatusCommand, self).__init__(session) def _run_main(self, parsed_args, parsed_globals): self._setup_client(parsed_globals) self._check_configuration_recorders() self._check_delivery_channels() return 0 def _setup_client(self, parsed_globals): client_args = { 'verify': parsed_globals.verify_ssl, 'region_name': parsed_globals.region, 'endpoint_url': parsed_globals.endpoint_url } self._config_client = self._session.create_client('config', **client_args) def _check_configuration_recorders(self): status = self._config_client.describe_configuration_recorder_status() sys.stdout.write('Configuration Recorders:\n\n') for configuration_recorder in status['ConfigurationRecordersStatus']: 
            self._check_configure_recorder_status(configuration_recorder)
            sys.stdout.write('\n')

    def _check_configure_recorder_status(self, configuration_recorder):
        """Print one recorder's name, recording state, and last status."""
        # Get the name of the recorder and print it out.
        name = configuration_recorder['name']
        sys.stdout.write('name: %s\n' % name)

        # Get the recording status and print it out.
        recording = configuration_recorder['recording']
        recording_map = {False: 'OFF', True: 'ON'}
        sys.stdout.write('recorder: %s\n' % recording_map[recording])

        # If the recorder is on, get the last status and print it out.
        if recording:
            self._check_last_status(configuration_recorder)

    def _check_delivery_channels(self):
        """Print the status of every configured delivery channel."""
        status = self._config_client.describe_delivery_channel_status()
        sys.stdout.write('Delivery Channels:\n\n')
        for delivery_channel in status['DeliveryChannelsStatus']:
            self._check_delivery_channel_status(delivery_channel)
            sys.stdout.write('\n')

    def _check_delivery_channel_status(self, delivery_channel):
        """Print one delivery channel's name and its delivery statuses."""
        # Get the name of the delivery channel and print it out.
        name = delivery_channel['name']
        sys.stdout.write('name: %s\n' % name)

        # Obtain the various delivery statuses.
        stream_delivery = delivery_channel['configStreamDeliveryInfo']
        history_delivery = delivery_channel['configHistoryDeliveryInfo']
        snapshot_delivery = delivery_channel['configSnapshotDeliveryInfo']

        # Print the statuses out if they exist.
if stream_delivery: self._check_last_status(stream_delivery, 'stream delivery ') if history_delivery: self._check_last_status(history_delivery, 'history delivery ') if snapshot_delivery: self._check_last_status(snapshot_delivery, 'snapshot delivery ') def _check_last_status(self, status, status_name=''): last_status = status['lastStatus'] sys.stdout.write('last %sstatus: %s\n' % (status_name, last_status)) if last_status == "FAILURE": sys.stdout.write('error code: %s\n' % status['lastErrorCode']) sys.stdout.write('message: %s\n' % status['lastErrorMessage']) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/configservice/putconfigurationrecorder.py0000644000000000000000000000612200000000000030010 0ustar00rootroot00000000000000# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import copy from awscli.arguments import CLIArgument def register_modify_put_configuration_recorder(cli): cli.register( 'building-argument-table.configservice.put-configuration-recorder', extract_recording_group) def extract_recording_group(session, argument_table, **kwargs): # The purpose of this customization is to extract the recordingGroup # member from ConfigurationRecorder into its own argument. # This customization is needed because the recordingGroup member # breaks the shorthand syntax as it is a structure and not a scalar value. 
    configuration_recorder_argument = argument_table['configuration-recorder']
    configuration_recorder_model = copy.deepcopy(
        configuration_recorder_argument.argument_model)
    # Deep-copy the recordingGroup member's model before deleting it from
    # the ConfigurationRecorder model, so each argument owns its own model.
    recording_group_model = copy.deepcopy(
        configuration_recorder_argument.argument_model.
        members['recordingGroup'])
    del configuration_recorder_model.members['recordingGroup']

    # Replace --configuration-recorder with a version whose model no
    # longer contains recordingGroup, and add a separate --recording-group.
    argument_table['configuration-recorder'] = ConfigurationRecorderArgument(
        name='configuration-recorder',
        argument_model=configuration_recorder_model,
        operation_model=configuration_recorder_argument._operation_model,
        is_required=True,
        event_emitter=session.get_component('event_emitter'),
        serialized_name='ConfigurationRecorder'
    )
    argument_table['recording-group'] = RecordingGroupArgument(
        name='recording-group',
        argument_model=recording_group_model,
        operation_model=configuration_recorder_argument._operation_model,
        is_required=False,
        event_emitter=session.get_component('event_emitter'),
        serialized_name='recordingGroup'
    )


class ConfigurationRecorderArgument(CLIArgument):
    # Serializes --configuration-recorder into the ConfigurationRecorder
    # request parameter, merging with any values already placed there
    # (e.g. by RecordingGroupArgument, whichever runs first).
    def add_to_params(self, parameters, value):
        if value is None:
            return
        unpacked = self._unpack_argument(value)
        if 'ConfigurationRecorder' in parameters:
            current_value = parameters['ConfigurationRecorder']
            current_value.update(unpacked)
        else:
            parameters['ConfigurationRecorder'] = unpacked


class RecordingGroupArgument(CLIArgument):
    # Serializes --recording-group back into
    # ConfigurationRecorder['recordingGroup'] on the request.
    def add_to_params(self, parameters, value):
        if value is None:
            return
        unpacked = self._unpack_argument(value)
        if 'ConfigurationRecorder' in parameters:
            parameters['ConfigurationRecorder']['recordingGroup'] = unpacked
        else:
            parameters['ConfigurationRecorder'] = {}
            parameters['ConfigurationRecorder']['recordingGroup'] = unpacked
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/configservice/rename_cmd.py0000644000000000000000000000163400000000000024757 0ustar00rootroot00000000000000
# Copyright 2014 Amazon.com, Inc. or its affiliates.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from awscli.customizations import utils


def register_rename_config(cli):
    # Hook into top level command-table construction so the rename is
    # applied every time the main command table is built.
    cli.register('building-command-table.main', change_name)


def change_name(command_table, session, **kwargs):
    """
    Change all existing ``aws config`` commands to ``aws configservice``
    commands.
    """
    utils.rename_command(command_table, 'config', 'configservice')
././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/configservice/subscribe.py0000644000000000000000000001552400000000000024651 0ustar00rootroot00000000000000
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import json
import sys

from awscli.customizations.commands import BasicCommand
from awscli.customizations.utils import s3_bucket_exists
from awscli.customizations.s3.utils import find_bucket_key


# Argument definition for --s3-bucket (help text continues on the
# following span of this file).
S3_BUCKET = {'name': 's3-bucket',
             'required': True,
             'help_text': ('The S3 bucket that the AWS Config delivery channel'
                           ' will use. 
If the bucket does not exist, it will ' 'be automatically created. The value for this ' 'argument should follow the form ' 'bucket/prefix. Note that the prefix is optional.')} SNS_TOPIC = {'name': 'sns-topic', 'required': True, 'help_text': ('The SNS topic that the AWS Config delivery channel' ' will use. If the SNS topic does not exist, it ' 'will be automatically created. Value for this ' 'should be a valid SNS topic name or the ARN of an ' 'existing SNS topic.')} IAM_ROLE = {'name': 'iam-role', 'required': True, 'help_text': ('The IAM role that the AWS Config configuration ' 'recorder will use to record current resource ' 'configurations. Value for this should be the ' 'ARN of the desired IAM role.')} def register_subscribe(cli): cli.register('building-command-table.configservice', add_subscribe) def add_subscribe(command_table, session, **kwargs): command_table['subscribe'] = SubscribeCommand(session) class SubscribeCommand(BasicCommand): NAME = 'subscribe' DESCRIPTION = ('Subcribes user to AWS Config by creating an AWS Config ' 'delivery channel and configuration recorder to track ' 'AWS resource configurations. The names of the default ' 'channel and configuration recorder will be default.') ARG_TABLE = [S3_BUCKET, SNS_TOPIC, IAM_ROLE] def __init__(self, session): self._s3_client = None self._sns_client = None self._config_client = None super(SubscribeCommand, self).__init__(session) def _run_main(self, parsed_args, parsed_globals): # Setup the necessary all of the necessary clients. self._setup_clients(parsed_globals) # Prepare a s3 bucket for use. s3_bucket_helper = S3BucketHelper(self._s3_client) bucket, prefix = s3_bucket_helper.prepare_bucket(parsed_args.s3_bucket) # Prepare a sns topic for use. sns_topic_helper = SNSTopicHelper(self._sns_client) sns_topic_arn = sns_topic_helper.prepare_topic(parsed_args.sns_topic) name = 'default' # Create a configuration recorder. 
self._config_client.put_configuration_recorder( ConfigurationRecorder={ 'name': name, 'roleARN': parsed_args.iam_role } ) # Create a delivery channel. delivery_channel = { 'name': name, 's3BucketName': bucket, 'snsTopicARN': sns_topic_arn } if prefix: delivery_channel['s3KeyPrefix'] = prefix self._config_client.put_delivery_channel( DeliveryChannel=delivery_channel) # Start the configuration recorder. self._config_client.start_configuration_recorder( ConfigurationRecorderName=name ) # Describe the configuration recorders sys.stdout.write('Subscribe succeeded:\n\n') sys.stdout.write('Configuration Recorders: ') response = self._config_client.describe_configuration_recorders() sys.stdout.write( json.dumps(response['ConfigurationRecorders'], indent=4)) sys.stdout.write('\n\n') # Describe the delivery channels sys.stdout.write('Delivery Channels: ') response = self._config_client.describe_delivery_channels() sys.stdout.write(json.dumps(response['DeliveryChannels'], indent=4)) sys.stdout.write('\n') return 0 def _setup_clients(self, parsed_globals): client_args = { 'verify': parsed_globals.verify_ssl, 'region_name': parsed_globals.region } self._s3_client = self._session.create_client('s3', **client_args) self._sns_client = self._session.create_client('sns', **client_args) # Use the specified endpoint only for config related commands. 
client_args['endpoint_url'] = parsed_globals.endpoint_url self._config_client = self._session.create_client('config', **client_args) class S3BucketHelper(object): def __init__(self, s3_client): self._s3_client = s3_client def prepare_bucket(self, s3_path): bucket, key = find_bucket_key(s3_path) bucket_exists = self._check_bucket_exists(bucket) if not bucket_exists: self._create_bucket(bucket) sys.stdout.write('Using new S3 bucket: %s\n' % bucket) else: sys.stdout.write('Using existing S3 bucket: %s\n' % bucket) return bucket, key def _check_bucket_exists(self, bucket): return s3_bucket_exists(self._s3_client, bucket) def _create_bucket(self, bucket): region_name = self._s3_client.meta.region_name params = { 'Bucket': bucket } bucket_config = {'LocationConstraint': region_name} if region_name != 'us-east-1': params['CreateBucketConfiguration'] = bucket_config self._s3_client.create_bucket(**params) class SNSTopicHelper(object): def __init__(self, sns_client): self._sns_client = sns_client def prepare_topic(self, sns_topic): sns_topic_arn = sns_topic # Create the topic if a name is given. if not self._check_is_arn(sns_topic): response = self._sns_client.create_topic(Name=sns_topic) sns_topic_arn = response['TopicArn'] sys.stdout.write('Using new SNS topic: %s\n' % sns_topic_arn) else: sys.stdout.write('Using existing SNS topic: %s\n' % sns_topic_arn) return sns_topic_arn def _check_is_arn(self, sns_topic): # The name of topic cannot contain a colon only arns have colons. 
return ':' in sns_topic ././@PaxHeader0000000000000000000000000000003400000000000011452 xustar000000000000000028 mtime=1642014456.0892723 awscli-1.22.34/awscli/customizations/configure/0000755000000000000000000000000000000000000021442 5ustar00rootroot00000000000000././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/configure/__init__.py0000644000000000000000000000274000000000000023556 0ustar00rootroot00000000000000# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import string from botocore.vendored.six.moves import shlex_quote NOT_SET = '' PREDEFINED_SECTION_NAMES = ('preview', 'plugins') _WHITESPACE = ' \t' class ConfigValue(object): def __init__(self, value, config_type, config_variable): self.value = value self.config_type = config_type self.config_variable = config_variable def mask_value(self): if self.value is NOT_SET: return self.value = mask_value(self.value) class SectionNotFoundError(Exception): pass def mask_value(current_value): if current_value is None: return 'None' else: return ('*' * 16) + current_value[-4:] def profile_to_section(profile_name): """Converts a profile name to a section header to be used in the config.""" if any(c in _WHITESPACE for c in profile_name): profile_name = shlex_quote(profile_name) return 'profile %s' % profile_name ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/configure/addmodel.py0000644000000000000000000001140000000000000023561 0ustar00rootroot00000000000000# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import json import os from botocore.model import ServiceModel from awscli.customizations.commands import BasicCommand def _get_endpoint_prefix_to_name_mappings(session): # Get the mappings of endpoint prefixes to service names from the # available service models. 
prefixes_to_services = {} for service_name in session.get_available_services(): service_model = session.get_service_model(service_name) prefixes_to_services[service_model.endpoint_prefix] = service_name return prefixes_to_services def _get_service_name(session, endpoint_prefix): if endpoint_prefix in session.get_available_services(): # Check if the endpoint prefix is a pre-existing service. # If it is, use that endpoint prefix as the service name. return endpoint_prefix else: # The service may have a different endpoint prefix than its name # So we need to determine what the correct mapping may be. # Figure out the mappings of endpoint prefix to service names. name_mappings = _get_endpoint_prefix_to_name_mappings(session) # Determine the service name from the mapping. # If it does not exist in the mapping, return the original endpoint # prefix. return name_mappings.get(endpoint_prefix, endpoint_prefix) def get_model_location(session, service_definition, service_name=None): """Gets the path of where a service-2.json file should go in ~/.aws/models :type session: botocore.session.Session :param session: A session object :type service_definition: dict :param service_definition: The json loaded service definition :type service_name: str :param service_name: The service name to use. If this not provided, this will be determined from a combination of available services and the service definition. :returns: The path to where are model should be placed based on the service defintion and the current services in botocore. """ # Add the ServiceModel abstraction over the service json definition to # make it easier to work with. 
service_model = ServiceModel(service_definition) # Determine the service_name if not provided if service_name is None: endpoint_prefix = service_model.endpoint_prefix service_name = _get_service_name(session, endpoint_prefix) api_version = service_model.api_version # For the model location we only want the custom data path (~/.aws/models # not the one set by AWS_DATA_PATH) data_path = session.get_component('data_loader').CUSTOMER_DATA_PATH # Use the version of the model to determine the file's naming convention. service_model_name = ( 'service-%d.json' % int( float(service_definition.get('version', '2.0')))) return os.path.join(data_path, service_name, api_version, service_model_name) class AddModelCommand(BasicCommand): NAME = 'add-model' DESCRIPTION = ( 'Adds a service JSON model to the appropriate location in ' '~/.aws/models. Once the model gets added, CLI commands and Boto3 ' 'clients will be immediately available for the service JSON model ' 'provided.' ) ARG_TABLE = [ {'name': 'service-model', 'required': True, 'help_text': ( 'The contents of the service JSON model.')}, {'name': 'service-name', 'help_text': ( 'Overrides the default name used by the service JSON ' 'model to generate CLI service commands and Boto3 clients.')} ] def _run_main(self, parsed_args, parsed_globals): service_definition = json.loads(parsed_args.service_model) # Get the path to where the model should be written model_location = get_model_location( self._session, service_definition, parsed_args.service_name ) # If the service_name/api_version directories do not exist, # then create them. 
model_directory = os.path.dirname(model_location) if not os.path.exists(model_directory): os.makedirs(model_directory) # Write the model to the specified location with open(model_location, 'wb') as f: f.write(parsed_args.service_model.encode('utf-8')) return 0 ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/configure/configure.py0000644000000000000000000001350700000000000024003 0ustar00rootroot00000000000000# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import os import logging from botocore.exceptions import ProfileNotFound from awscli.compat import compat_input from awscli.customizations.commands import BasicCommand from awscli.customizations.configure.addmodel import AddModelCommand from awscli.customizations.configure.set import ConfigureSetCommand from awscli.customizations.configure.get import ConfigureGetCommand from awscli.customizations.configure.list import ConfigureListCommand from awscli.customizations.configure.writer import ConfigFileWriter from . 
import mask_value, profile_to_section logger = logging.getLogger(__name__) def register_configure_cmd(cli): cli.register('building-command-table.main', ConfigureCommand.add_command) class InteractivePrompter(object): def get_value(self, current_value, config_name, prompt_text=''): if config_name in ('aws_access_key_id', 'aws_secret_access_key'): current_value = mask_value(current_value) response = compat_input("%s [%s]: " % (prompt_text, current_value)) if not response: # If the user hits enter, we return a value of None # instead of an empty string. That way we can determine # whether or not a value has changed. response = None return response class ConfigureCommand(BasicCommand): NAME = 'configure' DESCRIPTION = BasicCommand.FROM_FILE() SYNOPSIS = ('aws configure [--profile profile-name]') EXAMPLES = ( 'To create a new configuration::\n' '\n' ' $ aws configure\n' ' AWS Access Key ID [None]: accesskey\n' ' AWS Secret Access Key [None]: secretkey\n' ' Default region name [None]: us-west-2\n' ' Default output format [None]:\n' '\n' 'To update just the region name::\n' '\n' ' $ aws configure\n' ' AWS Access Key ID [****]:\n' ' AWS Secret Access Key [****]:\n' ' Default region name [us-west-1]: us-west-2\n' ' Default output format [None]:\n' ) SUBCOMMANDS = [ {'name': 'list', 'command_class': ConfigureListCommand}, {'name': 'get', 'command_class': ConfigureGetCommand}, {'name': 'set', 'command_class': ConfigureSetCommand}, {'name': 'add-model', 'command_class': AddModelCommand} ] # If you want to add new values to prompt, update this list here. 
VALUES_TO_PROMPT = [ # (logical_name, config_name, prompt_text) ('aws_access_key_id', "AWS Access Key ID"), ('aws_secret_access_key', "AWS Secret Access Key"), ('region', "Default region name"), ('output', "Default output format"), ] def __init__(self, session, prompter=None, config_writer=None): super(ConfigureCommand, self).__init__(session) if prompter is None: prompter = InteractivePrompter() self._prompter = prompter if config_writer is None: config_writer = ConfigFileWriter() self._config_writer = config_writer def _run_main(self, parsed_args, parsed_globals): # Called when invoked with no args "aws configure" new_values = {} # This is the config from the config file scoped to a specific # profile. try: config = self._session.get_scoped_config() except ProfileNotFound: config = {} for config_name, prompt_text in self.VALUES_TO_PROMPT: current_value = config.get(config_name) new_value = self._prompter.get_value(current_value, config_name, prompt_text) if new_value is not None and new_value != current_value: new_values[config_name] = new_value config_filename = os.path.expanduser( self._session.get_config_variable('config_file')) if new_values: profile = self._session.profile self._write_out_creds_file_values(new_values, profile) if profile is not None: section = profile_to_section(profile) new_values['__section__'] = section self._config_writer.update_config(new_values, config_filename) def _write_out_creds_file_values(self, new_values, profile_name): # The access_key/secret_key are now *always* written to the shared # credentials file (~/.aws/credentials), see aws/aws-cli#847. # post-conditions: ~/.aws/credentials will have the updated credential # file values and new_values will have the cred vars removed. 
credential_file_values = {} if 'aws_access_key_id' in new_values: credential_file_values['aws_access_key_id'] = new_values.pop( 'aws_access_key_id') if 'aws_secret_access_key' in new_values: credential_file_values['aws_secret_access_key'] = new_values.pop( 'aws_secret_access_key') if credential_file_values: if profile_name is not None: credential_file_values['__section__'] = profile_name shared_credentials_filename = os.path.expanduser( self._session.get_config_variable('credentials_file')) self._config_writer.update_config( credential_file_values, shared_credentials_filename) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/configure/get.py0000644000000000000000000001023700000000000022576 0ustar00rootroot00000000000000# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import sys import logging from awscli.customizations.commands import BasicCommand from awscli.compat import six from . 
import PREDEFINED_SECTION_NAMES LOG = logging.getLogger(__name__) class ConfigureGetCommand(BasicCommand): NAME = 'get' DESCRIPTION = BasicCommand.FROM_FILE('configure', 'get', '_description.rst') SYNOPSIS = 'aws configure get varname [--profile profile-name]' EXAMPLES = BasicCommand.FROM_FILE('configure', 'get', '_examples.rst') ARG_TABLE = [ {'name': 'varname', 'help_text': 'The name of the config value to retrieve.', 'action': 'store', 'cli_type_name': 'string', 'positional_arg': True}, ] def __init__(self, session, stream=sys.stdout, error_stream=sys.stderr): super(ConfigureGetCommand, self).__init__(session) self._stream = stream self._error_stream = error_stream def _run_main(self, args, parsed_globals): varname = args.varname if '.' not in varname: # get_scoped_config() returns the config variables in the config # file (not the logical_var names), which is what we want. config = self._session.get_scoped_config() value = config.get(varname) else: value = self._get_dotted_config_value(varname) LOG.debug(u'Config value retrieved: %s' % value) if isinstance(value, six.string_types): self._stream.write(value) self._stream.write('\n') return 0 elif isinstance(value, dict): # TODO: add support for this. We would need to print it off in # the same format as the config file. self._error_stream.write( 'varname (%s) must reference a value, not a section or ' 'sub-section.' % varname ) return 1 else: return 1 def _get_dotted_config_value(self, varname): parts = varname.split('.') num_dots = varname.count('.') # Logic to deal with predefined sections like [preview], [plugin] and # etc. if num_dots == 1 and parts[0] in PREDEFINED_SECTION_NAMES: full_config = self._session.full_config section, config_name = varname.split('.') value = full_config.get(section, {}).get(config_name) if value is None: # Try to retrieve it from the profile config. 
value = full_config['profiles'].get( section, {}).get(config_name) return value if parts[0] == 'profile': profile_name = parts[1] config_name = parts[2] remaining = parts[3:] # Check if varname starts with 'default' profile (e.g. # default.emr-dev.emr.instance_profile) If not, go further to check # if varname starts with a known profile name elif parts[0] == 'default' or ( parts[0] in self._session.full_config['profiles']): profile_name = parts[0] config_name = parts[1] remaining = parts[2:] else: profile_name = self._session.get_config_variable('profile') if profile_name is None: profile_name = 'default' config_name = parts[0] remaining = parts[1:] value = self._session.full_config['profiles'].get( profile_name, {}).get(config_name) if len(remaining) == 1: try: value = value.get(remaining[-1]) except AttributeError: value = None return value ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/configure/list.py0000644000000000000000000001256500000000000023000 0ustar00rootroot00000000000000# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import sys from awscli.customizations.commands import BasicCommand from . import ConfigValue, NOT_SET class ConfigureListCommand(BasicCommand): NAME = 'list' DESCRIPTION = ( 'List the AWS CLI configuration data. This command will ' 'show you the current configuration data. 
For each configuration ' 'item, it will show you the value, where the configuration value ' 'was retrieved, and the configuration variable name. For example, ' 'if you provide the AWS region in an environment variable, this ' 'command will show you the name of the region you\'ve configured, ' 'it will tell you that this value came from an environment ' 'variable, and it will tell you the name of the environment ' 'variable.\n' ) SYNOPSIS = 'aws configure list [--profile profile-name]' EXAMPLES = ( 'To show your current configuration values::\n' '\n' ' $ aws configure list\n' ' Name Value Type Location\n' ' ---- ----- ---- --------\n' ' profile None None\n' ' access_key ****************ABCD config_file ~/.aws/config\n' ' secret_key ****************ABCD config_file ~/.aws/config\n' ' region us-west-2 env AWS_DEFAULT_REGION\n' '\n' ) def __init__(self, session, stream=sys.stdout): super(ConfigureListCommand, self).__init__(session) self._stream = stream def _run_main(self, args, parsed_globals): self._display_config_value(ConfigValue('Value', 'Type', 'Location'), 'Name') self._display_config_value(ConfigValue('-----', '----', '--------'), '----') if parsed_globals and parsed_globals.profile is not None: profile = ConfigValue(self._session.profile, 'manual', '--profile') else: profile = self._lookup_config('profile') self._display_config_value(profile, 'profile') access_key, secret_key = self._lookup_credentials() self._display_config_value(access_key, 'access_key') self._display_config_value(secret_key, 'secret_key') region = self._lookup_config('region') self._display_config_value(region, 'region') def _display_config_value(self, config_value, config_name): self._stream.write('%10s %24s %16s %s\n' % ( config_name, config_value.value, config_value.config_type, config_value.config_variable)) def _lookup_credentials(self): # First try it with _lookup_config. It's possible # that we don't find credentials this way (for example, # if we're using an IAM role). 
access_key = self._lookup_config('access_key') if access_key.value is not NOT_SET: secret_key = self._lookup_config('secret_key') access_key.mask_value() secret_key.mask_value() return access_key, secret_key else: # Otherwise we can try to use get_credentials(). # This includes a few more lookup locations # (IAM roles, some of the legacy configs, etc.) credentials = self._session.get_credentials() if credentials is None: no_config = ConfigValue(NOT_SET, None, None) return no_config, no_config else: # For the ConfigValue, we don't track down the # config_variable because that info is not # visible from botocore.credentials. I think # the credentials.method is sufficient to show # where the credentials are coming from. access_key = ConfigValue(credentials.access_key, credentials.method, '') secret_key = ConfigValue(credentials.secret_key, credentials.method, '') access_key.mask_value() secret_key.mask_value() return access_key, secret_key def _lookup_config(self, name): # First try to look up the variable in the env. value = self._session.get_config_variable(name, methods=('env',)) if value is not None: return ConfigValue(value, 'env', self._session.session_var_map[name][1]) # Then try to look up the variable in the config file. value = self._session.get_config_variable(name, methods=('config',)) if value is not None: return ConfigValue(value, 'config-file', self._session.get_config_variable('config_file')) else: return ConfigValue(NOT_SET, None, None) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/configure/set.py0000644000000000000000000001120500000000000022606 0ustar00rootroot00000000000000# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. 
A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import os from awscli.customizations.commands import BasicCommand from awscli.customizations.configure.writer import ConfigFileWriter from . import PREDEFINED_SECTION_NAMES, profile_to_section class ConfigureSetCommand(BasicCommand): NAME = 'set' DESCRIPTION = BasicCommand.FROM_FILE('configure', 'set', '_description.rst') SYNOPSIS = 'aws configure set varname value [--profile profile-name]' EXAMPLES = BasicCommand.FROM_FILE('configure', 'set', '_examples.rst') ARG_TABLE = [ {'name': 'varname', 'help_text': 'The name of the config value to set.', 'action': 'store', 'cli_type_name': 'string', 'positional_arg': True}, {'name': 'value', 'help_text': 'The value to set.', 'action': 'store', 'no_paramfile': True, # To disable the default paramfile behavior 'cli_type_name': 'string', 'positional_arg': True}, ] # Any variables specified in this list will be written to # the ~/.aws/credentials file instead of ~/.aws/config. _WRITE_TO_CREDS_FILE = ['aws_access_key_id', 'aws_secret_access_key', 'aws_session_token'] def __init__(self, session, config_writer=None): super(ConfigureSetCommand, self).__init__(session) if config_writer is None: config_writer = ConfigFileWriter() self._config_writer = config_writer def _get_config_file(self, path): config_path = self._session.get_config_variable(path) return os.path.expanduser(config_path) def _run_main(self, args, parsed_globals): varname = args.varname value = args.value profile = 'default' # Before handing things off to the config writer, # we need to find out three things: # 1. What section we're writing to (profile). # 2. The name of the config key (varname) # 3. 
The actual value (value). if '.' not in varname: # unqualified name, scope it to the current # profile (or leave it as the 'default' section if # no profile is set). if self._session.profile is not None: profile = self._session.profile else: # First figure out if it's been scoped to a profile. parts = varname.split('.') if parts[0] in ('default', 'profile'): # Then we know we're scoped to a profile. if parts[0] == 'default': profile = 'default' remaining = parts[1:] else: # [profile, profile_name, ...] profile = parts[1] remaining = parts[2:] varname = remaining[0] if len(remaining) == 2: value = {remaining[1]: value} elif parts[0] not in PREDEFINED_SECTION_NAMES: if self._session.profile is not None: profile = self._session.profile else: profile_name = self._session.get_config_variable('profile') if profile_name is not None: profile = profile_name varname = parts[0] if len(parts) == 2: value = {parts[1]: value} elif len(parts) == 2: # Otherwise it's something like "set preview.service true" # of something in the [plugin] section. profile, varname = parts config_filename = self._get_config_file('config_file') if varname in self._WRITE_TO_CREDS_FILE: # When writing to the creds file, the section is just the profile section = profile config_filename = self._get_config_file('credentials_file') elif profile in PREDEFINED_SECTION_NAMES or profile == 'default': section = profile else: section = profile_to_section(profile) updated_config = {'__section__': section, varname: value} self._config_writer.update_config(updated_config, config_filename) ././@PaxHeader0000000000000000000000000000002600000000000011453 xustar000000000000000022 mtime=1642014254.0 awscli-1.22.34/awscli/customizations/configure/writer.py0000644000000000000000000002123600000000000023334 0ustar00rootroot00000000000000# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). 
You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import os import re from . import SectionNotFoundError class ConfigFileWriter(object): SECTION_REGEX = re.compile(r'^\s*\[(?P
[^]]+)\]') OPTION_REGEX = re.compile( r'(?P