pax_global_header00006660000000000000000000000064140041246050014506gustar00rootroot0000000000000052 comment=9b67deacc860e10cd95eb968cfa770fe475f7d8e msrest-for-python-0.6.21/000077500000000000000000000000001400412460500152145ustar00rootroot00000000000000msrest-for-python-0.6.21/.coveragerc000066400000000000000000000001011400412460500173250ustar00rootroot00000000000000[report] exclude_lines = pragma: no cover if TYPE_CHECKING: msrest-for-python-0.6.21/.gitignore000066400000000000000000000001671400412460500172100ustar00rootroot00000000000000__pycache__ env* .venv *.egg-info .tox .coverage coverage.xml *.pyc .idea build/ dist/ .cache .pytest_cache .mypy_cachemsrest-for-python-0.6.21/.travis.yml000066400000000000000000000041431400412460500173270ustar00rootroot00000000000000dist: xenial language: python cache: pip _test: &_test install: - pip install tox tox-virtualenv-no-download script: - tox after_success: - bash <(curl -s https://codecov.io/bash) -e TOXENV -f $TRAVIS_BUILD_DIR/coverage.xml _autorest_install: &_autorest_install before_install: - git clone https://github.com/Azure/autorest.python.git --branch autorestv3 --single-branch - nvm install 10 - pushd autorest.python - npm install "@microsoft.azure/autorest.testserver" # Install test server pre-requisites - popd jobs: include: - stage: MyPy python: 3.6 install: - pip install -r dev_requirements.txt script: - mypy msrest - python -c 'import typing; typing.TYPE_CHECKING = True; import msrest' # Testing there is no circular dependencies in Type checking mode - stage: Test python: 2.7 env: TOXENV=py27 <<: *_test - stage: Test python: 3.5 env: TOXENV=py35 <<: *_test - stage: Test python: 3.6 env: TOXENV=py36 <<: *_test - stage: Test python: 3.7 env: TOXENV=py37 <<: *_test - stage: Test python: 3.8 env: TOXENV=py38 <<: *_test - stage: Test python: 2.7 env: TOXENV=py27-autorest <<: *_autorest_install <<: *_test - stage: Test python: 3.5 env: TOXENV=py35-autorest <<: *_autorest_install <<: *_test - stage: Test python: 3.6 env: TOXENV=py36-autorest <<: *_autorest_install <<: *_test - stage: Test python: 3.7 env: TOXENV=py37-autorest <<: *_autorest_install <<: *_test - stage: Test python: 3.8 env: TOXENV=py38-autorest <<: *_autorest_install <<: *_test allow_failures: - env: TOXENV=py27-autorest - env: TOXENV=py35-autorest - env: TOXENV=py36-autorest - env: TOXENV=py37-autorest - env: TOXENV=py38-autorest deploy: provider: pypi user: Laurent.Mazuel skip_upload_docs: true # password: use $PYPI_PASSWORD distributions: "sdist bdist_wheel" on: tags: true python: '3.6' msrest-for-python-0.6.21/.vscode/000077500000000000000000000000001400412460500165555ustar00rootroot00000000000000msrest-for-python-0.6.21/.vscode/settings.json000066400000000000000000000004421400412460500213100ustar00rootroot00000000000000// Place your settings in this file to overwrite default and user settings. 
{ "python.testing.pytestArgs": [], "python.testing.pytestEnabled": true, "files.exclude": { "**/*.pyc": true }, "git.ignoreLimitWarning": true, "python.linting.pylintEnabled": true }msrest-for-python-0.6.21/LICENSE.md000066400000000000000000000020601400412460500166160ustar00rootroot00000000000000MIT License Copyright (c) 2016 Microsoft Azure Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. msrest-for-python-0.6.21/MANIFEST.in000066400000000000000000000001021400412460500167430ustar00rootroot00000000000000include *.rst recursive-include tests *.py include msrest/py.typedmsrest-for-python-0.6.21/README.rst000066400000000000000000000470521400412460500167130ustar00rootroot00000000000000AutoRest: Python Client Runtime =============================== .. image:: https://travis-ci.org/Azure/msrest-for-python.svg?branch=master :target: https://travis-ci.org/Azure/msrest-for-python .. image:: https://codecov.io/gh/azure/msrest-for-python/branch/master/graph/badge.svg :target: https://codecov.io/gh/azure/msrest-for-python Installation ------------ To install: .. code-block:: bash $ pip install msrest Release History --------------- 2021-01-26 Version 0.6.21 +++++++++++++++++++++++++ **Bug Fixes** - Fixes `failsafe_deserialize` introduced in `0.6.20` #232 2021-01-25 Version 0.6.20 +++++++++++++++++++++++++ **Features** - Add `failsafe_deserialize` method to the `Deserializer` object. #232 - Serialize `datetime`, `date`, `time`, `timedelta` and `Decimal` correctly when serializing `object` . 
#224 2020-09-08 Version 0.6.19 +++++++++++++++++++++++++ **Bugfixes** - Fix serialization of random Model object #220 - Fix serialization of unicode string in Py2 and object mode #221 2020-07-27 Version 0.6.18 +++++++++++++++++++++++++ **Features** - Add support for attributes/text in the same XML node #218 2020-06-25 Version 0.6.17 +++++++++++++++++++++++++ **Bugfixes** - Fix XML and discriminator #214 2020-06-09 Version 0.6.16 +++++++++++++++++++++++++ **Bugfixes** - Fix XML parsing with namespaces and attributes #209 **Features** - Add py.typed for mypy support 2020-06-04 Version 0.6.15 +++++++++++++++++++++++++ **Bugfixes** - Fix RFC regression introduced in 0.6.14 (RFC parse date are no longer pickable) #208 - Fix XML parsing with namespaces #206 Thanks to ivanst0 for the contribution 2020-05-18 Version 0.6.14 +++++++++++++++++++++++++ **Bugfixes** - Fix "from_dict" in some complex flattening scenario #204 - Fix RFC date parsing if machine locale is not English #201 2020-04-07 Version 0.6.13 +++++++++++++++++++++++++ **Bugfixes** - Fix deserializer and flattening if intermediate node is None #198 - Fix validation exception message for minimum/maximum checks #199 2020-04-06 Version 0.6.12 +++++++++++++++++++++++++ **Features** - Add "time" serializer/deserializer #196 2020-01-30 Version 0.6.11 +++++++++++++++++++++++++ **Features** - XML mode can now be enabled even if the given Model has no XML metadata #184 - Add Kerberos Authentication #186 - Improve error message if expected type is dictionnary and something else is provided #188 **Bugfixes** - Fix comma separated serialization of array in query #186 - Fix validation of basic types in some complex scenario #189 Thanks to catatonicprime for the contribution 2019-09-04 Version 0.6.10 +++++++++++++++++++++++++ **Features** - XML mode now supports OpenAPI additional properties # 174 **Bugfixes** - Accept "is_xml" kwargs to force XML serialization #178 - Disable XML deserialization if received element is not an ElementTree #178 - A "null" enum deserialize as None, and not "None" anymore #173 - Fix some UTF8 encoding issue in Python 2.7 and XML mode #172 2019-07-24 Version 0.6.9 ++++++++++++++++++++++++ **Features** - Accept extensions of JSON mimetype as valid JSON #167 2019-06-24 Version 0.6.8 ++++++++++++++++++++++++ **BugFixes** - Impossible to serialize XML if model contains UTF8 characters on Python 2.7 #165 - Impossible to deserialize a HTTP response as XML if body contains UTF8 characters on Python 2.7 #165 - Loading a serialized configuration fails with NameError on NoOptionError #162 Thanks to cclauss for the contribution 2019-06-12 Version 0.6.7 ++++++++++++++++++++++++ **Features** - Add DomainCredentials credentials for EventGrid Thanks to kalyanaj for the contribution 2019-03-21 Version 0.6.6 ++++++++++++++++++++++++ **Bugfixes** - Make 0.6.x series compatible with pyinstaller again - sdist now includes tests Thanks to dotlambda for the contribution 2019-03-11 Version 0.6.5 ++++++++++++++++++++++++ **Bugfixes** - Fix list of integers serialization if div is provided #151 - Fix parsing of UTF8 with BOM #145 Thanks to eduardomourar for the contribution 2019-01-09 Version 0.6.4 ++++++++++++++++++++++++ **Bugfixes** - Fix regression on credentials configuration if used outside of Autorest scope #135 2019-01-08 Version 0.6.3 ++++++++++++++++++++++++ **Features** - Updated **experimental** async support. Requires Autorest.Python 4.0.64. 
2018-11-19 Version 0.6.2 ++++++++++++++++++++++++ **Bugfixes** - Fix circular dependency in TYPE_CHECKING mode #128 2018-10-15 Version 0.6.1 ++++++++++++++++++++++++ **Bugfixes** - Remove unecessary verbose "warnings" log #126 2018-10-02 Version 0.6.0 ++++++++++++++++++++++++ **Features** - The environment variable AZURE_HTTP_USER_AGENT, if present, is now injected part of the UserAgent - New **preview** msrest.universal_http module. Provide tools to generic HTTP management (sync/async, requests/aiohttp, etc.) - New **preview** msrest.pipeline implementation: - A Pipeline is an ordered list of Policies than can process an HTTP request and response in a generic way. - More details in the wiki page about Pipeline: https://github.com/Azure/msrest-for-python/wiki/msrest-0.6.0---Pipeline - Adding new attributes to Configuration instance: - http_logger_policy - Policy to handle HTTP logging - user_agent_policy - Policy to handle UserAgent - pipeline - The current pipeline used by the SDK client - async_pipeline - The current async pipeline used by the async SDK client - Installing "msrest[async]" now installs the **experimental** async support. Works ONLY for Autorest.Python 4.0.63. **Breaking changes** - The HTTPDriver API introduced in 0.5.0 has been replaced by the Pipeline implementation. - The following classes have been moved from "msrest.pipeline" to "msrest.universal_http": - ClientRedirectPolicy - ClientProxies - ClientConnection - The following classes have been moved from "msrest.pipeline" to "msrest.universal_http.requests": - ClientRetryPolicy **Bugfixes** - Fix "long" on Python 2 if used with the "object" type #121 Thanks to robgolding for the contribution 2018-09-04 Version 0.5.5 ++++++++++++++++++++++++ **Bugfixes** - Fix a serialization issue if additional_properties is declared, and "automatic model" syntax is used ("automatic model" being the ability to pass a dict to command and have the model auto-created) # 120 2018-07-12 Version 0.5.4 ++++++++++++++++++++++++ **Features** - Support additionalProperties and XML **BugFixes** - Better parse empty node and not string types - Improve "object" XML parsing 2018-07-10 Version 0.5.3 ++++++++++++++++++++++++ **BugFixes** - Fix some XML serialization subtle scenarios 2018-07-09 Version 0.5.2 ++++++++++++++++++++++++ **Features** - deserialize/from_dict now accepts a content-type parameter to parse XML strings **Bugfixes** - Fix some complex XML Swagger definitions. This release likely breaks already generated XML SDKs, that needs to be regenerated with autorest.python 3.0.58 2018-06-21 Version 0.5.1 ++++++++++++++++++++++++ **Bugfixes** - Lower Accept header overwrite logging message #110 - Fix 'object' type and XML format Thanks to dharmab for the contribution 2018-06-12 Version 0.5.0 ++++++++++++++++++++++++ **Disclaimer** This released is designed to be backward compatible with 0.4.x, but there is too many internal refactoring and new features to continue with 0.4.x versionning **Features** - Add XML support - Add many type hints, and MyPY testing on CI. - HTTP calls are made through a HTTPDriver API. Only implementation is `requests` for now. This driver API is *not* considered stable and you should pin your msrest version if you want to provide a personal implementation. **Bugfixes** - Incorrect milliseconds serialization for some datetime object #94 **Deprecation** That will trigger a DeprecationWarning if an old Autorest generated code is used. 
- _client.add_header is deprecated, and config.headers should be used instead - _client.send_formdata is deprecated, and _client.put/get/delete/post + _client.send should be used instead 2018-04-30 Version 0.4.29 +++++++++++++++++++++++++ **Bugfixes** - Improve `SDKClient.__exit__` to take exc_details as optional parameters and not required #93 - refresh_session should also use the permanent HTTP session if available #91 2018-04-18 Version 0.4.28 +++++++++++++++++++++++++ **Features** - msrest is now able to keep the "requests.Session" alive for performance. To activate this behavior: - Use the final Client as a context manager (requires generation with Autorest.Python 3.0.50 at least) - Use `client.config.keep_alive = True` and `client.close()` (requires generation with Autorest.Python 3.0.50 at least) - Use `client.config.keep_alive = True` and client._client.close() (not recommended, but available in old releases of SDK) - All Authentication classes now define `signed_session` and `refresh_session` with an optional `session` parameter. To take benefits of the session improvement, a subclass of Authentication *MUST* add this optional parameter and use it if it's not `None`: def signed_session(self, session=None): session = session or requests.Session() # As usual from here. 2018-03-07 Version 0.4.27 +++++++++++++++++++++++++ **Features** - Disable HTTP log by default (security), add `enable_http_log` to restore it #86 **BugFixes** - Fix incorrect date parsing if ms precision is over 6 digits #82 2018-01-30 Version 0.4.26 +++++++++++++++++++++++++ **Features** - Add TopicCredentials for EventGrid client **Bugfixes** - Fix minimal dependency of isodate - Fix serialisation from dict if datetime provided 2018-01-08 Version 0.4.25 +++++++++++++++++++++++++ **Features** - Add LROPoller class. This is a customizable LRO engine. This is the poller engine of Autorest.Python 3.0, and is not used by code generated by previous Autorest version. 2018-01-03 Version 0.4.24 +++++++++++++++++++++++++ **Bugfixes** - Date parsing is now compliant with Autorest / Swagger 2.0 specification (less lenient) **Internal optimisation** - Call that does not return a streamable object are now executed in requests stream mode False (was True whatever the type of the call). This should reduce the number of leaked opened session and allow urllib3 to manage connection pooling more efficiently. Only clients generated with Autorest.Python >= 2.1.31 (not impacted otherwise, fully backward compatible) 2017-12-21 Version 0.4.23 +++++++++++++++++++++++++ **Bugfixes** - Accept to deserialize enum of different type if content string match #75 - Stop failing on deserialization if enum string is unkwon. Return the string instead. **Features** - Model now accept kwargs in constructor for future kwargs models 2017-12-15 Version 0.4.22 +++++++++++++++++++++++++ **Bugfixes** - Do not validate additional_properties #73 - Improve validation error if expected type is dict, but actual type is not #73 2017-12-14 Version 0.4.21 +++++++++++++++++++++++++ **Bugfixes** - Fix additional_properties if Swagger was flatten #72 2017-12-13 Version 0.4.20 +++++++++++++++++++++++++ **Features** - Add support for additional_properties - By default, all additional_properties are kept. - Additional properties are sent to the server only if it was specified in the Swagger, or if "enable_additional_properties_sending" is called on the model we want it. This is a class method that enables it for all instance of this model. 
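For illustration only, a minimal sketch of this opt-in (the ``Pet`` model below is a hypothetical placeholder, not part of this package):

.. code-block:: python

    from msrest.serialization import Model

    class Pet(Model):
        # Hypothetical generated model: only "name" is declared in the Swagger.
        _attribute_map = {'name': {'key': 'name', 'type': 'str'}}

        def __init__(self, name=None, **kwargs):
            super(Pet, self).__init__(**kwargs)
            self.name = name

    pet = Pet(name='Rex')
    pet.additional_properties = {'color': 'brown'}  # extra data is kept on the model by default

    pet.serialize()   # {'name': 'Rex'} - undeclared properties are not sent

    # Opt in, for every instance of this model, to sending them to the server:
    Pet.enable_additional_properties_sending()
    pet.serialize()   # {'name': 'Rex', 'color': 'brown'}
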
2017-11-20 Version 0.4.19 +++++++++++++++++++++++++ **Features** - The interpretation of Swagger 2.0 "discriminator" is now lenient. This means for these two scenarios: - Discriminator value is missing from the received payload - Discriminator value is not defined in the Swagger Instead of failing with an exception, this now returns the base type for this "discriminator". Note that this is not a contradiction of the Swagger 2.0 spec, that specifies "validation SHOULD fail [...] there may exist valid reasons in particular circumstances to ignore a particular item, but the full implications must be understood and carefully weighed before choosing a different course." This cannot be configured for now and is the new default behvaior, but can be in the future if needed. **Bugfixes** - Optional formdata parameters were raising an exception (#65) - "application/x-www-form-urlencoded" form was sent using "multipart/form-data". This causes problems if the server does not support "multipart/form-data" (#66) 2017-10-26 Version 0.4.18 +++++++++++++++++++++++++ **Features** - Add ApiKeyCredentials class. This can be used to support OpenAPI ApiKey feature. - Add CognitiveServicesAuthentication class. Pre-declared ApiKeyCredentials class for Cognitive Services. 2017-10-12 Version 0.4.17 +++++++++++++++++++++++++ **Features** This make Authentication classes more consistent: - OAuthTokenAuthentication is now a subclass of BasicTokenAuthentication (was Authentication) - BasicTokenAuthentication has now a "set_token" methods that does nothing. This allows test like "isintance(o, BasicTokenAuthentication)" to be guaranted that the following attributes exists: - token - set_token() - signed_session() This means for users of "msrestazure", that they are guaranted that all AD classes somehow inherits from "BasicTokenAuthentication" 2017-10-05 Version 0.4.16 +++++++++++++++++++++++++ **Bugfixes** - Fix regression: accept "set" as a valid "[str]" (#60) 2017-09-28 Version 0.4.15 +++++++++++++++++++++++++ **Bugfixes** - Always log response body (#16) - Improved exception message if error JSON is Odata v4 (#55) - Refuse "str" as a valid "[str]" type (#41) - Better exception handling if input from server is not JSON valid **Features** - Add Configuration.session_configuration_callback to customize the requests.Session if necessary (#52) - Add a flag to Serializer to disable client-side-validation (#51) - Remove "import requests" from "exceptions.py" for apps that require fast loading time (#23) Thank you to jayden-at-arista for the contribution 2017-08-23 Version 0.4.14 +++++++++++++++++++++++++ **Bugfixes** - Fix regression introduced in msrest 0.4.12 - dict syntax with enum modeled as string and enum used 2017-08-22 Version 0.4.13 +++++++++++++++++++++++++ **Bugfixes** - Fix regression introduced in msrest 0.4.12 - dict syntax using isodate.Duration (#42) 2017-08-21 Version 0.4.12 +++++++++++++++++++++++++ **Features** - Input is now more lenient - Model have a "validate" method to check content constraints - Model have now 4 new methods: - "serialize" that gives the RestAPI that will be sent - "as_dict" that returns a dict version of the Model. Callbacks are available. - "deserialize" the parses the RestAPI JSON into a Model - "from_dict" that parses several dict syntax into a Model. Callbacks are available. 
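For illustration only, a minimal sketch of these methods on a hypothetical ``Example`` model (the class and values below are placeholders, not part of this package):

.. code-block:: python

    from msrest.serialization import Model

    class Example(Model):
        # Hypothetical model used only to demonstrate the methods above.
        _attribute_map = {'name': {'key': 'name', 'type': 'str'}}

        def __init__(self, name=None, **kwargs):
            super(Example, self).__init__(**kwargs)
            self.name = name

    obj = Example(name='foo')
    errors = obj.validate()                      # list of client-side validation errors
    wire = obj.serialize()                       # RestAPI dict that would be sent
    plain = obj.as_dict()                        # plain dict view (callbacks available)
    obj2 = Example.deserialize({'name': 'foo'})  # parse a RestAPI payload into a Model
    obj3 = Example.from_dict({'name': 'foo'})    # lenient: accepts several dict syntaxes
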
More details and examples in the Wiki article on Github: https://github.com/Azure/msrest-for-python/wiki/msrest-0.4.12---Serialization-change **Bugfixes** - Better Enum checking (#38) 2017-06-21 Version 0.4.11 +++++++++++++++++++++++++ **Bugfixes** - Fix incorrect dependency to "requests" 2.14.x, instead of 2.x meant in 0.4.8 2017-06-15 Version 0.4.10 +++++++++++++++++++++++++ **Features** - Add requests hooks to configuration 2017-06-08 Version 0.4.9 ++++++++++++++++++++++++ **Bugfixes** - Accept "null" value for paging array as an empty list and do not raise (#30) 2017-05-22 Version 0.4.8 ++++++++++++++++++++++++ **Bugfixes** - Fix random "pool is closed" error (#29) - Fix requests dependency to version 2.x, since version 3.x is annunced to be breaking. 2017-04-04 Version 0.4.7 ++++++++++++++++++++++++ **BugFixes** - Refactor paging #22: - "next" is renamed "advance_page" and "next" returns only 1 element (Python 2 expected behavior) - paging objects are now real generator and support the "next()" built-in function without need for "iter()" - Raise accurate DeserialisationError on incorrect RestAPI discriminator usage #27 - Fix discriminator usage of the base class name #27 - Remove default mutable arguments in Clients #20 - Fix object comparison in some scenarios #24 2017-03-06 Version 0.4.6 ++++++++++++++++++++++++ **Bugfixes** - Allow Model sub-classes to be serialized if type is "object" 2017-02-13 Version 0.4.5 ++++++++++++++++++++++++ **Bugfixes** - Fix polymorphic deserialization #11 - Fix regexp validation if '\\w' is used in Python 2.7 #13 - Fix dict deserialization if keys are unicode in Python 2.7 **Improvements** - Add polymorphic serialisation from dict objects - Remove chardet and use HTTP charset declaration (fallback to utf8) 2016-09-14 Version 0.4.4 ++++++++++++++++++++++++ **Bugfixes** - Remove paging URL validation, part of fix https://github.com/Azure/autorest/pull/1420 **Disclaimer** In order to get paging fixes for impacted clients, you need this package and Autorest > 0.17.0 Nightly 20160913 2016-09-01 Version 0.4.3 ++++++++++++++++++++++++ **Bugfixes** - Better exception message (https://github.com/Azure/autorest/pull/1300) 2016-08-15 Version 0.4.2 ++++++++++++++++++++++++ **Bugfixes** - Fix serialization if "object" type contains None (https://github.com/Azure/autorest/issues/1353) 2016-08-08 Version 0.4.1 ++++++++++++++++++++++++ **Bugfixes** - Fix compatibility issues with requests 2.11.0 (https://github.com/Azure/autorest/issues/1337) - Allow url of ClientRequest to have parameters (https://github.com/Azure/autorest/issues/1217) 2016-05-25 Version 0.4.0 ++++++++++++++++++++++++ This version has no bug fixes, but implements new features of Autorest: - Base64 url type - unixtime type - x-ms-enum modelAsString flag **Behaviour changes** - Add Platform information in UserAgent - Needs Autorest > 0.17.0 Nightly 20160525 2016-04-26 Version 0.3.0 ++++++++++++++++++++++++ **Bugfixes** - Read only values are no longer in __init__ or sent to the server (https://github.com/Azure/autorest/pull/959) - Useless kwarg removed **Behaviour changes** - Needs Autorest > 0.16.0 Nightly 20160426 2016-03-25 Version 0.2.0 ++++++++++++++++++++++++ **Bugfixes** - Manage integer enum values (https://github.com/Azure/autorest/pull/879) - Add missing application/json Accept HTTP header (https://github.com/Azure/azure-sdk-for-python/issues/553) **Behaviour changes** - Needs Autorest > 0.16.0 Nightly 20160324 2016-03-21 Version 0.1.3 ++++++++++++++++++++++++ **Bugfixes** - Deserialisation 
of generic resource if null in JSON (https://github.com/Azure/azure-sdk-for-python/issues/544) 2016-03-14 Version 0.1.2 ++++++++++++++++++++++++ **Bugfixes** - urllib3 side effect (https://github.com/Azure/autorest/issues/824) 2016-03-04 Version 0.1.1 ++++++++++++++++++++++++ **Bugfixes** - Source package corrupted in Pypi (https://github.com/Azure/autorest/issues/799) 2016-03-04 Version 0.1.0 +++++++++++++++++++++++++ **Behavioural Changes** - Removed custom logging set up and configuration. All loggers are now children of the root logger 'msrest' with no pre-defined configurations. - Replaced _required attribute in Model class with more extensive _validation dict. **Improvement** - Removed hierarchy scanning for attribute maps from base Model class - relies on generator to populate attribute maps according to hierarchy. - Base class Paged now inherits from collections.Iterable. - Data validation during serialization using custom parameters (e.g. max, min etc). - Added ValidationError to be raised if invalid data encountered during serialization. 2016-02-29 Version 0.0.3 ++++++++++++++++++++++++ **Bugfixes** - Source package corrupted in Pypi (https://github.com/Azure/autorest/issues/718) 2016-02-19 Version 0.0.2 ++++++++++++++++++++++++ **Bugfixes** - Fixed bug in exception logging before logger configured. 2016-02-19 Version 0.0.1 ++++++++++++++++++++++++ - Initial release. msrest-for-python-0.6.21/autorest_setup.sh000077500000000000000000000001161400412460500206370ustar00rootroot00000000000000pushd autorest.python/test/vanilla/ && pip install -r requirements.txt && popdmsrest-for-python-0.6.21/dev_requirements.txt000066400000000000000000000005511400412460500213370ustar00rootroot00000000000000-e . mock;python_version<="2.7" futures;python_version<="2.7" httpretty>=0.8.10 coverage<5.0.0 pytest pytest-cov pytest-asyncio;python_full_version>="3.5.2" mypy;python_full_version>="3.5.2" pylint aiohttp;python_full_version>="3.5.2" # async in msrest was experimental, we won't update trio==0.14.0;python_version == '3.5' trio==0.16.0;python_version >= '3.6' msrest-for-python-0.6.21/doc/000077500000000000000000000000001400412460500157615ustar00rootroot00000000000000msrest-for-python-0.6.21/doc/conf.py000066400000000000000000000172151400412460500172660ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # azure-sdk-for-python documentation build configuration file, created by # sphinx-quickstart on Fri Jun 27 15:42:45 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os import pip import sphinx_rtd_theme # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('../')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'sphinx.ext.doctest', 'sphinx.ext.viewcode', 'sphinx.ext.intersphinx'] intersphinx_mapping = { 'python': ('https://docs.python.org/3.6', None), } # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] source_parsers = { '.md': 'recommonmark.parser.CommonMarkParser', } # The suffix of source filenames. source_suffix = ['.rst', '.md'] # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'msrest' copyright = u'2016-2018, Microsoft' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.5.4' # The full version, including alpha/beta/rc tags. release = '0.5.4' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for extensions ---------------------------------------------------- autoclass_content = 'both' # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. #html_theme = 'default' html_theme = 'sphinx_rtd_theme' #html_theme_options = {'collapsiblesidebar': True} # Activate the theme. #pip.main(['install', 'sphinx_bootstrap_theme']) #import sphinx_bootstrap_theme #html_theme = 'bootstrap' #html_theme_path = sphinx_bootstrap_theme.get_html_theme_path() html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'msrest-doc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'msrest.tex', u'msrest Documentation', u'Microsoft', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True msrest-for-python-0.6.21/doc/index.md000066400000000000000000000004261400412460500174140ustar00rootroot00000000000000 msrest's documentation has moved from ReadTheDocs to docs.microsoft.com. 
msrest-for-python-0.6.21/doc/make.bat000066400000000000000000000145071400412460500173750ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. 
echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pydocumentdb.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pydocumentdb.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end msrest-for-python-0.6.21/doc/requirements.txt000066400000000000000000000000441400412460500212430ustar00rootroot00000000000000sphinx sphinx_rtd_theme recommonmarkmsrest-for-python-0.6.21/msrest/000077500000000000000000000000001400412460500165315ustar00rootroot00000000000000msrest-for-python-0.6.21/msrest/__init__.py000066400000000000000000000031421400412460500206420ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- from .version import msrest_version from .configuration import Configuration from .service_client import ServiceClient, SDKClient from .serialization import Serializer, Deserializer __all__ = [ "ServiceClient", "SDKClient", "Serializer", "Deserializer", "Configuration" ] __version__ = msrest_version msrest-for-python-0.6.21/msrest/async_client.py000066400000000000000000000114451400412460500215630ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
# # -------------------------------------------------------------------------- import functools import logging from typing import Any, Dict, List, Union, TYPE_CHECKING from .universal_http import ClientRequest from .universal_http.async_requests import AsyncRequestsHTTPSender from .pipeline import Request, AsyncPipeline, AsyncHTTPPolicy, SansIOHTTPPolicy from .pipeline.async_requests import ( AsyncPipelineRequestsHTTPSender, AsyncRequestsCredentialsPolicy ) from .pipeline.universal import ( HTTPLogger, RawDeserializer, ) from .service_client import _ServiceClientCore if TYPE_CHECKING: from .configuration import Configuration # pylint: disable=unused-import _LOGGER = logging.getLogger(__name__) class SDKClientAsync: """The base class of all generated SDK async client. """ def __init__(self, config: 'Configuration') -> None: self._client = ServiceClientAsync(config) async def __aenter__(self): await self._client.__aenter__() return self async def __aexit__(self, *exc_details): await self._client.__aexit__(*exc_details) class ServiceClientAsync(_ServiceClientCore): def __init__(self, config: 'Configuration') -> None: super(ServiceClientAsync, self).__init__(config) self.config.pipeline = self._create_default_pipeline() # type: ignore def _create_default_pipeline(self): creds = self.config.credentials policies = [ self.config.user_agent_policy, # UserAgent policy RawDeserializer(), # Deserialize the raw bytes self.config.http_logger_policy # HTTP request/response log ] # type: List[Union[AsyncHTTPPolicy, SansIOHTTPPolicy]] if creds: if isinstance(creds, (AsyncHTTPPolicy, SansIOHTTPPolicy)): policies.insert(1, creds) else: # Assume this is the old credentials class, and then requests. Wrap it. policies.insert(1, AsyncRequestsCredentialsPolicy(creds)) return AsyncPipeline( policies, AsyncPipelineRequestsHTTPSender( AsyncRequestsHTTPSender(self.config) # Send HTTP request using requests ) ) async def __aenter__(self): await self.config.pipeline.__aenter__() return self async def __aexit__(self, *exc_details): await self.config.pipeline.__aexit__(*exc_details) async def async_send(self, request, **kwargs): """Prepare and send request object according to configuration. :param ClientRequest request: The request object to be sent. :param dict headers: Any headers to add to the request. :param content: Any body data to add to the request. :param config: Any specific config overrides """ kwargs.setdefault('stream', True) # In the current backward compatible implementation, return the HTTP response # and plug context inside. Could be remove if we modify Autorest, # but we still need it to be backward compatible pipeline_response = await self.config.pipeline.run(request, **kwargs) response = pipeline_response.http_response response.context = pipeline_response.context return response def stream_download_async(self, response, user_callback): """Async Generator for streaming request body data. :param response: The initial response :param user_callback: Custom callback for monitoring progress. """ block = self.config.connection.data_block_size return response.stream_download(block, user_callback) msrest-for-python-0.6.21/msrest/async_paging.py000066400000000000000000000064301400412460500215500ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- from collections.abc import AsyncIterator import logging _LOGGER = logging.getLogger(__name__) class AsyncPagedMixin(AsyncIterator): def __init__(self, *args, **kwargs): """Bring async to Paging. "async_command" is mandatory keyword argument for this mixin to work. """ self._async_get_next = kwargs.get("async_command") if not self._async_get_next: _LOGGER.debug("Paging async iterator protocol is not available for %s", self.__class__.__name__) async def async_get(self, url): """Get an arbitrary page. This resets the iterator and then fully consumes it to return the specific page **only**. :param str url: URL to arbitrary page results. """ self.reset() self.next_link = url return await self.async_advance_page() async def async_advance_page(self): if not self._async_get_next: raise NotImplementedError( "The class %s does not support async paging at the moment.", self.__class__.__name__ ) if self.next_link is None: raise StopAsyncIteration("End of paging") self._current_page_iter_index = 0 self._response = await self._async_get_next(self.next_link) self._derserializer(self, self._response) return self.current_page async def __anext__(self): """Iterate through responses.""" # Storing the list iterator might work out better, but there's no # guarantee that some code won't replace the list entirely with a copy, # invalidating an list iterator that might be saved between iterations. if self.current_page and self._current_page_iter_index < len(self.current_page): response = self.current_page[self._current_page_iter_index] self._current_page_iter_index += 1 return response else: await self.async_advance_page() return await self.__anext__() msrest-for-python-0.6.21/msrest/authentication.py000066400000000000000000000252211400412460500221240ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- from typing import Optional, Dict import requests from requests.auth import HTTPBasicAuth import requests_oauthlib as oauth class Authentication(object): """Default, simple auth object. Doesn't actually add any auth headers. """ header = "Authorization" def signed_session(self, session=None): # type: (Optional[requests.Session]) -> requests.Session """Create requests session with any required auth headers applied. If a session object is provided, configure it directly. Otherwise, create a new session and return it. :param session: The session to configure for authentication :type session: requests.Session :rtype: requests.Session """ return session or requests.Session() class BasicAuthentication(Authentication): """Implementation of Basic Authentication. :param str username: Authentication username. :param str password: Authentication password. """ def __init__(self, username, password): # type: (str, str) -> None self.scheme = 'Basic' self.username = username self.password = password def signed_session(self, session=None): # type: (Optional[requests.Session]) -> requests.Session """Create requests session with any required auth headers applied. If a session object is provided, configure it directly. Otherwise, create a new session and return it. :param session: The session to configure for authentication :type session: requests.Session :rtype: requests.Session """ session = super(BasicAuthentication, self).signed_session(session) session.auth = HTTPBasicAuth(self.username, self.password) return session class BasicTokenAuthentication(Authentication): """Simple Token Authentication. Does not adhere to OAuth, simply adds provided token as a header. :param dict[str,str] token: Authentication token, must have 'access_token' key. """ def __init__(self, token): # type: (Dict[str, str]) -> None self.scheme = 'Bearer' self.token = token def set_token(self): # type: () -> None """Should be used to define the self.token attribute. In this implementation, does nothing since the token is statically provided at creation. """ pass def signed_session(self, session=None): # type: (Optional[requests.Session]) -> requests.Session """Create requests session with any required auth headers applied. If a session object is provided, configure it directly. Otherwise, create a new session and return it. 
:param session: The session to configure for authentication :type session: requests.Session :rtype: requests.Session """ session = super(BasicTokenAuthentication, self).signed_session(session) header = "{} {}".format(self.scheme, self.token['access_token']) session.headers['Authorization'] = header return session class OAuthTokenAuthentication(BasicTokenAuthentication): """OAuth Token Authentication. Requires that supplied token contains an expires_in field. :param str client_id: Account Client ID. :param dict[str,str] token: OAuth2 token. """ def __init__(self, client_id, token): # type: (str, Dict[str, str]) -> None super(OAuthTokenAuthentication, self).__init__(token) self.id = client_id self.store_key = self.id def construct_auth(self): # type: () -> str """Format token header. :rtype: str """ return "{} {}".format(self.scheme, self.token) def refresh_session(self, session=None): # type: (Optional[requests.Session]) -> requests.Session """Return updated session if token has expired, attempts to refresh using refresh token. If a session object is provided, configure it directly. Otherwise, create a new session and return it. :param session: The session to configure for authentication :type session: requests.Session :rtype: requests.Session """ return self.signed_session(session) def signed_session(self, session=None): # type: (Optional[requests.Session]) -> requests.Session """Create requests session with any required auth headers applied. If a session object is provided, configure it directly. Otherwise, create a new session and return it. :param session: The session to configure for authentication :type session: requests.Session :rtype: requests.Session """ session = session or requests.Session() # Don't call super on purpose, let's "auth" manage the headers. session.auth = oauth.OAuth2(self.id, token=self.token) return session class KerberosAuthentication(Authentication): """Kerberos Authentication Kerberos Single Sign On (SSO); requires requests_kerberos is installed. :param mutual_authentication: whether to require mutual authentication. Use values from requests_kerberos import REQUIRED, OPTIONAL, or DISABLED """ def __init__(self, mutual_authentication=None): super(KerberosAuthentication, self).__init__() self.mutual_authentication = mutual_authentication def signed_session(self, session=None): """Create requests session with Negotiate (SPNEGO) headers applied. If a session object is provided, configure it directly. Otherwise, create a new session and return it. :param session: The session to configure for authentication :type session: requests.Session :rtype: requests.Session """ session = super(KerberosAuthentication, self).signed_session(session) try: from requests_kerberos import HTTPKerberosAuth except ImportError: raise ImportError("In order to use KerberosAuthentication please do 'pip install requests_kerberos' first") if self.mutual_authentication: session.auth = HTTPKerberosAuth(mutual_authentication=self.mutual_authentication) else: session.auth = HTTPKerberosAuth() return session class ApiKeyCredentials(Authentication): """Represent the ApiKey feature of Swagger. Dict should be dict[str,str] to be accepted by requests. 
:param dict[str,str] in_headers: Headers part of the ApiKey :param dict[str,str] in_query: ApiKey in the query as parameters """ def __init__(self, in_headers=None, in_query=None): # type: (Optional[Dict[str, str]], Optional[Dict[str, str]]) -> None super(ApiKeyCredentials, self).__init__() if in_headers is None: in_headers = {} if in_query is None: in_query = {} if not in_headers and not in_query: raise ValueError("You need to define in_headers or in_query") self.in_headers = in_headers self.in_query = in_query def signed_session(self, session=None): # type: (Optional[requests.Session]) -> requests.Session """Create requests session with ApiKey. If a session object is provided, configure it directly. Otherwise, create a new session and return it. :param session: The session to configure for authentication :type session: requests.Session :rtype: requests.Session """ session = super(ApiKeyCredentials, self).signed_session(session) session.headers.update(self.in_headers) try: # params is actually Union[bytes, MutableMapping[Text, Text]] session.params.update(self.in_query) # type: ignore except AttributeError: # requests.params can be bytes raise ValueError("session.params must be a dict to be used in ApiKeyCredentials") return session class CognitiveServicesCredentials(ApiKeyCredentials): """Cognitive Services authentication. :param str subscription_key: The CS subscription key """ _subscription_key_header = 'Ocp-Apim-Subscription-Key' def __init__(self, subscription_key): # type: (str) -> None if not subscription_key: raise ValueError("Subscription key cannot be None") super(CognitiveServicesCredentials, self).__init__( in_headers={ self._subscription_key_header: subscription_key, 'X-BingApis-SDK-Client': 'Python-SDK' } ) class TopicCredentials(ApiKeyCredentials): """Event Grid authentication. :param str topic_key: The Event Grid topic key """ _topic_key_header = 'aeg-sas-key' def __init__(self, topic_key): # type: (str) -> None if not topic_key: raise ValueError("Topic key cannot be None") super(TopicCredentials, self).__init__( in_headers={ self._topic_key_header: topic_key, } ) class DomainCredentials(ApiKeyCredentials): """Event Grid domain authentication. :param str domain_key: The Event Grid domain key """ _domain_key_header = 'aeg-sas-key' def __init__(self, domain_key): # type: (str) -> None if not domain_key: raise ValueError("Domain key cannot be None") super(DomainCredentials, self).__init__( in_headers={ self._domain_key_header: domain_key, } ) msrest-for-python-0.6.21/msrest/configuration.py000066400000000000000000000071001400412460500217500ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- try: import configparser from configparser import NoOptionError except ImportError: import ConfigParser as configparser # type: ignore from ConfigParser import NoOptionError # type: ignore from typing import TYPE_CHECKING, Optional, Dict, List, Any, Callable, Union # pylint: disable=unused-import from .pipeline import Pipeline from .universal_http.requests import ( RequestHTTPSenderConfiguration ) from .pipeline.universal import ( UserAgentPolicy, HTTPLogger, ) if TYPE_CHECKING: from .pipeline import AsyncPipeline class Configuration(RequestHTTPSenderConfiguration): """Client configuration. :param str baseurl: REST API base URL. :param str filepath: Path to existing config file (optional). """ def __init__(self, base_url, filepath=None): # type: (str, Optional[str]) -> None super(Configuration, self).__init__(filepath) # Service self.base_url = base_url # User-Agent as a policy self.user_agent_policy = UserAgentPolicy() # HTTP logger policy self.http_logger_policy = HTTPLogger() # The pipeline. We don't know until a ServiceClient use this configuration if it will be sync or async # We instantiate with a default empty Pipeline for mypy mostly, trying to use a pipeline from a pure # configuration object doesn't make sense. self.pipeline = Pipeline() # type: Union[Pipeline, AsyncPipeline] # If set to True, ServiceClient will own the sessionn self.keep_alive = False # Potential credentials pre-declared self.credentials = None if filepath: self.load(filepath) @property def user_agent(self): # type: () -> str """The current user agent value.""" return self.user_agent_policy.user_agent def add_user_agent(self, value): # type: (str) -> None """Add value to current user agent with a space. :param str value: value to add to user agent. """ self.user_agent_policy.add_user_agent(value) @property def enable_http_logger(self): return self.http_logger_policy.enable_http_logger @enable_http_logger.setter def enable_http_logger(self, value): self.http_logger_policy.enable_http_logger = value msrest-for-python-0.6.21/msrest/exceptions.py000066400000000000000000000175531400412460500212770ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- import logging import sys from typing import Callable, Any, Optional, TYPE_CHECKING _LOGGER = logging.getLogger(__name__) def raise_with_traceback(exception, message="", *args, **kwargs): # type: (Callable, str, Any, Any) -> None """Raise exception with a specified traceback. This MUST be called inside a "except" clause. :param Exception exception: Error type to be raised. :param str message: Message to include with error, empty by default. :param args: Any additional args to be included with exception. """ exc_type, exc_value, exc_traceback = sys.exc_info() # If not called inside a "except", exc_type will be None. Assume it will not happen exc_msg = "{}, {}: {}".format(message, exc_type.__name__, exc_value) # type: ignore error = exception(exc_msg, *args, **kwargs) try: raise error.with_traceback(exc_traceback) except AttributeError: error.__traceback__ = exc_traceback raise error class ClientException(Exception): """Base exception for all Client Runtime exceptions. :param str message: Description of exception. :param Exception inner_exception: Nested exception (optional). """ def __init__(self, message, inner_exception=None, *args, **kwargs): # type: (str, Any, str, str) -> None self.inner_exception = inner_exception _LOGGER.debug(message) super(ClientException, self).__init__(message, *args, **kwargs) # type: ignore class SerializationError(ClientException): """Error raised during request serialization.""" pass class DeserializationError(ClientException): """Error raised during response deserialization.""" pass class TokenExpiredError(ClientException): """OAuth token expired, request failed.""" pass class ValidationError(ClientException): """Request parameter validation failed. :param str rule: Validation rule. :param str target: Target value for the rule. :param str value: Value that was invalid. 
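Example (illustrative values)::

    raise ValidationError("min_length", "name", 3)
    # message: "Parameter 'name' must have length greater than 3."
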
""" _messages = { "min_length": "must have length greater than {!r}.", "max_length": "must have length less than {!r}.", "minimum_ex": "must be greater than {!r}.", "maximum_ex": "must be less than {!r}.", "minimum": "must be equal to or greater than {!r}.", "maximum": "must be equal to or less than {!r}.", "min_items": "must contain at least {!r} items.", "max_items": "must contain at most {!r} items.", "pattern": "must conform to the following pattern: {!r}.", "unique": "must contain only unique items.", "multiple": "must be a multiple of {!r}.", "required": "can not be None.", "type": "must be of type {!r}" } @staticmethod def _format_message(rule, reason, value): if rule == "type" and value.startswith(r"{"): internal_type = value.strip(r"{}") value = "dict[str, {}]".format(internal_type) return reason.format(value) def __init__(self, rule, target, value, *args, **kwargs): # type: (str, str, str, str, str) -> None self.rule = rule self.target = target message = "Parameter {!r} ".format(target) reason = self._messages.get( rule, "failed to meet validation requirement.") message += self._format_message(rule, reason, value) super(ValidationError, self).__init__(message, *args, **kwargs) class ClientRequestError(ClientException): """Client request failed.""" pass class AuthenticationError(ClientException): """Client request failed to authenticate.""" pass # Needed only here for type checking if TYPE_CHECKING: import requests from .serialization import Deserializer class HttpOperationError(ClientException): """Client request failed due to server-specified HTTP operation error. Attempts to deserialize response into specific error object. :param Deserializer deserialize: Deserializer with data on custom error objects. :param requests.Response response: Server response :param str resp_type: Objects type to deserialize response. :param args: Additional args to pass to exception object. :ivar Model error: Deserialized error model. """ _DEFAULT_MESSAGE = "Unknown error" def __str__(self): # type: () -> str return str(self.message) def __init__(self, deserialize, response, resp_type=None, *args, **kwargs): # type: (Deserializer, Any, Optional[str], str, str) -> None self.error = None self.message = self._DEFAULT_MESSAGE if hasattr(response, 'internal_response'): self.response = response.internal_response else: self.response = response try: if resp_type: self.error = deserialize(resp_type, response) if self.error is None: self.error = deserialize.dependencies[resp_type]() # ARM uses OData v4, try that by default # http://docs.oasis-open.org/odata/odata-json-format/v4.0/os/odata-json-format-v4.0-os.html#_Toc372793091 # Code and Message are REQUIRED try: self.message = "({}) {}".format( self.error.error.code, self.error.error.message ) except AttributeError: # Try the default for Autorest if not available (compat) if self.error.message: self.message = self.error.message except (DeserializationError, AttributeError, KeyError): pass if not self.error or self.message == self._DEFAULT_MESSAGE: try: response.raise_for_status() # Two possible raises here: # - Attribute error if response is not ClientResponse. Do not catch. # - Any internal exception, take it. 
except AttributeError: raise except Exception as err: # pylint: disable=broad-except if not self.error: self.error = err if self.message == self._DEFAULT_MESSAGE: msg = "Operation returned an invalid status code {!r}" self.message = msg.format(response.reason) else: if not self.error: self.error = response # We can't type hint, but at least we can check that assert self.message is not None super(HttpOperationError, self).__init__( self.message, self.error, *args, **kwargs) msrest-for-python-0.6.21/msrest/http_logger.py000066400000000000000000000103341400412460500214220ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- import logging import re import types from typing import Any, Optional, TYPE_CHECKING # pylint: disable=unused-import if TYPE_CHECKING: from .universal_http import ClientRequest, ClientResponse # pylint: disable=unused-import _LOGGER = logging.getLogger(__name__) def log_request(_, request, *_args, **_kwargs): # type: (Any, ClientRequest, str, str) -> None """Log a client request. :param _: Unused in current version (will be None) :param requests.Request request: The request object. """ if not _LOGGER.isEnabledFor(logging.DEBUG): return try: _LOGGER.debug("Request URL: %r", request.url) _LOGGER.debug("Request method: %r", request.method) _LOGGER.debug("Request headers:") for header, value in request.headers.items(): if header.lower() == 'authorization': value = '*****' _LOGGER.debug(" %r: %r", header, value) _LOGGER.debug("Request body:") # We don't want to log the binary data of a file upload. if isinstance(request.body, types.GeneratorType): _LOGGER.debug("File upload") else: _LOGGER.debug(str(request.body)) except Exception as err: # pylint: disable=broad-except _LOGGER.debug("Failed to log request: %r", err) def log_response(_, _request, response, *_args, **kwargs): # type: (Any, ClientRequest, ClientResponse, str, Any) -> Optional[ClientResponse] """Log a server response. :param _: Unused in current version (will be None) :param requests.Request request: The request object. :param requests.Response response: The response object. 
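These messages are only emitted when DEBUG logging is enabled for this
module's logger. A minimal sketch (handler configuration is up to the caller)::

    import logging
    logging.basicConfig()
    logging.getLogger("msrest.http_logger").setLevel(logging.DEBUG)
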
""" if not _LOGGER.isEnabledFor(logging.DEBUG): return None try: _LOGGER.debug("Response status: %r", response.status_code) _LOGGER.debug("Response headers:") for res_header, value in response.headers.items(): _LOGGER.debug(" %r: %r", res_header, value) # We don't want to log binary data if the response is a file. _LOGGER.debug("Response content:") pattern = re.compile(r'attachment; ?filename=["\w.]+', re.IGNORECASE) header = response.headers.get('content-disposition') if header and pattern.match(header): filename = header.partition('=')[2] _LOGGER.debug("File attachments: %s", filename) elif response.headers.get("content-type", "").endswith("octet-stream"): _LOGGER.debug("Body contains binary data.") elif response.headers.get("content-type", "").startswith("image"): _LOGGER.debug("Body contains image data.") else: if kwargs.get('stream', False): _LOGGER.debug("Body is streamable") else: _LOGGER.debug(response.text()) return response except Exception as err: # pylint: disable=broad-except _LOGGER.debug("Failed to log response: %s", repr(err)) return response msrest-for-python-0.6.21/msrest/paging.py000066400000000000000000000126731400412460500203610ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- import sys try: from collections.abc import Iterator xrange = range except ImportError: from collections import Iterator from typing import Dict, Any, List, Callable, Optional, TYPE_CHECKING # pylint: disable=unused-import from .serialization import Deserializer from .pipeline import ClientRawResponse if TYPE_CHECKING: from .universal_http import ClientResponse # pylint: disable=unused-import from .serialization import Model # pylint: disable=unused-import if sys.version_info >= (3, 5, 2): # Not executed on old Python, no syntax error from .async_paging import AsyncPagedMixin # type: ignore else: class AsyncPagedMixin(object): # type: ignore pass class Paged(AsyncPagedMixin, Iterator): """A container for paged REST responses. :param ClientResponse response: server response object. :param callable command: Function to retrieve the next page of items. :param dict classes: A dictionary of class dependencies for deserialization. 
:param dict raw_headers: A dict of raw headers to add if "raw" is called """ _validation = {} # type: Dict[str, Dict[str, Any]] _attribute_map = {} # type: Dict[str, Dict[str, Any]] def __init__(self, command, classes, raw_headers=None, **kwargs): # type: (Callable[[str], ClientResponse], Dict[str, Model], Dict[str, str], Any) -> None super(Paged, self).__init__(**kwargs) # type: ignore # Sets next_link, current_page, and _current_page_iter_index. self.next_link = "" self._current_page_iter_index = 0 self.reset() self._derserializer = Deserializer(classes) self._get_next = command self._response = None # type: Optional[ClientResponse] self._raw_headers = raw_headers def __iter__(self): """Return 'self'.""" # Since iteration mutates this object, consider it an iterator in-and-of # itself. return self @classmethod def _get_subtype_map(cls): """Required for parity to Model object for deserialization.""" return {} @property def raw(self): # type: () -> ClientRawResponse """Get current page as ClientRawResponse. :rtype: ClientRawResponse """ raw = ClientRawResponse(self.current_page, self._response) if self._raw_headers: raw.add_headers(self._raw_headers) return raw def get(self, url): # type: (str) -> List[Model] """Get an arbitrary page. This resets the iterator and then fully consumes it to return the specific page **only**. :param str url: URL to arbitrary page results. """ self.reset() self.next_link = url return self.advance_page() def reset(self): # type: () -> None """Reset iterator to first page.""" self.next_link = "" self.current_page = [] # type: List[Model] self._current_page_iter_index = 0 def advance_page(self): # type: () -> List[Model] """Force moving the cursor to the next azure call. This method is for advanced usage, iterator protocol is prefered. :raises: StopIteration if no further page :return: The current page list :rtype: list """ if self.next_link is None: raise StopIteration("End of paging") self._current_page_iter_index = 0 self._response = self._get_next(self.next_link) self._derserializer(self, self._response) return self.current_page def __next__(self): """Iterate through responses.""" # Storing the list iterator might work out better, but there's no # guarantee that some code won't replace the list entirely with a copy, # invalidating an list iterator that might be saved between iterations. if self.current_page and self._current_page_iter_index < len(self.current_page): response = self.current_page[self._current_page_iter_index] self._current_page_iter_index += 1 return response else: self.advance_page() return self.__next__() next = __next__ # Python 2 compatibility. msrest-for-python-0.6.21/msrest/pipeline/000077500000000000000000000000001400412460500203365ustar00rootroot00000000000000msrest-for-python-0.6.21/msrest/pipeline/__init__.py000066400000000000000000000277031400412460500224600ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 import abc try: import configparser from configparser import NoOptionError except ImportError: import ConfigParser as configparser # type: ignore from ConfigParser import NoOptionError # type: ignore import json import logging import os.path try: from urlparse import urlparse except ImportError: from urllib.parse import urlparse import xml.etree.ElementTree as ET from typing import TYPE_CHECKING, Generic, TypeVar, cast, IO, List, Union, Any, Mapping, Dict, Optional, Tuple, Callable, Iterator # pylint: disable=unused-import HTTPResponseType = TypeVar("HTTPResponseType") HTTPRequestType = TypeVar("HTTPRequestType") # This file is NOT using any "requests" HTTP implementation # However, the CaseInsensitiveDict is handy. # If one day we reach the point where "requests" can be skip totally, # might provide our own implementation from requests.structures import CaseInsensitiveDict _LOGGER = logging.getLogger(__name__) try: ABC = abc.ABC except AttributeError: # Python 2.7, abc exists, but not ABC ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()}) # type: ignore try: from contextlib import AbstractContextManager # type: ignore except ImportError: # Python <= 3.5 class AbstractContextManager(object): # type: ignore def __enter__(self): """Return `self` upon entering the runtime context.""" return self @abc.abstractmethod def __exit__(self, exc_type, exc_value, traceback): """Raise any exception triggered within the runtime context.""" return None class HTTPPolicy(ABC, Generic[HTTPRequestType, HTTPResponseType]): """An http policy ABC. """ def __init__(self): self.next = None @abc.abstractmethod def send(self, request, **kwargs): # type: (Request[HTTPRequestType], Any) -> Response[HTTPRequestType, HTTPResponseType] """Mutate the request. Context content is dependent of the HTTPSender. """ pass class SansIOHTTPPolicy(Generic[HTTPRequestType, HTTPResponseType]): """Represents a sans I/O policy. This policy can act before the I/O, and after the I/O. Use this policy if the actual I/O in the middle is an implementation detail. Context is not available, since it's implementation dependent. if a policy needs a context of the Sender, it can't be universal. 
Example: setting a UserAgent does not need to be tight to sync or async implementation or specific HTTP lib """ def on_request(self, request, **kwargs): # type: (Request[HTTPRequestType], Any) -> None """Is executed before sending the request to next policy. """ pass def on_response(self, request, response, **kwargs): # type: (Request[HTTPRequestType], Response[HTTPRequestType, HTTPResponseType], Any) -> None """Is executed after the request comes back from the policy. """ pass def on_exception(self, request, **kwargs): # type: (Request[HTTPRequestType], Any) -> bool """Is executed if an exception comes back fron the following policy. Return True if the exception has been handled and should not be forwarded to the caller. This method is executed inside the exception handler. To get the exception, raise and catch it: try: raise except MyError: do_something() or use exc_type, exc_value, exc_traceback = sys.exc_info() """ return False class _SansIOHTTPPolicyRunner(HTTPPolicy, Generic[HTTPRequestType, HTTPResponseType]): """Sync implementation of the SansIO policy. """ def __init__(self, policy): # type: (SansIOHTTPPolicy) -> None super(_SansIOHTTPPolicyRunner, self).__init__() self._policy = policy def send(self, request, **kwargs): # type: (Request[HTTPRequestType], Any) -> Response[HTTPRequestType, HTTPResponseType] self._policy.on_request(request, **kwargs) try: response = self.next.send(request, **kwargs) except Exception: if not self._policy.on_exception(request, **kwargs): raise else: self._policy.on_response(request, response, **kwargs) return response class Pipeline(AbstractContextManager, Generic[HTTPRequestType, HTTPResponseType]): """A pipeline implementation. This is implemented as a context manager, that will activate the context of the HTTP sender. """ def __init__(self, policies=None, sender=None): # type: (List[Union[HTTPPolicy, SansIOHTTPPolicy]], HTTPSender) -> None self._impl_policies = [] # type: List[HTTPPolicy] if not sender: # Import default only if nothing is provided from .requests import PipelineRequestsHTTPSender self._sender = cast(HTTPSender, PipelineRequestsHTTPSender()) else: self._sender = sender for policy in (policies or []): if isinstance(policy, SansIOHTTPPolicy): self._impl_policies.append(_SansIOHTTPPolicyRunner(policy)) else: self._impl_policies.append(policy) for index in range(len(self._impl_policies)-1): self._impl_policies[index].next = self._impl_policies[index+1] if self._impl_policies: self._impl_policies[-1].next = self._sender def __enter__(self): # type: () -> Pipeline self._sender.__enter__() return self def __exit__(self, *exc_details): # pylint: disable=arguments-differ self._sender.__exit__(*exc_details) def run(self, request, **kwargs): # type: (HTTPRequestType, Any) -> Response context = self._sender.build_context() pipeline_request = Request(request, context) # type: Request[HTTPRequestType] first_node = self._impl_policies[0] if self._impl_policies else self._sender return first_node.send(pipeline_request, **kwargs) # type: ignore class HTTPSender(AbstractContextManager, ABC, Generic[HTTPRequestType, HTTPResponseType]): """An http sender ABC. """ @abc.abstractmethod def send(self, request, **config): # type: (Request[HTTPRequestType], Any) -> Response[HTTPRequestType, HTTPResponseType] """Send the request using this HTTP sender. """ pass def build_context(self): # type: () -> Any """Allow the sender to build a context that will be passed across the pipeline with the request. Return type has no constraints. 
Implementation is not required and None by default. """ return None class Request(Generic[HTTPRequestType]): """Represents a HTTP request in a Pipeline. URL can be given without query parameters, to be added later using "format_parameters". Instance can be created without data, to be added later using "add_content" Instance can be created without files, to be added later using "add_formdata" :param str method: HTTP method (GET, HEAD, etc.) :param str url: At least complete scheme/host/path :param dict[str,str] headers: HTTP headers :param files: Files list. :param data: Body to be sent. :type data: bytes or str. """ def __init__(self, http_request, context=None): # type: (HTTPRequestType, Optional[Any]) -> None self.http_request = http_request self.context = context class Response(Generic[HTTPRequestType, HTTPResponseType]): """A pipeline response object. The Response interface exposes an HTTP response object as it returns through the pipeline of Policy objects. This ensures that Policy objects have access to the HTTP response. This also have a "context" dictionnary where policy can put additional fields. Policy SHOULD update the "context" dictionary with additional post-processed field if they create them. However, nothing prevents a policy to actually sub-class this class a return it instead of the initial instance. """ def __init__(self, request, http_response, context=None): # type: (Request[HTTPRequestType], HTTPResponseType, Optional[Dict[str, Any]]) -> None self.request = request self.http_response = http_response self.context = context or {} # ClientRawResponse is in Pipeline for compat, but technically there is nothing Pipeline here, this is deserialization if TYPE_CHECKING: from ..universal_http import ClientResponse class ClientRawResponse(object): """Wrapper for response object. This allows for additional data to be gathereded from the response, for example deserialized headers. It also allows the raw response object to be passed back to the user. :param output: Deserialized response object. This is the type that would have been returned directly by the main operation without raw=True. :param response: Raw response object (by default requests.Response instance) :type response: ~requests.Response """ def __init__(self, output, response): # type: (Union[Any], Optional[Union[Response, ClientResponse]]) -> None from ..serialization import Deserializer if isinstance(response, Response): # If pipeline response, remove that layer response = response.http_response try: # If universal driver, remove that layer self.response = response.internal_response # type: ignore except AttributeError: self.response = response self.output = output self.headers = {} # type: Dict[str, Optional[Any]] self._deserialize = Deserializer() def add_headers(self, header_dict): # type: (Dict[str, str]) -> None """Deserialize a specific header. :param dict header_dict: A dictionary containing the name of the header and the type to deserialize to. 
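Example (the header names and target types below are illustrative;
``deserialized`` and ``response`` come from an earlier operation call)::

    raw = ClientRawResponse(deserialized, response)
    raw.add_headers({'ETag': 'str', 'x-ms-ratelimit-remaining': 'int'})
    etag = raw.headers.get('ETag')
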
""" if not self.response: return for name, data_type in header_dict.items(): value = self.response.headers.get(name) value = self._deserialize(data_type, value) self.headers[name] = value __all__ = [ 'Request', 'Response', 'Pipeline', 'HTTPPolicy', 'SansIOHTTPPolicy', 'HTTPSender', # backward compat 'ClientRawResponse', ] try: from .async_abc import AsyncPipeline, AsyncHTTPPolicy, AsyncHTTPSender # pylint: disable=unused-import from .async_abc import __all__ as _async_all __all__ += _async_all except SyntaxError: # Python 2 pass except ImportError: # pyinstaller won't include Py3 files in Py2.7 mode pass msrest-for-python-0.6.21/msrest/pipeline/aiohttp.py000066400000000000000000000047231400412460500223660ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- from typing import Any, Optional from ..universal_http.aiohttp import AioHTTPSender as _AioHTTPSenderDriver from . import AsyncHTTPSender, Request, Response # Matching requests, because why not? CONTENT_CHUNK_SIZE = 10 * 1024 class AioHTTPSender(AsyncHTTPSender): """AioHttp HTTP sender implementation. """ def __init__(self, driver: Optional[_AioHTTPSenderDriver] = None, *, loop=None) -> None: self.driver = driver or _AioHTTPSenderDriver(loop=loop) async def __aenter__(self): await self.driver.__aenter__() async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ await self.driver.__aexit__(*exc_details) def build_context(self) -> Any: """Allow the sender to build a context that will be passed across the pipeline with the request. Return type has no constraints. Implementation is not required and None by default. """ return None async def send(self, request: Request, **config: Any) -> Response: """Send the request using this HTTP sender. """ return Response( request, await self.driver.send(request.http_request) ) msrest-for-python-0.6.21/msrest/pipeline/async_abc.py000066400000000000000000000145411400412460500226370ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- import abc from typing import Any, List, Union, Callable, AsyncIterator, Optional, Generic, TypeVar from . import Request, Response, Pipeline, SansIOHTTPPolicy AsyncHTTPResponseType = TypeVar("AsyncHTTPResponseType") HTTPRequestType = TypeVar("HTTPRequestType") try: from contextlib import AbstractAsyncContextManager # type: ignore except ImportError: # Python <= 3.7 class AbstractAsyncContextManager(object): # type: ignore async def __aenter__(self): """Return `self` upon entering the runtime context.""" return self @abc.abstractmethod async def __aexit__(self, exc_type, exc_value, traceback): """Raise any exception triggered within the runtime context.""" return None class AsyncHTTPPolicy(abc.ABC, Generic[HTTPRequestType, AsyncHTTPResponseType]): """An http policy ABC. """ def __init__(self) -> None: # next will be set once in the pipeline self.next = None # type: Optional[Union[AsyncHTTPPolicy[HTTPRequestType, AsyncHTTPResponseType], AsyncHTTPSender[HTTPRequestType, AsyncHTTPResponseType]]] @abc.abstractmethod async def send(self, request: Request, **kwargs: Any) -> Response[HTTPRequestType, AsyncHTTPResponseType]: """Mutate the request. Context content is dependent of the HTTPSender. """ pass class _SansIOAsyncHTTPPolicyRunner(AsyncHTTPPolicy[HTTPRequestType, AsyncHTTPResponseType]): """Async implementation of the SansIO policy. """ def __init__(self, policy: SansIOHTTPPolicy) -> None: super(_SansIOAsyncHTTPPolicyRunner, self).__init__() self._policy = policy async def send(self, request: Request, **kwargs: Any) -> Response[HTTPRequestType, AsyncHTTPResponseType]: self._policy.on_request(request, **kwargs) try: response = await self.next.send(request, **kwargs) # type: ignore except Exception: if not self._policy.on_exception(request, **kwargs): raise else: self._policy.on_response(request, response, **kwargs) return response class AsyncHTTPSender(AbstractAsyncContextManager, abc.ABC, Generic[HTTPRequestType, AsyncHTTPResponseType]): """An http sender ABC. """ @abc.abstractmethod async def send(self, request: Request[HTTPRequestType], **config: Any) -> Response[HTTPRequestType, AsyncHTTPResponseType]: """Send the request using this HTTP sender. """ pass def build_context(self) -> Any: """Allow the sender to build a context that will be passed across the pipeline with the request. Return type has no constraints. 
Implementation is not required and None by default. """ return None def __enter__(self): raise TypeError("Use async with instead") def __exit__(self, exc_type, exc_val, exc_tb): # __exit__ should exist in pair with __enter__ but never executed pass # pragma: no cover class AsyncPipeline(AbstractAsyncContextManager, Generic[HTTPRequestType, AsyncHTTPResponseType]): """A pipeline implementation. This is implemented as a context manager, that will activate the context of the HTTP sender. """ def __init__(self, policies: List[Union[AsyncHTTPPolicy, SansIOHTTPPolicy]] = None, sender: Optional[AsyncHTTPSender[HTTPRequestType, AsyncHTTPResponseType]] = None) -> None: self._impl_policies = [] # type: List[AsyncHTTPPolicy[HTTPRequestType, AsyncHTTPResponseType]] if sender: self._sender = sender else: # Import default only if nothing is provided from .aiohttp import AioHTTPSender self._sender = AioHTTPSender() for policy in (policies or []): if isinstance(policy, SansIOHTTPPolicy): self._impl_policies.append(_SansIOAsyncHTTPPolicyRunner(policy)) else: self._impl_policies.append(policy) for index in range(len(self._impl_policies)-1): self._impl_policies[index].next = self._impl_policies[index+1] if self._impl_policies: self._impl_policies[-1].next = self._sender def __enter__(self): raise TypeError("Use 'async with' instead") def __exit__(self, exc_type, exc_val, exc_tb): # __exit__ should exist in pair with __enter__ but never executed pass # pragma: no cover async def __aenter__(self) -> 'AsyncPipeline': await self._sender.__aenter__() return self async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ await self._sender.__aexit__(*exc_details) async def run(self, request: Request, **kwargs: Any) -> Response[HTTPRequestType, AsyncHTTPResponseType]: context = self._sender.build_context() pipeline_request = Request(request, context) first_node = self._impl_policies[0] if self._impl_policies else self._sender return await first_node.send(pipeline_request, **kwargs) # type: ignore __all__ = [ 'AsyncHTTPPolicy', 'AsyncHTTPSender', 'AsyncPipeline', ]msrest-for-python-0.6.21/msrest/pipeline/async_requests.py000066400000000000000000000122201400412460500237550ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
# # -------------------------------------------------------------------------- import asyncio from collections.abc import AsyncIterator import functools import logging from typing import Any, Callable, Optional, AsyncIterator as AsyncIteratorType from oauthlib import oauth2 import requests from requests.models import CONTENT_CHUNK_SIZE from ..exceptions import ( TokenExpiredError, ClientRequestError, raise_with_traceback ) from ..universal_http.async_requests import AsyncBasicRequestsHTTPSender from . import AsyncHTTPSender, AsyncHTTPPolicy, Response, Request from .requests import RequestsContext _LOGGER = logging.getLogger(__name__) class AsyncPipelineRequestsHTTPSender(AsyncHTTPSender): """Implements a basic Pipeline, that supports universal HTTP lib "requests" driver. """ def __init__(self, universal_http_requests_driver: Optional[AsyncBasicRequestsHTTPSender]=None) -> None: self.driver = universal_http_requests_driver or AsyncBasicRequestsHTTPSender() async def __aenter__(self) -> 'AsyncPipelineRequestsHTTPSender': await self.driver.__aenter__() return self async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ await self.driver.__aexit__(*exc_details) async def close(self): await self.__aexit__() def build_context(self): # type: () -> RequestsContext return RequestsContext( session=self.driver.session, ) async def send(self, request: Request, **kwargs) -> Response: """Send request object according to configuration. :param Request request: The request object to be sent. """ if request.context is None: # Should not happen, but make mypy happy and does not hurt request.context = self.build_context() if request.context.session is not self.driver.session: kwargs['session'] = request.context.session return Response( request, await self.driver.send(request.http_request, **kwargs) ) class AsyncRequestsCredentialsPolicy(AsyncHTTPPolicy): """Implementation of request-oauthlib except and retry logic. """ def __init__(self, credentials): super(AsyncRequestsCredentialsPolicy, self).__init__() self._creds = credentials async def send(self, request, **kwargs): session = request.context.session try: self._creds.signed_session(session) except TypeError: # Credentials does not support session injection _LOGGER.warning("Your credentials class does not support session injection. Performance will not be at the maximum.") request.context.session = session = self._creds.signed_session() try: try: return await self.next.send(request, **kwargs) except (oauth2.rfc6749.errors.InvalidGrantError, oauth2.rfc6749.errors.TokenExpiredError) as err: error = "Token expired or is invalid. Attempting to refresh." _LOGGER.warning(error) try: try: self._creds.refresh_session(session) except TypeError: # Credentials does not support session injection _LOGGER.warning("Your credentials class does not support session injection. Performance will not be at the maximum.") request.context.session = session = self._creds.refresh_session() return await self.next.send(request, **kwargs) except (oauth2.rfc6749.errors.InvalidGrantError, oauth2.rfc6749.errors.TokenExpiredError) as err: msg = "Token expired or is invalid." raise_with_traceback(TokenExpiredError, msg, err) except (requests.RequestException, oauth2.rfc6749.errors.OAuth2Error) as err: msg = "Error occurred in request." 
raise_with_traceback(ClientRequestError, msg, err) msrest-for-python-0.6.21/msrest/pipeline/requests.py000066400000000000000000000170461400412460500225730ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- """ This module is the requests implementation of Pipeline ABC """ from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 import contextlib import logging import threading from typing import TYPE_CHECKING, List, Callable, Iterator, Any, Union, Dict, Optional # pylint: disable=unused-import import warnings from oauthlib import oauth2 import requests from requests.models import CONTENT_CHUNK_SIZE from urllib3 import Retry # Needs requests 2.16 at least to be safe from ..exceptions import ( TokenExpiredError, ClientRequestError, raise_with_traceback ) from ..universal_http import ClientRequest from ..universal_http.requests import BasicRequestsHTTPSender from . import HTTPSender, HTTPPolicy, Response, Request _LOGGER = logging.getLogger(__name__) class RequestsCredentialsPolicy(HTTPPolicy): """Implementation of request-oauthlib except and retry logic. """ def __init__(self, credentials): super(RequestsCredentialsPolicy, self).__init__() self._creds = credentials def send(self, request, **kwargs): session = request.context.session try: self._creds.signed_session(session) except TypeError: # Credentials does not support session injection _LOGGER.warning("Your credentials class does not support session injection. Performance will not be at the maximum.") request.context.session = session = self._creds.signed_session() try: try: return self.next.send(request, **kwargs) except (oauth2.rfc6749.errors.InvalidGrantError, oauth2.rfc6749.errors.TokenExpiredError) as err: error = "Token expired or is invalid. Attempting to refresh." _LOGGER.warning(error) try: try: self._creds.refresh_session(session) except TypeError: # Credentials does not support session injection _LOGGER.warning("Your credentials class does not support session injection. 
Performance will not be at the maximum.") request.context.session = session = self._creds.refresh_session() return self.next.send(request, **kwargs) except (oauth2.rfc6749.errors.InvalidGrantError, oauth2.rfc6749.errors.TokenExpiredError) as err: msg = "Token expired or is invalid." raise_with_traceback(TokenExpiredError, msg, err) except (requests.RequestException, oauth2.rfc6749.errors.OAuth2Error) as err: msg = "Error occurred in request." raise_with_traceback(ClientRequestError, msg, err) class RequestsPatchSession(HTTPPolicy): """Implements request level configuration that are actually to be done at the session level. This is highly deprecated, and is totally legacy. The pipeline structure allows way better design for this. """ _protocols = ['http://', 'https://'] def send(self, request, **kwargs): """Patch the current session with Request level operation config. This is deprecated, we shouldn't patch the session with arguments at the Request, and "config" should be used. """ session = request.context.session old_max_redirects = None if 'max_redirects' in kwargs: warnings.warn("max_redirects in operation kwargs is deprecated, use config.redirect_policy instead", DeprecationWarning) old_max_redirects = session.max_redirects session.max_redirects = int(kwargs['max_redirects']) old_trust_env = None if 'use_env_proxies' in kwargs: warnings.warn("use_env_proxies in operation kwargs is deprecated, use config.proxies instead", DeprecationWarning) old_trust_env = session.trust_env session.trust_env = bool(kwargs['use_env_proxies']) old_retries = {} if 'retries' in kwargs: warnings.warn("retries in operation kwargs is deprecated, use config.retry_policy instead", DeprecationWarning) max_retries = kwargs['retries'] for protocol in self._protocols: old_retries[protocol] = session.adapters[protocol].max_retries session.adapters[protocol].max_retries = max_retries try: return self.next.send(request, **kwargs) finally: if old_max_redirects: session.max_redirects = old_max_redirects if old_trust_env: session.trust_env = old_trust_env if old_retries: for protocol in self._protocols: session.adapters[protocol].max_retries = old_retries[protocol] class RequestsContext(object): def __init__(self, session): self.session = session class PipelineRequestsHTTPSender(HTTPSender): """Implements a basic Pipeline, that supports universal HTTP lib "requests" driver. """ def __init__(self, universal_http_requests_driver=None): # type: (Optional[BasicRequestsHTTPSender]) -> None self.driver = universal_http_requests_driver or BasicRequestsHTTPSender() def __enter__(self): # type: () -> PipelineRequestsHTTPSender self.driver.__enter__() return self def __exit__(self, *exc_details): # pylint: disable=arguments-differ self.driver.__exit__(*exc_details) def close(self): self.__exit__() def build_context(self): # type: () -> RequestsContext return RequestsContext( session=self.driver.session, ) def send(self, request, **kwargs): # type: (Request[ClientRequest], Any) -> Response """Send request object according to configuration. :param Request request: The request object to be sent. 
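A minimal usage sketch (the URL is a placeholder; in practice policies such as
UserAgentPolicy would usually be passed as well)::

    from msrest.pipeline import Pipeline
    from msrest.universal_http import ClientRequest

    with Pipeline(policies=[], sender=PipelineRequestsHTTPSender()) as pipeline:
        response = pipeline.run(ClientRequest("GET", "https://example.org"))
        status = response.http_response.status_code
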
""" if request.context is None: # Should not happen, but make mypy happy and does not hurt request.context = self.build_context() if request.context.session is not self.driver.session: kwargs['session'] = request.context.session return Response( request, self.driver.send(request.http_request, **kwargs) ) msrest-for-python-0.6.21/msrest/pipeline/universal.py000066400000000000000000000233411400412460500227230ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- """ This module represents universal policy that works whatever the HTTPSender implementation """ import json import logging import os import xml.etree.ElementTree as ET import platform import codecs import re from typing import Mapping, Any, Optional, AnyStr, Union, IO, cast, TYPE_CHECKING # pylint: disable=unused-import from ..version import msrest_version as _msrest_version from . import SansIOHTTPPolicy from ..exceptions import DeserializationError, raise_with_traceback from ..http_logger import log_request, log_response if TYPE_CHECKING: from . import Request, Response # pylint: disable=unused-import _LOGGER = logging.getLogger(__name__) _BOM = codecs.BOM_UTF8.decode(encoding='utf-8') class HeadersPolicy(SansIOHTTPPolicy): """A simple policy that sends the given headers with the request. This overwrite any headers already defined in the request. """ def __init__(self, headers): # type: (Mapping[str, str]) -> None self.headers = headers def on_request(self, request, **kwargs): # type: (Request, Any) -> None http_request = request.http_request http_request.headers.update(self.headers) class UserAgentPolicy(SansIOHTTPPolicy): _USERAGENT = "User-Agent" _ENV_ADDITIONAL_USER_AGENT = 'AZURE_HTTP_USER_AGENT' def __init__(self, user_agent=None, overwrite=False): # type: (Optional[str], bool) -> None self._overwrite = overwrite if user_agent is None: self._user_agent = "python/{} ({}) msrest/{}".format( platform.python_version(), platform.platform(), _msrest_version ) else: self._user_agent = user_agent # Whatever you gave me a header explicitly or not, # if the env variable is set, add to it. 
add_user_agent_header = os.environ.get(self._ENV_ADDITIONAL_USER_AGENT, None) if add_user_agent_header is not None: self.add_user_agent(add_user_agent_header) @property def user_agent(self): # type: () -> str """The current user agent value.""" return self._user_agent def add_user_agent(self, value): # type: (str) -> None """Add value to current user agent with a space. :param str value: value to add to user agent. """ self._user_agent = "{} {}".format(self._user_agent, value) def on_request(self, request, **kwargs): # type: (Request, Any) -> None http_request = request.http_request if self._overwrite or self._USERAGENT not in http_request.headers: http_request.headers[self._USERAGENT] = self._user_agent class HTTPLogger(SansIOHTTPPolicy): """A policy that logs HTTP request and response to the DEBUG logger. This accepts both global configuration, and kwargs request level with "enable_http_logger" """ def __init__(self, enable_http_logger = False): self.enable_http_logger = enable_http_logger def on_request(self, request, **kwargs): # type: (Request, Any) -> None http_request = request.http_request if kwargs.get("enable_http_logger", self.enable_http_logger): log_request(None, http_request) def on_response(self, request, response, **kwargs): # type: (Request, Response, Any) -> None http_request = request.http_request if kwargs.get("enable_http_logger", self.enable_http_logger): log_response(None, http_request, response.http_response, result=response) class RawDeserializer(SansIOHTTPPolicy): # Accept "text" because we're open minded people... JSON_REGEXP = re.compile(r'^(application|text)/([a-z+.]+\+)?json$') # Name used in context CONTEXT_NAME = "deserialized_data" @classmethod def deserialize_from_text(cls, data, content_type=None): # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any """Decode data according to content-type. Accept a stream of data as well, but will be load at once in memory for now. If no content-type, will return the string version (not bytes, not stream) :param data: Input, could be bytes or stream (will be decoded with UTF8) or text :type data: str or bytes or IO :param str content_type: The content type. """ if hasattr(data, 'read'): # Assume a stream data = cast(IO, data).read() if isinstance(data, bytes): data_as_str = data.decode(encoding='utf-8-sig') else: # Explain to mypy the correct type. data_as_str = cast(str, data) # Remove Byte Order Mark if present in string data_as_str = data_as_str.lstrip(_BOM) if content_type is None: return data if cls.JSON_REGEXP.match(content_type): try: return json.loads(data_as_str) except ValueError as err: raise DeserializationError("JSON is invalid: {}".format(err), err) elif "xml" in (content_type or []): try: try: if isinstance(data, unicode): # type: ignore # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore except NameError: pass return ET.fromstring(data_as_str) except ET.ParseError: # It might be because the server has an issue, and returned JSON with # content-type XML.... 
# So let's try a JSON load, and if it's still broken # let's flow the initial exception def _json_attemp(data): try: return True, json.loads(data) except ValueError: return False, None # Don't care about this one success, json_result = _json_attemp(data) if success: return json_result # If i'm here, it's not JSON, it's not XML, let's scream # and raise the last context in this block (the XML exception) # The function hack is because Py2.7 messes up with exception # context otherwise. _LOGGER.critical("Wasn't XML not JSON, failing") raise_with_traceback(DeserializationError, "XML is invalid") raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) @classmethod def deserialize_from_http_generics(cls, body_bytes, headers): # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any """Deserialize from HTTP response. Use bytes and headers to NOT use any requests/aiohttp or whatever specific implementation. Headers will tested for "content-type" """ # Try to use content-type from headers if available content_type = None if 'content-type' in headers: content_type = headers['content-type'].split(";")[0].strip().lower() # Ouch, this server did not declare what it sent... # Let's guess it's JSON... # Also, since Autorest was considering that an empty body was a valid JSON, # need that test as well.... else: content_type = "application/json" if body_bytes: return cls.deserialize_from_text(body_bytes, content_type) return None def on_response(self, request, response, **kwargs): # type: (Request, Response, Any) -> None """Extract data from the body of a REST response object. This will load the entire payload in memory. Will follow Content-Type to parse. We assume everything is UTF8 (BOM acceptable). :param raw_data: Data to be processed. :param content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 :raises xml.etree.ElementTree.ParseError: If bytes is not valid XML """ # If response was asked as stream, do NOT read anything and quit now if kwargs.get("stream", True): return http_response = response.http_response response.context[self.CONTEXT_NAME] = self.deserialize_from_http_generics( http_response.text(), http_response.headers ) msrest-for-python-0.6.21/msrest/polling/000077500000000000000000000000001400412460500201755ustar00rootroot00000000000000msrest-for-python-0.6.21/msrest/polling/__init__.py000066400000000000000000000031751400412460500223140ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- import sys from .poller import LROPoller, NoPolling, PollingMethod __all__ = ['LROPoller', 'NoPolling', 'PollingMethod'] if sys.version_info >= (3, 5, 2): # Not executed on old Python, no syntax error from .async_poller import AsyncNoPolling, AsyncPollingMethod, async_poller __all__ += ['AsyncNoPolling', 'AsyncPollingMethod', 'async_poller'] msrest-for-python-0.6.21/msrest/polling/async_poller.py000066400000000000000000000074411400412460500232470ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- from .poller import NoPolling as _NoPolling from ..serialization import Model from ..async_client import ServiceClientAsync from ..pipeline import ClientRawResponse class AsyncPollingMethod(object): """ABC class for polling method. """ def initialize(self, client, initial_response, deserialization_callback): raise NotImplementedError("This method needs to be implemented") async def run(self): raise NotImplementedError("This method needs to be implemented") def status(self): raise NotImplementedError("This method needs to be implemented") def finished(self): raise NotImplementedError("This method needs to be implemented") def resource(self): raise NotImplementedError("This method needs to be implemented") class AsyncNoPolling(_NoPolling): """An empty async poller that returns the deserialized initial response. """ async def run(self): """Empty run, no polling. Just override initial run to add "async" """ pass async def async_poller(client, initial_response, deserialization_callback, polling_method): """Async Poller for long running operations. :param client: A msrest service client. Can be a SDK client and it will be casted to a ServiceClient. 
:type client: msrest.service_client.ServiceClient :param initial_response: The initial call response :type initial_response: msrest.universal_http.ClientResponse or msrest.pipeline.ClientRawResponse :param deserialization_callback: A callback that takes a Response and return a deserialized object. If a subclass of Model is given, this passes "deserialize" as callback. :type deserialization_callback: callable or msrest.serialization.Model :param polling_method: The polling strategy to adopt :type polling_method: msrest.polling.PollingMethod """ try: client = client if isinstance(client, ServiceClientAsync) else client._client except AttributeError: raise ValueError("Poller client parameter must be a low-level msrest Service Client or a SDK client.") response = initial_response.response if isinstance(initial_response, ClientRawResponse) else initial_response if isinstance(deserialization_callback, type) and issubclass(deserialization_callback, Model): deserialization_callback = deserialization_callback.deserialize # Might raise a CloudError polling_method.initialize(client, response, deserialization_callback) await polling_method.run() return polling_method.resource() msrest-for-python-0.6.21/msrest/polling/poller.py000066400000000000000000000215251400412460500220510ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- import threading import time import uuid try: from urlparse import urlparse except ImportError: from urllib.parse import urlparse from typing import Any, Callable, Union, List, Optional, TYPE_CHECKING if TYPE_CHECKING: import requests from ..serialization import Model from ..service_client import ServiceClient from ..pipeline import ClientRawResponse class PollingMethod(object): """ABC class for polling method. 
""" def initialize(self, client, initial_response, deserialization_callback): # type: (Any, Any, Any) -> None raise NotImplementedError("This method needs to be implemented") def run(self): # type: () -> None raise NotImplementedError("This method needs to be implemented") def status(self): # type: () -> str raise NotImplementedError("This method needs to be implemented") def finished(self): # type: () -> bool raise NotImplementedError("This method needs to be implemented") def resource(self): # type: () -> Any raise NotImplementedError("This method needs to be implemented") class NoPolling(PollingMethod): """An empty poller that returns the deserialized initial response. """ def __init__(self): self._initial_response = None self._deserialization_callback = None def initialize(self, _, initial_response, deserialization_callback): # type: (Any, requests.Response, Callable) -> None self._initial_response = initial_response self._deserialization_callback = deserialization_callback def run(self): # type: () -> None """Empty run, no polling. """ pass def status(self): # type: () -> str """Return the current status as a string. :rtype: str """ return "succeeded" def finished(self): # type: () -> bool """Is this polling finished? :rtype: bool """ return True def resource(self): # type: () -> Any return self._deserialization_callback(self._initial_response) class LROPoller(object): """Poller for long running operations. :param client: A msrest service client. Can be a SDK client and it will be casted to a ServiceClient. :type client: msrest.service_client.ServiceClient :param initial_response: The initial call response :type initial_response: requests.Response or msrest.pipeline.ClientRawResponse :param deserialization_callback: A callback that takes a Response and return a deserialized object. If a subclass of Model is given, this passes "deserialize" as callback. :type deserialization_callback: callable or msrest.serialization.Model :param polling_method: The polling strategy to adopt :type polling_method: msrest.polling.PollingMethod """ def __init__(self, client, initial_response, deserialization_callback, polling_method): # type: (Any, Union[ClientRawResponse, requests.Response], Union[Model, Callable[[requests.Response], Model]], PollingMethod) -> None try: self._client = client if isinstance(client, ServiceClient) else client._client # type: ServiceClient except AttributeError: raise ValueError("Poller client parameter must be a low-level msrest Service Client or a SDK client.") self._response = initial_response.response if isinstance(initial_response, ClientRawResponse) else initial_response self._callbacks = [] # type: List[Callable] self._polling_method = polling_method if isinstance(deserialization_callback, type) and issubclass(deserialization_callback, Model): deserialization_callback = deserialization_callback.deserialize # type: ignore # Might raise a CloudError self._polling_method.initialize(self._client, self._response, deserialization_callback) # Prepare thread execution self._thread = None self._done = None self._exception = None if not self._polling_method.finished(): self._done = threading.Event() self._thread = threading.Thread( target=self._start, name="LROPoller({})".format(uuid.uuid4())) self._thread.daemon = True self._thread.start() def _start(self): """Start the long running operation. On completion, runs any callbacks. :param callable update_cmd: The API request to check the status of the operation. 
""" try: self._polling_method.run() except Exception as err: self._exception = err finally: self._done.set() callbacks, self._callbacks = self._callbacks, [] while callbacks: for call in callbacks: call(self._polling_method) callbacks, self._callbacks = self._callbacks, [] def status(self): # type: () -> str """Returns the current status string. :returns: The current status string :rtype: str """ return self._polling_method.status() def result(self, timeout=None): # type: (Optional[int]) -> Model """Return the result of the long running operation, or the result available after the specified timeout. :returns: The deserialized resource of the long running operation, if one is available. :raises CloudError: Server problem with the query. """ self.wait(timeout) return self._polling_method.resource() def wait(self, timeout=None): # type: (Optional[int]) -> None """Wait on the long running operation for a specified length of time. You can check if this call as ended with timeout with the "done()" method. :param int timeout: Period of time to wait for the long running operation to complete (in seconds). :raises CloudError: Server problem with the query. """ if self._thread is None: return self._thread.join(timeout=timeout) try: # Let's handle possible None in forgiveness here raise self._exception # type: ignore except TypeError: # Was None pass def done(self): # type: () -> bool """Check status of the long running operation. :returns: 'True' if the process has completed, else 'False'. """ return self._thread is None or not self._thread.is_alive() def add_done_callback(self, func): # type: (Callable) -> None """Add callback function to be run once the long running operation has completed - regardless of the status of the operation. :param callable func: Callback function that takes at least one argument, a completed LongRunningOperation. """ # Still use "_done" and not "done", since CBs are executed inside the thread. if self._done is None or self._done.is_set(): func(self._polling_method) # Let's add them still, for consistency (if you wish to access to it for some reasons) self._callbacks.append(func) def remove_done_callback(self, func): # type: (Callable) -> None """Remove a callback from the long running operation. :param callable func: The function to be removed from the callbacks. :raises: ValueError if the long running operation has already completed. """ if self._done is None or self._done.is_set(): raise ValueError("Process is complete.") self._callbacks = [c for c in self._callbacks if c != func] msrest-for-python-0.6.21/msrest/py.typed000066400000000000000000000000321400412460500202230ustar00rootroot00000000000000# Marker file for PEP 561.msrest-for-python-0.6.21/msrest/serialization.py000066400000000000000000002312101400412460500217570ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- from base64 import b64decode, b64encode import calendar import datetime import decimal import email from enum import Enum import json import logging import re import sys try: from urllib import quote # type: ignore except ImportError: from urllib.parse import quote # type: ignore import xml.etree.ElementTree as ET import isodate from typing import Dict, Any from .exceptions import ( ValidationError, SerializationError, DeserializationError, raise_with_traceback) try: basestring # type: ignore unicode_str = unicode # type: ignore except NameError: basestring = str # type: ignore unicode_str = str # type: ignore _LOGGER = logging.getLogger(__name__) try: _long_type = long # type: ignore except NameError: _long_type = int class UTC(datetime.tzinfo): """Time Zone info for handling UTC""" def utcoffset(self, dt): """UTC offset for UTC is 0.""" return datetime.timedelta(0) def tzname(self, dt): """Timestamp representation.""" return "Z" def dst(self, dt): """No daylight saving for UTC.""" return datetime.timedelta(hours=1) try: from datetime import timezone as _FixedOffset except ImportError: # Python 2.7 class _FixedOffset(datetime.tzinfo): # type: ignore """Fixed offset in minutes east from UTC. Copy/pasted from Python doc :param datetime.timedelta offset: offset in timedelta format """ def __init__(self, offset): self.__offset = offset def utcoffset(self, dt): return self.__offset def tzname(self, dt): return str(self.__offset.total_seconds()/3600) def __repr__(self): return "<FixedOffset {}>".format(self.tzname(None)) def dst(self, dt): return datetime.timedelta(0) def __getinitargs__(self): return (self.__offset,) try: from datetime import timezone TZ_UTC = timezone.utc # type: ignore except ImportError: TZ_UTC = UTC() # type: ignore _FLATTEN = re.compile(r"(?<!\\)\.")
y, "minimum": lambda x, y: x < y, "maximum": lambda x, y: x > y, "minimum_ex": lambda x, y: x <= y, "maximum_ex": lambda x, y: x >= y, "min_items": lambda x, y: len(x) < y, "max_items": lambda x, y: len(x) > y, "pattern": lambda x, y: not re.match(y, x, re.UNICODE), "unique": lambda x, y: len(x) != len(set(x)), "multiple": lambda x, y: x % y != 0 } def __init__(self, classes=None): self.serialize_type = { 'iso-8601': Serializer.serialize_iso, 'rfc-1123': Serializer.serialize_rfc, 'unix-time': Serializer.serialize_unix, 'duration': Serializer.serialize_duration, 'date': Serializer.serialize_date, 'time': Serializer.serialize_time, 'decimal': Serializer.serialize_decimal, 'long': Serializer.serialize_long, 'bytearray': Serializer.serialize_bytearray, 'base64': Serializer.serialize_base64, 'object': self.serialize_object, '[]': self.serialize_iter, '{}': self.serialize_dict } self.dependencies = dict(classes) if classes else {} self.key_transformer = full_restapi_key_transformer self.client_side_validation = True def _serialize(self, target_obj, data_type=None, **kwargs): """Serialize data into a string according to type. :param target_obj: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str, dict :raises: SerializationError if serialization fails. """ key_transformer = kwargs.get("key_transformer", self.key_transformer) keep_readonly = kwargs.get("keep_readonly", False) if target_obj is None: return None attr_name = None class_name = target_obj.__class__.__name__ if data_type: return self.serialize_data( target_obj, data_type, **kwargs) if not hasattr(target_obj, "_attribute_map"): data_type = type(target_obj).__name__ if data_type in self.basic_types.values(): return self.serialize_data( target_obj, data_type, **kwargs) # Force "is_xml" kwargs if we detect a XML model try: is_xml_model_serialization = kwargs["is_xml"] except KeyError: is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) serialized = {} if is_xml_model_serialization: serialized = target_obj._create_xml_node() try: attributes = target_obj._attribute_map for attr, attr_desc in attributes.items(): attr_name = attr if not keep_readonly and target_obj._validation.get(attr_name, {}).get('readonly', False): continue if attr_name == "additional_properties" and attr_desc["key"] == '': if target_obj.additional_properties is not None: serialized.update(target_obj.additional_properties) continue try: ### Extract sub-data to serialize from model ### orig_attr = getattr(target_obj, attr) if is_xml_model_serialization: pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" else: # JSON keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) keys = keys if isinstance(keys, list) else [keys] ### Serialize this data ### kwargs["serialization_ctxt"] = attr_desc new_attr = self.serialize_data(orig_attr, attr_desc['type'], **kwargs) ### Incorporate this data in the right place ### if is_xml_model_serialization: xml_desc = attr_desc.get('xml', {}) xml_name = xml_desc.get('name', attr_desc['key']) xml_prefix = xml_desc.get('prefix', None) xml_ns = xml_desc.get('ns', None) if xml_desc.get("attr", False): if xml_ns: ET.register_namespace(xml_prefix, xml_ns) xml_name = "{{{}}}{}".format(xml_ns, xml_name) serialized.set(xml_name, new_attr) continue if xml_desc.get("text", False): serialized.text = new_attr continue if isinstance(new_attr, list): serialized.extend(new_attr) elif isinstance(new_attr, ET.Element): # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. if 'name' not in getattr(orig_attr, '_xml_map', {}): splitted_tag = new_attr.tag.split("}") if len(splitted_tag) == 2: # Namespace new_attr.tag = "}".join([splitted_tag[0], xml_name]) else: new_attr.tag = xml_name serialized.append(new_attr) else: # That's a basic type # Integrate namespace if necessary local_node = _create_xml_node( xml_name, xml_prefix, xml_ns ) local_node.text = unicode_str(new_attr) serialized.append(local_node) else: # JSON for k in reversed(keys): unflattened = {k: new_attr} new_attr = unflattened _new_attr = new_attr _serialized = serialized for k in keys: if k not in _serialized: _serialized.update(_new_attr) _new_attr = _new_attr[k] _serialized = _serialized[k] except ValueError: continue except (AttributeError, KeyError, TypeError) as err: msg = "Attribute {} in object {} cannot be serialized.\n{}".format( attr_name, class_name, str(target_obj)) raise_with_traceback(SerializationError, msg, err) else: return serialized def body(self, data, data_type, **kwargs): """Serialize data intended for a request body. :param data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: dict :raises: SerializationError if serialization fails. :raises: ValueError if data is None """ if data is None: raise ValidationError("required", "body", True) # Just in case this is a dict internal_data_type = data_type.strip('[]{}') internal_data_type = self.dependencies.get(internal_data_type, None) try: is_xml_model_serialization = kwargs["is_xml"] except KeyError: if internal_data_type and issubclass(internal_data_type, Model): is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) else: is_xml_model_serialization = False if internal_data_type and not isinstance(internal_data_type, Enum): try: deserializer = Deserializer(self.dependencies) # Since it's on serialization, it's almost sure that format is not JSON REST # We're not able to deal with additional properties for now. 
deserializer.additional_properties_detection = False if is_xml_model_serialization: deserializer.key_extractors = [ attribute_key_case_insensitive_extractor, ] else: deserializer.key_extractors = [ rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor, last_rest_key_case_insensitive_extractor ] data = deserializer._deserialize(data_type, data) except DeserializationError as err: raise_with_traceback( SerializationError, "Unable to build a model: "+str(err), err) if self.client_side_validation: errors = _recursive_validate(data_type, data_type, data) if errors: raise errors[0] return self._serialize(data, data_type, **kwargs) def _http_component_validation(self, data, data_type, name, **kwargs): if self.client_side_validation: # https://github.com/Azure/msrest-for-python/issues/85 if data is not None and data_type in self.basic_types.values(): data = self.serialize_basic(data, data_type, **kwargs) data = self.validate(data, name, required=True, **kwargs) return data def url(self, name, data, data_type, **kwargs): """Serialize data intended for a URL path. :param data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str :raises: TypeError if serialization fails. :raises: ValueError if data is None """ data = self._http_component_validation(data, data_type, name, **kwargs) try: output = self.serialize_data(data, data_type, **kwargs) if data_type == 'bool': output = json.dumps(output) if kwargs.get('skip_quote') is True: output = str(output) else: output = quote(str(output), safe='') except SerializationError: raise TypeError("{} must be type {}.".format(name, data_type)) else: return output def query(self, name, data, data_type, **kwargs): """Serialize data intended for a URL query. :param data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str :raises: TypeError if serialization fails. :raises: ValueError if data is None """ data = self._http_component_validation(data, data_type, name, **kwargs) try: # Treat the list aside, since we don't want to encode the div separator if data_type.startswith("["): internal_data_type = data_type[1:-1] data = [ self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data ] if not kwargs.get('skip_quote', False): data = [ quote(str(d), safe='') for d in data ] return str(self.serialize_iter(data, internal_data_type, **kwargs)) # Not a list, regular serialization output = self.serialize_data(data, data_type, **kwargs) if data_type == 'bool': output = json.dumps(output) if kwargs.get('skip_quote') is True: output = str(output) else: output = quote(str(output), safe='') except SerializationError: raise TypeError("{} must be type {}.".format(name, data_type)) else: return str(output) def header(self, name, data, data_type, **kwargs): """Serialize data intended for a request header. :param data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str :raises: TypeError if serialization fails. 
:raises: ValueError if data is None """ data = self._http_component_validation(data, data_type, name, **kwargs) try: if data_type in ['[str]']: data = ["" if d is None else d for d in data] output = self.serialize_data(data, data_type, **kwargs) if data_type == 'bool': output = json.dumps(output) except SerializationError: raise TypeError("{} must be type {}.".format(name, data_type)) else: return str(output) @classmethod def validate(cls, data, name, **kwargs): """Validate that a piece of data meets certain conditions""" required = kwargs.get('required', False) if required and data is None: raise ValidationError("required", name, True) elif data is None: return elif kwargs.get('readonly'): return try: for key, value in kwargs.items(): validator = cls.validation.get(key, lambda x, y: False) if validator(data, value): raise ValidationError(key, name, value) except TypeError: raise ValidationError("unknown", name, "unknown") else: return data def serialize_data(self, data, data_type, **kwargs): """Serialize generic data according to supplied data type. :param data: The data to be serialized. :param str data_type: The type to be serialized from. :param bool required: Whether it's essential that the data not be empty or None :raises: AttributeError if required data is None. :raises: ValueError if data is None :raises: SerializationError if serialization fails. """ if data is None: raise ValueError("No value for given attribute") try: if data_type in self.basic_types.values(): return self.serialize_basic(data, data_type, **kwargs) elif data_type in self.serialize_type: return self.serialize_type[data_type](data, **kwargs) # If dependencies is empty, try with current data class # It has to be a subclass of Enum anyway enum_type = self.dependencies.get(data_type, data.__class__) if issubclass(enum_type, Enum): return Serializer.serialize_enum(data, enum_obj=enum_type) iter_type = data_type[0] + data_type[-1] if iter_type in self.serialize_type: return self.serialize_type[iter_type]( data, data_type[1:-1], **kwargs) except (ValueError, TypeError) as err: msg = "Unable to serialize value: {!r} as type: {!r}." raise_with_traceback( SerializationError, msg.format(data, data_type), err) else: return self._serialize(data, **kwargs) @classmethod def _get_custom_serializers(cls, data_type, **kwargs): custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) if custom_serializer: return custom_serializer if kwargs.get("is_xml", False): return cls._xml_basic_types_serializers.get(data_type) @classmethod def serialize_basic(cls, data, data_type, **kwargs): """Serialize basic builting data type. Serializes objects to str, int, float or bool. Possible kwargs: - basic_types_serializers dict[str, callable] : If set, use the callable as serializer - is_xml bool : If set, use xml_basic_types_serializers :param data: Object to be serialized. :param str data_type: Type of object in the iterable. """ custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == 'str': return cls.serialize_unicode(data) return eval(data_type)(data) @classmethod def serialize_unicode(cls, data): """Special handling for serializing unicode strings in Py2. Encode to UTF-8 if unicode, otherwise handle as a str. :param data: Object to be serialized. 
:rtype: str """ try: # If I received an enum, return its value return data.value except AttributeError: pass try: if isinstance(data, unicode): # Don't change it, JSON and XML ElementTree are totally able # to serialize correctly u'' strings return data except NameError: return str(data) else: return str(data) def serialize_iter(self, data, iter_type, div=None, **kwargs): """Serialize iterable. Supported kwargs: - serialization_ctxt dict : The current entry of _attribute_map, or same format. serialization_ctxt['type'] should be same as data_type. - is_xml bool : If set, serialize as XML :param list attr: Object to be serialized. :param str iter_type: Type of object in the iterable. :param bool required: Whether the objects in the iterable must not be None or empty. :param str div: If set, this str will be used to combine the elements in the iterable into a combined string. Default is 'None'. :rtype: list, str """ if isinstance(data, str): raise SerializationError("Refuse str type as a valid iter type.") serialization_ctxt = kwargs.get("serialization_ctxt", {}) is_xml = kwargs.get("is_xml", False) serialized = [] for d in data: try: serialized.append(self.serialize_data(d, iter_type, **kwargs)) except ValueError: serialized.append(None) if div: serialized = ['' if s is None else str(s) for s in serialized] serialized = div.join(serialized) if 'xml' in serialization_ctxt or is_xml: # XML serialization is more complicated xml_desc = serialization_ctxt.get('xml', {}) xml_name = xml_desc.get('name') if not xml_name: xml_name = serialization_ctxt['key'] # Create a wrap node if necessary (use the fact that Element and list have "append") is_wrapped = xml_desc.get("wrapped", False) node_name = xml_desc.get("itemsName", xml_name) if is_wrapped: final_result = _create_xml_node( xml_name, xml_desc.get('prefix', None), xml_desc.get('ns', None) ) else: final_result = [] # All list elements to "local_node" for el in serialized: if isinstance(el, ET.Element): el_node = el else: el_node = _create_xml_node( node_name, xml_desc.get('prefix', None), xml_desc.get('ns', None) ) if el is not None: # Otherwise it writes "None" :-p el_node.text = str(el) final_result.append(el_node) return final_result return serialized def serialize_dict(self, attr, dict_type, **kwargs): """Serialize a dictionary of objects. :param dict attr: Object to be serialized. :param str dict_type: Type of object in the dictionary. :param bool required: Whether the objects in the dictionary must not be None or empty. :rtype: dict """ serialization_ctxt = kwargs.get("serialization_ctxt", {}) serialized = {} for key, value in attr.items(): try: serialized[self.serialize_unicode(key)] = self.serialize_data( value, dict_type, **kwargs) except ValueError: serialized[self.serialize_unicode(key)] = None if 'xml' in serialization_ctxt: # XML serialization is more complicated xml_desc = serialization_ctxt['xml'] xml_name = xml_desc['name'] final_result = _create_xml_node( xml_name, xml_desc.get('prefix', None), xml_desc.get('ns', None) ) for key, value in serialized.items(): ET.SubElement(final_result, key).text = value return final_result return serialized def serialize_object(self, attr, **kwargs): """Serialize a generic object. This will be handled as a dictionary. If object passed in is not a basic type (str, int, float, dict, list) it will simply be cast to str. :param dict attr: Object to be serialized. 
:rtype: dict or str """ if attr is None: return None if isinstance(attr, ET.Element): return attr obj_type = type(attr) if obj_type in self.basic_types: return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) if obj_type is _long_type: return self.serialize_long(attr) if obj_type is unicode_str: return self.serialize_unicode(attr) if obj_type is datetime.datetime: return self.serialize_iso(attr) if obj_type is datetime.date: return self.serialize_date(attr) if obj_type is datetime.time: return self.serialize_time(attr) if obj_type is datetime.timedelta: return self.serialize_duration(attr) if obj_type is decimal.Decimal: return self.serialize_decimal(attr) # If it's a model or I know this dependency, serialize as a Model elif obj_type in self.dependencies.values() or isinstance(attr, Model): return self._serialize(attr) if obj_type == dict: serialized = {} for key, value in attr.items(): try: serialized[self.serialize_unicode(key)] = self.serialize_object( value, **kwargs) except ValueError: serialized[self.serialize_unicode(key)] = None return serialized if obj_type == list: serialized = [] for obj in attr: try: serialized.append(self.serialize_object( obj, **kwargs)) except ValueError: pass return serialized return str(attr) @staticmethod def serialize_enum(attr, enum_obj=None): try: result = attr.value except AttributeError: result = attr try: enum_obj(result) return result except ValueError: for enum_value in enum_obj: if enum_value.value.lower() == str(attr).lower(): return enum_value.value error = "{!r} is not valid value for enum {!r}" raise SerializationError(error.format(attr, enum_obj)) @staticmethod def serialize_bytearray(attr, **kwargs): """Serialize bytearray into base-64 string. :param attr: Object to be serialized. :rtype: str """ return b64encode(attr).decode() @staticmethod def serialize_base64(attr, **kwargs): """Serialize str into base-64 string. :param attr: Object to be serialized. :rtype: str """ encoded = b64encode(attr).decode('ascii') return encoded.strip('=').replace('+', '-').replace('/', '_') @staticmethod def serialize_decimal(attr, **kwargs): """Serialize Decimal object to float. :param attr: Object to be serialized. :rtype: float """ return float(attr) @staticmethod def serialize_long(attr, **kwargs): """Serialize long (Py2) or int (Py3). :param attr: Object to be serialized. :rtype: int/long """ return _long_type(attr) @staticmethod def serialize_date(attr, **kwargs): """Serialize Date object into ISO-8601 formatted string. :param Date attr: Object to be serialized. :rtype: str """ if isinstance(attr, str): attr = isodate.parse_date(attr) t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) return t @staticmethod def serialize_time(attr, **kwargs): """Serialize Time object into ISO-8601 formatted string. :param datetime.time attr: Object to be serialized. :rtype: str """ if isinstance(attr, str): attr = isodate.parse_time(attr) t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) if attr.microsecond: t += ".{:02}".format(attr.microsecond) return t @staticmethod def serialize_duration(attr, **kwargs): """Serialize TimeDelta object into ISO-8601 formatted string. :param TimeDelta attr: Object to be serialized. :rtype: str """ if isinstance(attr, str): attr = isodate.parse_duration(attr) return isodate.duration_isoformat(attr) @staticmethod def serialize_rfc(attr, **kwargs): """Serialize Datetime object into RFC-1123 formatted string. :param Datetime attr: Object to be serialized. 
:rtype: str :raises: TypeError if format invalid. """ try: if not attr.tzinfo: _LOGGER.warning( "Datetime with no tzinfo will be considered UTC.") utc = attr.utctimetuple() except AttributeError: raise TypeError("RFC1123 object must be valid Datetime object.") return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( Serializer.days[utc.tm_wday], utc.tm_mday, Serializer.months[utc.tm_mon], utc.tm_year, utc.tm_hour, utc.tm_min, utc.tm_sec) @staticmethod def serialize_iso(attr, **kwargs): """Serialize Datetime object into ISO-8601 formatted string. :param Datetime attr: Object to be serialized. :rtype: str :raises: SerializationError if format invalid. """ if isinstance(attr, str): attr = isodate.parse_datetime(attr) try: if not attr.tzinfo: _LOGGER.warning( "Datetime with no tzinfo will be considered UTC.") utc = attr.utctimetuple() if utc.tm_year > 9999 or utc.tm_year < 1: raise OverflowError("Hit max or min date") microseconds = str(attr.microsecond).rjust(6,'0').rstrip('0').ljust(3, '0') if microseconds: microseconds = '.'+microseconds date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec) return date + microseconds + 'Z' except (ValueError, OverflowError) as err: msg = "Unable to serialize datetime object." raise_with_traceback(SerializationError, msg, err) except AttributeError as err: msg = "ISO-8601 object must be valid Datetime object." raise_with_traceback(TypeError, msg, err) @staticmethod def serialize_unix(attr, **kwargs): """Serialize Datetime object into IntTime format. This is represented as seconds. :param Datetime attr: Object to be serialized. :rtype: int :raises: SerializationError if format invalid """ if isinstance(attr, int): return attr try: if not attr.tzinfo: _LOGGER.warning( "Datetime with no tzinfo will be considered UTC.") return int(calendar.timegm(attr.utctimetuple())) except AttributeError: raise TypeError("Unix time object must be valid Datetime object.") def rest_key_extractor(attr, attr_desc, data): key = attr_desc['key'] working_data = data while '.' in key: dict_keys = _FLATTEN.split(key) if len(dict_keys) == 1: key = _decode_attribute_map_key(dict_keys[0]) break working_key = _decode_attribute_map_key(dict_keys[0]) working_data = working_data.get(working_key, data) if working_data is None: # If at any point while following flatten JSON path see None, it means # that all properties under are None as well # https://github.com/Azure/msrest-for-python/issues/197 return None key = '.'.join(dict_keys[1:]) return working_data.get(key) def rest_key_case_insensitive_extractor(attr, attr_desc, data): key = attr_desc['key'] working_data = data while '.' in key: dict_keys = _FLATTEN.split(key) if len(dict_keys) == 1: key = _decode_attribute_map_key(dict_keys[0]) break working_key = _decode_attribute_map_key(dict_keys[0]) working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) if working_data is None: # If at any point while following flatten JSON path see None, it means # that all properties under are None as well # https://github.com/Azure/msrest-for-python/issues/197 return None key = '.'.join(dict_keys[1:]) if working_data: return attribute_key_case_insensitive_extractor(key, None, working_data) def last_rest_key_extractor(attr, attr_desc, data): """Extract the attribute in "data" based on the last part of the JSON path key. 
""" key = attr_desc['key'] dict_keys = _FLATTEN.split(key) return attribute_key_extractor(dict_keys[-1], None, data) def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): """Extract the attribute in "data" based on the last part of the JSON path key. This is the case insensitive version of "last_rest_key_extractor" """ key = attr_desc['key'] dict_keys = _FLATTEN.split(key) return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) def attribute_key_extractor(attr, _, data): return data.get(attr) def attribute_key_case_insensitive_extractor(attr, _, data): found_key = None lower_attr = attr.lower() for key in data: if lower_attr == key.lower(): found_key = key break return data.get(found_key) def _extract_name_from_internal_type(internal_type): """Given an internal type XML description, extract correct XML name with namespace. :param dict internal_type: An model type :rtype: tuple :returns: A tuple XML name + namespace dict """ internal_type_xml_map = getattr(internal_type, "_xml_map", {}) xml_name = internal_type_xml_map.get('name', internal_type.__name__) xml_ns = internal_type_xml_map.get("ns", None) if xml_ns: xml_name = "{{{}}}{}".format(xml_ns, xml_name) return xml_name def xml_key_extractor(attr, attr_desc, data): if isinstance(data, dict): return None # Test if this model is XML ready first if not isinstance(data, ET.Element): return None xml_desc = attr_desc.get('xml', {}) xml_name = xml_desc.get('name', attr_desc['key']) # Look for a children is_iter_type = attr_desc['type'].startswith("[") is_wrapped = xml_desc.get("wrapped", False) internal_type = attr_desc.get("internalType", None) internal_type_xml_map = getattr(internal_type, "_xml_map", {}) # Integrate namespace if necessary xml_ns = xml_desc.get('ns', internal_type_xml_map.get("ns", None)) if xml_ns: xml_name = "{{{}}}{}".format(xml_ns, xml_name) # If it's an attribute, that's simple if xml_desc.get("attr", False): return data.get(xml_name) # If it's x-ms-text, that's simple too if xml_desc.get("text", False): return data.text # Scenario where I take the local name: # - Wrapped node # - Internal type is an enum (considered basic types) # - Internal type has no XML/Name node if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or 'name' not in internal_type_xml_map)): children = data.findall(xml_name) # If internal type has a local name and it's not a list, I use that name elif not is_iter_type and internal_type and 'name' in internal_type_xml_map: xml_name = _extract_name_from_internal_type(internal_type) children = data.findall(xml_name) # That's an array else: if internal_type: # Complex type, ignore itemsName and use the complex type name items_name = _extract_name_from_internal_type(internal_type) else: items_name = xml_desc.get("itemsName", xml_name) children = data.findall(items_name) if len(children) == 0: if is_iter_type: if is_wrapped: return None # is_wrapped no node, we want None else: return [] # not wrapped, assume empty list return None # Assume it's not there, maybe an optional node. # If is_iter_type and not wrapped, return all found children if is_iter_type: if not is_wrapped: return children else: # Iter and wrapped, should have found one node only (the wrap one) if len(children) != 1: raise DeserializationError( "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( xml_name )) return list(children[0]) # Might be empty list and that's ok. 
# Here it's not a itertype, we should have found one element only or empty if len(children) > 1: raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) return children[0] class Deserializer(object): """Response object model deserializer. :param dict classes: Class type dictionary for deserializing complex types. :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. """ basic_types = {str: 'str', int: 'int', bool: 'bool', float: 'float'} valid_date = re.compile( r'\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}' r'\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?') def __init__(self, classes=None): self.deserialize_type = { 'iso-8601': Deserializer.deserialize_iso, 'rfc-1123': Deserializer.deserialize_rfc, 'unix-time': Deserializer.deserialize_unix, 'duration': Deserializer.deserialize_duration, 'date': Deserializer.deserialize_date, 'time': Deserializer.deserialize_time, 'decimal': Deserializer.deserialize_decimal, 'long': Deserializer.deserialize_long, 'bytearray': Deserializer.deserialize_bytearray, 'base64': Deserializer.deserialize_base64, 'object': self.deserialize_object, '[]': self.deserialize_iter, '{}': self.deserialize_dict } self.deserialize_expected_types = { 'duration': (isodate.Duration, datetime.timedelta), 'iso-8601': (datetime.datetime) } self.dependencies = dict(classes) if classes else {} self.key_extractors = [ rest_key_extractor, xml_key_extractor ] # Additional properties only works if the "rest_key_extractor" is used to # extract the keys. Making it to work whatever the key extractor is too much # complicated, with no real scenario for now. # So adding a flag to disable additional properties detection. This flag should be # used if your expect the deserialization to NOT come from a JSON REST syntax. # Otherwise, result are unexpected self.additional_properties_detection = True def __call__(self, target_obj, response_data, content_type=None): """Call the deserializer to process a REST response. :param str target_obj: Target data type to deserialize to. :param requests.Response response_data: REST response object. :param str content_type: Swagger "produces" if available. :raises: DeserializationError if deserialization fails. :return: Deserialized object. """ data = self._unpack_content(response_data, content_type) return self._deserialize(target_obj, data) def _deserialize(self, target_obj, data): """Call the deserializer on a model. Data needs to be already deserialized as JSON or XML ElementTree :param str target_obj: Target data type to deserialize to. :param object data: Object to deserialize. :raises: DeserializationError if deserialization fails. :return: Deserialized object. 
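Example (an illustrative sketch only; "MyModel" and its "name" attribute are hypothetical placeholders, not names defined by msrest):

    deserializer = Deserializer({'MyModel': MyModel})
    model = deserializer._deserialize('MyModel', {'name': 'contoso'})

The public __call__ above first unpacks the raw HTTP response through _unpack_content, then delegates to this method with the decoded JSON (dict) or XML ElementTree.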
""" # This is already a model, go recursive just in case if hasattr(data, "_attribute_map"): constants = [name for name, config in getattr(data, '_validation', {}).items() if config.get('constant')] try: for attr, mapconfig in data._attribute_map.items(): if attr in constants: continue value = getattr(data, attr) if value is None: continue local_type = mapconfig['type'] internal_data_type = local_type.strip('[]{}') if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): continue setattr( data, attr, self._deserialize(local_type, value) ) return data except AttributeError: return response, class_name = self._classify_target(target_obj, data) if isinstance(response, basestring): return self.deserialize_data(data, response) elif isinstance(response, type) and issubclass(response, Enum): return self.deserialize_enum(data, response) if data is None: return data try: attributes = response._attribute_map d_attrs = {} for attr, attr_desc in attributes.items(): # Check empty string. If it's not empty, someone has a real "additionalProperties"... if attr == "additional_properties" and attr_desc["key"] == '': continue raw_value = None # Enhance attr_desc with some dynamic data attr_desc = attr_desc.copy() # Do a copy, do not change the real one internal_data_type = attr_desc["type"].strip('[]{}') if internal_data_type in self.dependencies: attr_desc["internalType"] = self.dependencies[internal_data_type] for key_extractor in self.key_extractors: found_value = key_extractor(attr, attr_desc, data) if found_value is not None: if raw_value is not None and raw_value != found_value: msg = ("Ignoring extracted value '%s' from %s for key '%s'" " (duplicate extraction, follow extractors order)" ) _LOGGER.warning( msg, found_value, key_extractor, attr ) continue raw_value = found_value value = self.deserialize_data(raw_value, attr_desc['type']) d_attrs[attr] = value except (AttributeError, TypeError, KeyError) as err: msg = "Unable to deserialize to object: " + class_name raise_with_traceback(DeserializationError, msg, err) else: additional_properties = self._build_additional_properties(attributes, data) return self._instantiate_model(response, d_attrs, additional_properties) def _build_additional_properties(self, attribute_map, data): if not self.additional_properties_detection: return None if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != '': # Check empty string. If it's not empty, someone has a real "additionalProperties" return None if isinstance(data, ET.Element): data = {el.tag: el.text for el in data} known_keys = {_decode_attribute_map_key(_FLATTEN.split(desc['key'])[0]) for desc in attribute_map.values() if desc['key'] != ''} present_keys = set(data.keys()) missing_keys = present_keys - known_keys return {key: data[key] for key in missing_keys} def _classify_target(self, target, data): """Check to see whether the deserialization target object can be classified into a subclass. Once classification has been determined, initialize object. :param str target: The target object type to deserialize to. :param str/dict data: The response data to deseralize. 
""" if target is None: return None, None if isinstance(target, basestring): try: target = self.dependencies[target] except KeyError: return target, target try: target = target._classify(data, self.dependencies) except AttributeError: pass # Target is not a Model, no classify return target, target.__class__.__name__ def failsafe_deserialize(self, target_obj, data, content_type=None): """Ignores any errors encountered in deserialization, and falls back to not deserializing the object. Recommended for use in error deserialization, as we want to return the HttpResponseError to users, and not have them deal with a deserialization error. :param str target_obj: The target object type to deserialize to. :param str/dict data: The response data to deseralize. :param str content_type: Swagger "produces" if available. """ try: return self(target_obj, data, content_type=content_type) except: _LOGGER.warning( "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True ) return None @staticmethod def _unpack_content(raw_data, content_type=None): """Extract the correct structure for deserialization. If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. if we can't, raise. Your Pipeline should have a RawDeserializer. If not a pipeline response and raw_data is bytes or string, use content-type to decode it. If no content-type, try JSON. If raw_data is something else, bypass all logic and return it directly. :param raw_data: Data to be processed. :param content_type: How to parse if raw_data is a string/bytes. :raises JSONDecodeError: If JSON is requested and parsing is impossible. :raises UnicodeDecodeError: If bytes is not UTF8 """ # This avoids a circular dependency. We might want to consider RawDesializer is more generic # than the pipeline concept, and put it in a toolbox, used both here and in pipeline. TBD. from .pipeline.universal import RawDeserializer # Assume this is enough to detect a Pipeline Response without importing it context = getattr(raw_data, "context", {}) if context: if RawDeserializer.CONTEXT_NAME in context: return context[RawDeserializer.CONTEXT_NAME] raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") #Assume this is enough to recognize universal_http.ClientResponse without importing it if hasattr(raw_data, "body"): return RawDeserializer.deserialize_from_http_generics( raw_data.text(), raw_data.headers ) # Assume this enough to recognize requests.Response without importing it. if hasattr(raw_data, '_content_consumed'): return RawDeserializer.deserialize_from_http_generics( raw_data.text, raw_data.headers ) if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, 'read'): return RawDeserializer.deserialize_from_text(raw_data, content_type) return raw_data def _instantiate_model(self, response, attrs, additional_properties=None): """Instantiate a response model passing in deserialized args. :param response: The response model class. :param d_attrs: The deserialized response attributes. 
""" if callable(response): subtype = getattr(response, '_subtype_map', {}) try: readonly = [k for k, v in response._validation.items() if v.get('readonly')] const = [k for k, v in response._validation.items() if v.get('constant')] kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} response_obj = response(**kwargs) for attr in readonly: setattr(response_obj, attr, attrs.get(attr)) if additional_properties: response_obj.additional_properties = additional_properties return response_obj except TypeError as err: msg = "Unable to deserialize {} into model {}. ".format( kwargs, response) raise DeserializationError(msg + str(err)) else: try: for attr, value in attrs.items(): setattr(response, attr, value) return response except Exception as exp: msg = "Unable to populate response model. " msg += "Type: {}, Error: {}".format(type(response), exp) raise DeserializationError(msg) def deserialize_data(self, data, data_type): """Process data for deserialization according to data type. :param str data: The response string to be deserialized. :param str data_type: The type to deserialize to. :raises: DeserializationError if deserialization fails. :return: Deserialized object. """ if data is None: return data try: if not data_type: return data if data_type in self.basic_types.values(): return self.deserialize_basic(data, data_type) if data_type in self.deserialize_type: if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): return data is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: return None data_val = self.deserialize_type[data_type](data) return data_val iter_type = data_type[0] + data_type[-1] if iter_type in self.deserialize_type: return self.deserialize_type[iter_type](data, data_type[1:-1]) obj_type = self.dependencies[data_type] if issubclass(obj_type, Enum): if isinstance(data, ET.Element): data = data.text return self.deserialize_enum(data, obj_type) except (ValueError, TypeError, AttributeError) as err: msg = "Unable to deserialize response data." msg += " Data: {}, {}".format(data, data_type) raise_with_traceback(DeserializationError, msg, err) else: return self._deserialize(obj_type, data) def deserialize_iter(self, attr, iter_type): """Deserialize an iterable. :param list attr: Iterable to be deserialized. :param str iter_type: The type of object in the iterable. :rtype: list """ if attr is None: return None if isinstance(attr, ET.Element): # If I receive an element here, get the children attr = list(attr) if not isinstance(attr, (list, set)): raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format( iter_type, type(attr) )) return [self.deserialize_data(a, iter_type) for a in attr] def deserialize_dict(self, attr, dict_type): """Deserialize a dictionary. :param dict/list attr: Dictionary to be deserialized. Also accepts a list of key, value pairs. :param str dict_type: The object type of the items in the dictionary. :rtype: dict """ if isinstance(attr, list): return {x['key']: self.deserialize_data(x['value'], dict_type) for x in attr} if isinstance(attr, ET.Element): # Transform value into {"Key": "value"} attr = {el.tag: el.text for el in attr} return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} def deserialize_object(self, attr, **kwargs): """Deserialize a generic object. This will be handled as a dictionary. :param dict attr: Dictionary to be deserialized. 
:rtype: dict :raises: TypeError if non-builtin datatype encountered. """ if attr is None: return None if isinstance(attr, ET.Element): # Do no recurse on XML, just return the tree as-is return attr if isinstance(attr, basestring): return self.deserialize_basic(attr, 'str') obj_type = type(attr) if obj_type in self.basic_types: return self.deserialize_basic(attr, self.basic_types[obj_type]) if obj_type is _long_type: return self.deserialize_long(attr) if obj_type == dict: deserialized = {} for key, value in attr.items(): try: deserialized[key] = self.deserialize_object( value, **kwargs) except ValueError: deserialized[key] = None return deserialized if obj_type == list: deserialized = [] for obj in attr: try: deserialized.append(self.deserialize_object( obj, **kwargs)) except ValueError: pass return deserialized else: error = "Cannot deserialize generic object with type: " raise TypeError(error + str(obj_type)) def deserialize_basic(self, attr, data_type): """Deserialize baisc builtin data type from string. Will attempt to convert to str, int, float and bool. This function will also accept '1', '0', 'true' and 'false' as valid bool values. :param str attr: response string to be deserialized. :param str data_type: deserialization data type. :rtype: str, int, float or bool :raises: TypeError if string format is not valid. """ # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text if isinstance(attr, ET.Element): attr = attr.text if not attr: if data_type == "str": # None or '', node is empty string. return '' else: # None or '', node with a strong type is None. # Don't try to model "empty bool" or "empty int" return None if data_type == 'bool': if attr in [True, False, 1, 0]: return bool(attr) elif isinstance(attr, basestring): if attr.lower() in ['true', '1']: return True elif attr.lower() in ['false', '0']: return False raise TypeError("Invalid boolean value: {}".format(attr)) if data_type == 'str': return self.deserialize_unicode(attr) return eval(data_type)(attr) @staticmethod def deserialize_unicode(data): """Preserve unicode objects in Python 2, otherwise return data as a string. :param str data: response string to be deserialized. :rtype: str or unicode """ # We might be here because we have an enum modeled as string, # and we try to deserialize a partial dict with enum inside if isinstance(data, Enum): return data # Consider this is real string try: if isinstance(data, unicode): return data except NameError: return str(data) else: return str(data) @staticmethod def deserialize_enum(data, enum_obj): """Deserialize string into enum object. If the string is not a valid enum value it will be returned as-is and a warning will be logged. :param str data: Response string to be deserialized. If this value is None or invalid it will be returned as-is. :param Enum enum_obj: Enum object to deserialize to. :rtype: Enum """ if isinstance(data, enum_obj) or data is None: return data if isinstance(data, Enum): data = data.value if isinstance(data, int): # Workaround. We might consider remove it in the future. 
# https://github.com/Azure/azure-rest-api-specs/issues/141 try: return list(enum_obj.__members__.values())[data] except IndexError: error = "{!r} is not a valid index for enum {!r}" raise DeserializationError(error.format(data, enum_obj)) try: return enum_obj(str(data)) except ValueError: for enum_value in enum_obj: if enum_value.value.lower() == str(data).lower(): return enum_value # We don't fail anymore for unknown value, we deserialize as a string _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) return Deserializer.deserialize_unicode(data) @staticmethod def deserialize_bytearray(attr): """Deserialize string into bytearray. :param str attr: response string to be deserialized. :rtype: bytearray :raises: TypeError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text return bytearray(b64decode(attr)) @staticmethod def deserialize_base64(attr): """Deserialize base64 encoded string into string. :param str attr: response string to be deserialized. :rtype: bytearray :raises: TypeError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text padding = '=' * (3 - (len(attr) + 3) % 4) attr = attr + padding encoded = attr.replace('-', '+').replace('_', '/') return b64decode(encoded) @staticmethod def deserialize_decimal(attr): """Deserialize string into Decimal object. :param str attr: response string to be deserialized. :rtype: Decimal :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: return decimal.Decimal(attr) except decimal.DecimalException as err: msg = "Invalid decimal {}".format(attr) raise_with_traceback(DeserializationError, msg, err) @staticmethod def deserialize_long(attr): """Deserialize string into long (Py2) or int (Py3). :param str attr: response string to be deserialized. :rtype: long or int :raises: ValueError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text return _long_type(attr) @staticmethod def deserialize_duration(attr): """Deserialize ISO-8601 formatted string into TimeDelta object. :param str attr: response string to be deserialized. :rtype: TimeDelta :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: duration = isodate.parse_duration(attr) except(ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize duration object." raise_with_traceback(DeserializationError, msg, err) else: return duration @staticmethod def deserialize_date(attr): """Deserialize ISO-8601 formatted string into Date object. :param str attr: response string to be deserialized. :rtype: Date :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text if re.search(r"[^\W\d_]", attr, re.I + re.U): raise DeserializationError("Date must have only digits and -. Received: %s" % attr) # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. return isodate.parse_date(attr, defaultmonth=None, defaultday=None) @staticmethod def deserialize_time(attr): """Deserialize ISO-8601 formatted string into time object. :param str attr: response string to be deserialized. :rtype: datetime.time :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text if re.search(r"[^\W\d_]", attr, re.I + re.U): raise DeserializationError("Date must have only digits and -. 
Received: %s" % attr) return isodate.parse_time(attr) @staticmethod def deserialize_rfc(attr): """Deserialize RFC-1123 formatted string into Datetime object. :param str attr: response string to be deserialized. :rtype: Datetime :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: parsed_date = email.utils.parsedate_tz(attr) date_obj = datetime.datetime( *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0)/60)) ) if not date_obj.tzinfo: date_obj = date_obj.astimezone(tz=TZ_UTC) except ValueError as err: msg = "Cannot deserialize to rfc datetime object." raise_with_traceback(DeserializationError, msg, err) else: return date_obj @staticmethod def deserialize_iso(attr): """Deserialize ISO-8601 formatted string into Datetime object. :param str attr: response string to be deserialized. :rtype: Datetime :raises: DeserializationError if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: attr = attr.upper() match = Deserializer.valid_date.match(attr) if not match: raise ValueError("Invalid datetime string: " + attr) check_decimal = attr.split('.') if len(check_decimal) > 1: decimal_str = "" for digit in check_decimal[1]: if digit.isdigit(): decimal_str += digit else: break if len(decimal_str) > 6: attr = attr.replace(decimal_str, decimal_str[0:6]) date_obj = isodate.parse_datetime(attr) test_utc = date_obj.utctimetuple() if test_utc.tm_year > 9999 or test_utc.tm_year < 1: raise OverflowError("Hit max or min date") except(ValueError, OverflowError, AttributeError) as err: msg = "Cannot deserialize datetime object." raise_with_traceback(DeserializationError, msg, err) else: return date_obj @staticmethod def deserialize_unix(attr): """Serialize Datetime object into IntTime format. This is represented as seconds. :param int attr: Object to be serialized. :rtype: Datetime :raises: DeserializationError if format invalid """ if isinstance(attr, ET.Element): attr = int(attr.text) try: date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) except ValueError as err: msg = "Cannot deserialize to unix datetime object." raise_with_traceback(DeserializationError, msg, err) else: return date_obj msrest-for-python-0.6.21/msrest/service_client.py000066400000000000000000000367161400412460500221160ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- import logging import os import sys try: from urlparse import urljoin, urlparse except ImportError: from urllib.parse import urljoin, urlparse import warnings from typing import List, Any, Dict, Union, IO, Tuple, Optional, Callable, Iterator, cast, TYPE_CHECKING # pylint: disable=unused-import from .authentication import Authentication from .universal_http import ClientRequest, ClientResponse from .universal_http.requests import ( RequestsHTTPSender, ) from .pipeline import Request, Pipeline, HTTPPolicy, SansIOHTTPPolicy from .pipeline.requests import ( PipelineRequestsHTTPSender, RequestsCredentialsPolicy, RequestsPatchSession ) from .pipeline.universal import ( HTTPLogger, RawDeserializer ) if TYPE_CHECKING: from .configuration import Configuration # pylint: disable=unused-import from .universal_http.requests import RequestsClientResponse # pylint: disable=unused-import import requests # pylint: disable=unused-import _LOGGER = logging.getLogger(__name__) class SDKClient(object): """The base class of all generated SDK client. """ def __init__(self, creds, config): # type: (Any, Configuration) -> None self._client = ServiceClient(creds, config) def close(self): # type: () -> None """Close the client if keep_alive is True. """ self._client.close() def __enter__(self): # type: () -> SDKClient self._client.__enter__() return self def __exit__(self, *exc_details): self._client.__exit__(*exc_details) class _ServiceClientCore(object): """Service client core methods. This contains methods are sans I/O and not tight to sync or async implementation. :param Configuration config: Service configuration. :param Authentication creds: Authenticated credentials. """ def __init__(self, config): # type: (Any, Configuration) -> None if config is None: raise ValueError("Config is a required parameter") self.config = config def _request(self, method, url, params, headers, content, form_content): # type: (str, str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create ClientRequest object. :param str url: URL for the request. :param dict params: URL query parameters. :param dict headers: Headers :param dict form_content: Form content """ request = ClientRequest(method, self.format_url(url)) if params: request.format_parameters(params) if headers: request.headers.update(headers) # All requests should contain a Accept. # This should be done by Autorest, but wasn't in old Autorest # Force it for now, but might deprecate it later. if "Accept" not in request.headers: _LOGGER.debug("Accept header absent and forced to application/json") request.headers['Accept'] = 'application/json' if content is not None: request.add_content(content) if form_content: request.add_formdata(form_content) return request def stream_upload(self, data, callback): """Generator for streaming request body data. :param data: A file-like object to be streamed. :param callback: Custom callback for monitoring progress. 
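        Example (editorial sketch, not part of the original docstring; the
        file path, the ``progress`` callback and the ``client`` instance below
        are placeholders)::

            def progress(chunk, response=None):
                print("uploaded {} bytes".format(len(chunk)))

            upload_gen = client.stream_upload(open("payload.bin", "rb"), progress)
            # upload_gen yields chunks of config.connection.data_block_size bytes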
""" while True: chunk = data.read(self.config.connection.data_block_size) if not chunk: break if callback and callable(callback): callback(chunk, response=None) yield chunk def format_url(self, url, **kwargs): # type: (str, Any) -> str """Format request URL with the client base URL, unless the supplied URL is already absolute. :param str url: The request URL to be formatted if necessary. """ url = url.format(**kwargs) parsed = urlparse(url) if not parsed.scheme or not parsed.netloc: url = url.lstrip('/') base = self.config.base_url.format(**kwargs).rstrip('/') url = urljoin(base + '/', url) return url def get(self, url, params=None, headers=None, content=None, form_content=None): # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create a GET request object. :param str url: The request URL. :param dict params: Request URL parameters. :param dict headers: Headers :param dict form_content: Form content """ request = self._request('GET', url, params, headers, content, form_content) request.method = 'GET' return request def put(self, url, params=None, headers=None, content=None, form_content=None): # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create a PUT request object. :param str url: The request URL. :param dict params: Request URL parameters. :param dict headers: Headers :param dict form_content: Form content """ request = self._request('PUT', url, params, headers, content, form_content) return request def post(self, url, params=None, headers=None, content=None, form_content=None): # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create a POST request object. :param str url: The request URL. :param dict params: Request URL parameters. :param dict headers: Headers :param dict form_content: Form content """ request = self._request('POST', url, params, headers, content, form_content) return request def head(self, url, params=None, headers=None, content=None, form_content=None): # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create a HEAD request object. :param str url: The request URL. :param dict params: Request URL parameters. :param dict headers: Headers :param dict form_content: Form content """ request = self._request('HEAD', url, params, headers, content, form_content) return request def patch(self, url, params=None, headers=None, content=None, form_content=None): # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create a PATCH request object. :param str url: The request URL. :param dict params: Request URL parameters. :param dict headers: Headers :param dict form_content: Form content """ request = self._request('PATCH', url, params, headers, content, form_content) return request def delete(self, url, params=None, headers=None, content=None, form_content=None): # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create a DELETE request object. :param str url: The request URL. :param dict params: Request URL parameters. 
:param dict headers: Headers :param dict form_content: Form content """ request = self._request('DELETE', url, params, headers, content, form_content) return request def merge(self, url, params=None, headers=None, content=None, form_content=None): # type: (str, Optional[Dict[str, str]], Optional[Dict[str, str]], Any, Optional[Dict[str, Any]]) -> ClientRequest """Create a MERGE request object. :param str url: The request URL. :param dict params: Request URL parameters. :param dict headers: Headers :param dict form_content: Form content """ request = self._request('MERGE', url, params, headers, content, form_content) return request class ServiceClient(_ServiceClientCore): """REST Service Client. Maintains client pipeline and handles all requests and responses. :param creds: Deprecated, will be removed in next major version. Creds are now read from config.credentials. :param Configuration config: Service configuration. """ def __init__(self, creds, config): # type: (Any, Configuration) -> None super(ServiceClient, self).__init__(config) # If not Autorest, check if credentials comes from here and not config if creds and config.credentials is None: warnings.warn("Creds parameter is deprecated. Set config.credentials instead.", DeprecationWarning) config.credentials = creds self.config.pipeline = self._create_default_pipeline() def _create_default_pipeline(self): # type: () -> Pipeline[ClientRequest, RequestsClientResponse] creds = self.config.credentials policies = [ self.config.user_agent_policy, # UserAgent policy RequestsPatchSession(), # Support deprecated operation config at the session level self.config.http_logger_policy # HTTP request/response log ] # type: List[Union[HTTPPolicy, SansIOHTTPPolicy]] if creds: if isinstance(creds, (HTTPPolicy, SansIOHTTPPolicy)): policies.insert(1, creds) else: # Assume this is the old credentials class, and then requests. Wrap it. policies.insert(1, RequestsCredentialsPolicy(creds)) # Set credentials for requests based session return Pipeline( policies, PipelineRequestsHTTPSender(RequestsHTTPSender(self.config)) # Send HTTP request using requests ) def __enter__(self): # type: () -> ServiceClient self.config.keep_alive = True self.config.pipeline.__enter__() return self def __exit__(self, *exc_details): self.config.pipeline.__exit__(*exc_details) self.config.keep_alive = False def close(self): # type: () -> None """Close the pipeline if keep_alive is True. """ self.config.pipeline.__exit__() # type: ignore def send_formdata(self, request, headers=None, content=None, **config): """Send data as a multipart form-data request. We only deal with file-like objects or strings at this point. The requests is not yet streamed. This method is deprecated, and shouldn't be used anymore. :param ClientRequest request: The request object to be sent. :param dict headers: Any headers to add to the request. :param dict content: Dictionary of the fields of the formdata. :param config: Any specific config overrides. """ request.headers = headers request.add_formdata(content) return self.send(request, **config) def send(self, request, headers=None, content=None, **kwargs): """Prepare and send request object according to configuration. :param ClientRequest request: The request object to be sent. :param dict headers: Any headers to add to the request. :param content: Any body data to add to the request. 
:param config: Any specific config overrides """ # "content" and "headers" are deprecated, only old SDK if headers: request.headers.update(headers) if not request.files and request.data is None and content is not None: request.add_content(content) # End of deprecation response = None kwargs.setdefault('stream', True) try: pipeline_response = self.config.pipeline.run(request, **kwargs) # There is too much thing that expects this method to return a "requests.Response" # to break it in a compatible release. # Also, to be pragmatic in the "sync" world "requests" rules anyway. # However, attach the Universal HTTP response # to get the streaming generator. response = pipeline_response.http_response.internal_response response._universal_http_response = pipeline_response.http_response response.context = pipeline_response.context return response finally: self._close_local_session_if_necessary(response, kwargs['stream']) def _close_local_session_if_necessary(self, response, stream): # Here, it's a local session, I might close it. if not self.config.keep_alive and (not response or not stream): self.config.pipeline._sender.driver.session.close() def stream_download(self, data, callback): # type: (Union[requests.Response, ClientResponse], Callable) -> Iterator[bytes] """Generator for streaming request body data. :param data: A response object to be streamed. :param callback: Custom callback for monitoring progress. """ block = self.config.connection.data_block_size try: # Assume this is ClientResponse, which it should be if backward compat was not important return cast(ClientResponse, data).stream_download(block, callback) except AttributeError: try: # Assume this is the patched requests.Response from "send" return data._universal_http_response.stream_download(block, callback) # type: ignore except AttributeError: # Assume this is a raw requests.Response from .universal_http.requests import RequestsClientResponse response = RequestsClientResponse(None, data) return response.stream_download(block, callback) def add_header(self, header, value): # type: (str, str) -> None """Add a persistent header - this header will be applied to all requests sent during the current client session. .. deprecated:: 0.5.0 Use config.headers instead :param str header: The header name. :param str value: The header value. """ warnings.warn("Private attribute _client.add_header is deprecated. Use config.headers instead.", DeprecationWarning) self.config.headers[header] = value msrest-for-python-0.6.21/msrest/universal_http/000077500000000000000000000000001400412460500216005ustar00rootroot00000000000000msrest-for-python-0.6.21/msrest/universal_http/__init__.py000066400000000000000000000372631400412460500237240ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 import abc try: import configparser from configparser import NoOptionError except ImportError: import ConfigParser as configparser # type: ignore from ConfigParser import NoOptionError # type: ignore import json import logging import os.path try: from urlparse import urlparse except ImportError: from urllib.parse import urlparse import xml.etree.ElementTree as ET from typing import TYPE_CHECKING, Generic, TypeVar, cast, IO, List, Union, Any, Mapping, Dict, Optional, Tuple, Callable, Iterator, MutableMapping # pylint: disable=unused-import HTTPResponseType = TypeVar("HTTPResponseType", bound='HTTPClientResponse') # This file is NOT using any "requests" HTTP implementation # However, the CaseInsensitiveDict is handy. # If one day we reach the point where "requests" can be skip totally, # might provide our own implementation from requests.structures import CaseInsensitiveDict from ..exceptions import ClientRequestError, raise_with_traceback if TYPE_CHECKING: from ..serialization import Model # pylint: disable=unused-import _LOGGER = logging.getLogger(__name__) try: ABC = abc.ABC except AttributeError: # Python 2.7, abc exists, but not ABC ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()}) # type: ignore try: from contextlib import AbstractContextManager # type: ignore except ImportError: # Python <= 3.5 class AbstractContextManager(object): # type: ignore def __enter__(self): """Return `self` upon entering the runtime context.""" return self @abc.abstractmethod def __exit__(self, exc_type, exc_value, traceback): """Raise any exception triggered within the runtime context.""" return None class HTTPSender(AbstractContextManager, ABC): """An http sender ABC. """ @abc.abstractmethod def send(self, request, **config): # type: (ClientRequest, Any) -> ClientResponse """Send the request using this HTTP sender. """ pass class HTTPSenderConfiguration(object): """HTTP sender configuration. This is composed of generic HTTP configuration, and could be use as a common HTTP configuration format. :param str filepath: Path to existing config file (optional). """ def __init__(self, filepath=None): # Communication configuration self.connection = ClientConnection() # Headers (sent with every requests) self.headers = {} # type: Dict[str, str] # ProxyConfiguration self.proxies = ClientProxies() # Redirect configuration self.redirect_policy = ClientRedirectPolicy() self._config = configparser.ConfigParser() self._config.optionxform = str # type: ignore if filepath: self.load(filepath) def _clear_config(self): # type: () -> None """Clearout config object in memory.""" for section in self._config.sections(): self._config.remove_section(section) def save(self, filepath): # type: (str) -> None """Save current configuration to file. :param str filepath: Path to file where settings will be saved. :raises: ValueError if supplied filepath cannot be written to. 
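        Example (editorial sketch; ``sender.cfg`` is a placeholder path)::

            config = HTTPSenderConfiguration()
            config.connection.timeout = 300
            config.save("sender.cfg")
            # Settings can later be restored with load() or via the constructor:
            restored = HTTPSenderConfiguration(filepath="sender.cfg")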
""" sections = [ "Connection", "Proxies", "RedirectPolicy"] for section in sections: self._config.add_section(section) self._config.set("Connection", "timeout", self.connection.timeout) self._config.set("Connection", "verify", self.connection.verify) self._config.set("Connection", "cert", self.connection.cert) self._config.set("Proxies", "proxies", self.proxies.proxies) self._config.set("Proxies", "env_settings", self.proxies.use_env_settings) self._config.set("RedirectPolicy", "allow", self.redirect_policy.allow) self._config.set("RedirectPolicy", "max_redirects", self.redirect_policy.max_redirects) try: with open(filepath, 'w') as configfile: self._config.write(configfile) except (KeyError, EnvironmentError): error = "Supplied config filepath invalid." raise_with_traceback(ValueError, error) finally: self._clear_config() def load(self, filepath): # type: (str) -> None """Load configuration from existing file. :param str filepath: Path to existing config file. :raises: ValueError if supplied config file is invalid. """ try: self._config.read(filepath) import ast self.connection.timeout = \ self._config.getint("Connection", "timeout") self.connection.verify = \ self._config.getboolean("Connection", "verify") self.connection.cert = \ self._config.get("Connection", "cert") self.proxies.proxies = \ ast.literal_eval(self._config.get("Proxies", "proxies")) self.proxies.use_env_settings = \ self._config.getboolean("Proxies", "env_settings") self.redirect_policy.allow = \ self._config.getboolean("RedirectPolicy", "allow") self.redirect_policy.max_redirects = \ self._config.getint("RedirectPolicy", "max_redirects") except (ValueError, EnvironmentError, NoOptionError): error = "Supplied config file incompatible." raise_with_traceback(ValueError, error) finally: self._clear_config() class ClientRequest(object): """Represents a HTTP request. URL can be given without query parameters, to be added later using "format_parameters". Instance can be created without data, to be added later using "add_content" Instance can be created without files, to be added later using "add_formdata" :param str method: HTTP method (GET, HEAD, etc.) :param str url: At least complete scheme/host/path :param dict[str,str] headers: HTTP headers :param files: Files list. :param data: Body to be sent. :type data: bytes or str. """ def __init__(self, method, url, headers=None, files=None, data=None): # type: (str, str, Mapping[str, str], Any, Any) -> None self.method = method self.url = url self.headers = CaseInsensitiveDict(headers) self.files = files self.data = data def __repr__(self): return '' % (self.method) @property def body(self): """Alias to data.""" return self.data @body.setter def body(self, value): self.data = value def format_parameters(self, params): # type: (Dict[str, str]) -> None """Format parameters into a valid query string. It's assumed all parameters have already been quoted as valid URL strings. :param dict params: A dictionary of parameters. """ query = urlparse(self.url).query if query: self.url = self.url.partition('?')[0] existing_params = { p[0]: p[-1] for p in [p.partition('=') for p in query.split('&')] } params.update(existing_params) query_params = ["{}={}".format(k, v) for k, v in params.items()] query = '?' + '&'.join(query_params) self.url = self.url + query def add_content(self, data): # type: (Optional[Union[Dict[str, Any], ET.Element]]) -> None """Add a body to the request. :param data: Request body data, can be a json serializable object (e.g. dictionary) or a generator (e.g. file data). 
""" if data is None: return if isinstance(data, ET.Element): bytes_data = ET.tostring(data, encoding="utf8") self.headers['Content-Length'] = str(len(bytes_data)) self.data = bytes_data return # By default, assume JSON try: self.data = json.dumps(data) self.headers['Content-Length'] = str(len(self.data)) except TypeError: self.data = data @staticmethod def _format_data(data): # type: (Union[str, IO]) -> Union[Tuple[None, str], Tuple[Optional[str], IO, str]] """Format field data according to whether it is a stream or a string for a form-data request. :param data: The request field data. :type data: str or file-like object. """ if hasattr(data, 'read'): data = cast(IO, data) data_name = None try: if data.name[0] != '<' and data.name[-1] != '>': data_name = os.path.basename(data.name) except (AttributeError, TypeError): pass return (data_name, data, "application/octet-stream") return (None, cast(str, data)) def add_formdata(self, content=None): # type: (Optional[Dict[str, str]]) -> None """Add data as a multipart form-data request to the request. We only deal with file-like objects or strings at this point. The requests is not yet streamed. :param dict headers: Any headers to add to the request. :param dict content: Dictionary of the fields of the formdata. """ if content is None: content = {} content_type = self.headers.pop('Content-Type', None) if self.headers else None if content_type and content_type.lower() == 'application/x-www-form-urlencoded': # Do NOT use "add_content" that assumes input is JSON self.data = {f: d for f, d in content.items() if d is not None} else: # Assume "multipart/form-data" self.files = {f: self._format_data(d) for f, d in content.items() if d is not None} class HTTPClientResponse(object): """Represent a HTTP response. No body is defined here on purpose, since async pipeline will provide async ways to access the body You have two differents types of body: - Full in-memory using "body" as bytes """ def __init__(self, request, internal_response): # type: (ClientRequest, Any) -> None self.request = request self.internal_response = internal_response self.status_code = None # type: Optional[int] self.headers = {} # type: MutableMapping[str, str] self.reason = None # type: Optional[str] def body(self): # type: () -> bytes """Return the whole body as bytes in memory. """ pass def text(self, encoding=None): # type: (str) -> str """Return the whole body as a string. :param str encoding: The encoding to apply. If None, use "utf-8-sig". Implementation can be smarter if they want (using headers). """ return self.body().decode(encoding or "utf-8-sig") def raise_for_status(self): """Raise for status. Should be overriden, but basic implementation provided. """ if self.status_code >= 400: raise ClientRequestError("Received status code {}".format(self.status_code)) class ClientResponse(HTTPClientResponse): def stream_download(self, chunk_size=None, callback=None): # type: (Optional[int], Optional[Callable]) -> Iterator[bytes] """Generator for streaming request body data. Should be implemented by sub-classes if streaming download is supported. :param callback: Custom callback for monitoring progress. :param int chunk_size: """ pass class ClientRedirectPolicy(object): """Redirect configuration settings. 
""" def __init__(self): self.allow = True self.max_redirects = 30 def __bool__(self): # type: () -> bool """Whether redirects are allowed.""" return self.allow def __call__(self): # type: () -> int """Return configuration to be applied to connection.""" debug = "Configuring redirects: allow=%r, max=%r" _LOGGER.debug(debug, self.allow, self.max_redirects) return self.max_redirects class ClientProxies(object): """Proxy configuration settings. Proxies can also be configured using HTTP_PROXY and HTTPS_PROXY environment variables, in which case set use_env_settings to True. """ def __init__(self): self.proxies = {} self.use_env_settings = True def __call__(self): # type: () -> Dict[str, str] """Return configuration to be applied to connection.""" proxy_string = "\n".join( [" {}: {}".format(k, v) for k, v in self.proxies.items()]) _LOGGER.debug("Configuring proxies: %r", proxy_string) debug = "Evaluate proxies against ENV settings: %r" _LOGGER.debug(debug, self.use_env_settings) return self.proxies def add(self, protocol, proxy_url): # type: (str, str) -> None """Add proxy. :param str protocol: Protocol for which proxy is to be applied. Can be 'http', 'https', etc. Can also include host. :param str proxy_url: The proxy URL. Where basic auth is required, use the format: http://user:password@host """ self.proxies[protocol] = proxy_url class ClientConnection(object): """Request connection configuration settings. """ def __init__(self): self.timeout = 100 self.verify = True self.cert = None self.data_block_size = 4096 def __call__(self): # type: () -> Dict[str, Union[str, int]] """Return configuration to be applied to connection.""" debug = "Configuring request: timeout=%r, verify=%r, cert=%r" _LOGGER.debug(debug, self.timeout, self.verify, self.cert) return {'timeout': self.timeout, 'verify': self.verify, 'cert': self.cert} __all__ = [ 'ClientRequest', 'ClientResponse', 'HTTPSender', # Generic HTTP configuration 'HTTPSenderConfiguration', 'ClientRedirectPolicy', 'ClientProxies', 'ClientConnection' ] try: from .async_abc import AsyncHTTPSender, AsyncClientResponse # pylint: disable=unused-import from .async_abc import __all__ as _async_all __all__ += _async_all except SyntaxError: # Python 2 pass except ImportError: # pyinstaller won't include Py3 files in Py2.7 mode pass msrest-for-python-0.6.21/msrest/universal_http/aiohttp.py000066400000000000000000000077711400412460500236360ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- from typing import Any, Callable, AsyncIterator, Optional import aiohttp from multidict import CIMultiDict from . import AsyncHTTPSender, ClientRequest, AsyncClientResponse # Matching requests, because why not? CONTENT_CHUNK_SIZE = 10 * 1024 class AioHTTPSender(AsyncHTTPSender): """AioHttp HTTP sender implementation. """ def __init__(self, *, loop=None): self._session = aiohttp.ClientSession(loop=loop) async def __aenter__(self): await self._session.__aenter__() return self async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ await self._session.__aexit__(*exc_details) async def send(self, request: ClientRequest, **config: Any) -> AsyncClientResponse: """Send the request using this HTTP sender. Will pre-load the body into memory to be available with a sync method. pass stream=True to avoid this behavior. """ result = await self._session.request( request.method, request.url, **config ) response = AioHttpClientResponse(request, result) if not config.get("stream", False): await response.load_body() return response class AioHttpClientResponse(AsyncClientResponse): def __init__(self, request: ClientRequest, aiohttp_response: aiohttp.ClientResponse) -> None: super(AioHttpClientResponse, self).__init__(request, aiohttp_response) # https://aiohttp.readthedocs.io/en/stable/client_reference.html#aiohttp.ClientResponse self.status_code = aiohttp_response.status self.headers = CIMultiDict(aiohttp_response.headers) self.reason = aiohttp_response.reason self._body = None def body(self) -> bytes: """Return the whole body as bytes in memory. """ if not self._body: raise ValueError("Body is not available. Call async method load_body, or do your call with stream=False.") return self._body async def load_body(self) -> None: """Load in memory the body, so it could be accessible from sync methods.""" self._body = await self.internal_response.read() def raise_for_status(self): self.internal_response.raise_for_status() def stream_download(self, chunk_size: Optional[int] = None, callback: Optional[Callable] = None) -> AsyncIterator[bytes]: """Generator for streaming request body data. """ chunk_size = chunk_size or CONTENT_CHUNK_SIZE async def async_gen(resp): while True: chunk = await resp.content.read(chunk_size) if not chunk: break callback(chunk, resp) return async_gen(self.internal_response) msrest-for-python-0.6.21/msrest/universal_http/async_abc.py000066400000000000000000000062611400412460500241010ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- import abc from typing import Any, List, Union, Callable, AsyncIterator, Optional try: from contextlib import AbstractAsyncContextManager # type: ignore except ImportError: # Python <= 3.7 class AbstractAsyncContextManager(object): # type: ignore async def __aenter__(self): """Return `self` upon entering the runtime context.""" return self @abc.abstractmethod async def __aexit__(self, exc_type, exc_value, traceback): """Raise any exception triggered within the runtime context.""" return None from . import ClientRequest, HTTPClientResponse class AsyncClientResponse(HTTPClientResponse): def stream_download(self, chunk_size: Optional[int] = None, callback: Optional[Callable] = None) -> AsyncIterator[bytes]: """Generator for streaming request body data. Should be implemented by sub-classes if streaming download is supported. :param callback: Custom callback for monitoring progress. :param int chunk_size: """ pass class AsyncHTTPSender(AbstractAsyncContextManager, abc.ABC): """An http sender ABC. """ @abc.abstractmethod async def send(self, request: ClientRequest, **config: Any) -> AsyncClientResponse: """Send the request using this HTTP sender. """ pass def build_context(self) -> Any: """Allow the sender to build a context that will be passed across the pipeline with the request. Return type has no constraints. Implementation is not required and None by default. """ return None def __enter__(self): raise TypeError("Use 'async with' instead") def __exit__(self, exc_type, exc_val, exc_tb): # __exit__ should exist in pair with __enter__ but never executed pass # pragma: no cover __all__ = [ 'AsyncHTTPSender', 'AsyncClientResponse' ]msrest-for-python-0.6.21/msrest/universal_http/async_requests.py000066400000000000000000000217451400412460500252330ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- import asyncio from collections.abc import AsyncIterator import functools import logging from typing import Any, Callable, Optional, AsyncIterator as AsyncIteratorType from oauthlib import oauth2 import requests from requests.models import CONTENT_CHUNK_SIZE from ..exceptions import ( TokenExpiredError, ClientRequestError, raise_with_traceback) from . import AsyncHTTPSender, ClientRequest, AsyncClientResponse from .requests import ( BasicRequestsHTTPSender, RequestsHTTPSender, HTTPRequestsClientResponse ) _LOGGER = logging.getLogger(__name__) class AsyncBasicRequestsHTTPSender(BasicRequestsHTTPSender, AsyncHTTPSender): # type: ignore async def __aenter__(self): return super(AsyncBasicRequestsHTTPSender, self).__enter__() async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ return super(AsyncBasicRequestsHTTPSender, self).__exit__() async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse: # type: ignore """Send the request using this HTTP sender. """ # It's not recommended to provide its own session, and is mostly # to enable some legacy code to plug correctly session = kwargs.pop('session', self.session) loop = kwargs.get("loop", asyncio.get_event_loop()) future = loop.run_in_executor( None, functools.partial( session.request, request.method, request.url, **kwargs ) ) try: return AsyncRequestsClientResponse( request, await future ) except requests.RequestException as err: msg = "Error occurred in request." raise_with_traceback(ClientRequestError, msg, err) class AsyncRequestsHTTPSender(AsyncBasicRequestsHTTPSender, RequestsHTTPSender): # type: ignore async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse: # type: ignore """Send the request using this HTTP sender. 
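        Example (editorial sketch; ``config`` and ``request`` stand for an
        existing RequestHTTPSenderConfiguration and ClientRequest)::

            async with AsyncRequestsHTTPSender(config) as sender:
                response = await sender.send(request, stream=False)
                print(response.status_code)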
""" requests_kwargs = self._configure_send(request, **kwargs) return await super(AsyncRequestsHTTPSender, self).send(request, **requests_kwargs) class _MsrestStopIteration(Exception): pass def _msrest_next(iterator): """"To avoid: TypeError: StopIteration interacts badly with generators and cannot be raised into a Future """ try: return next(iterator) except StopIteration: raise _MsrestStopIteration() class StreamDownloadGenerator(AsyncIterator): def __init__(self, response: requests.Response, user_callback: Optional[Callable] = None, block: Optional[int] = None) -> None: self.response = response self.block = block or CONTENT_CHUNK_SIZE self.user_callback = user_callback self.iter_content_func = self.response.iter_content(self.block) async def __anext__(self): loop = asyncio.get_event_loop() try: chunk = await loop.run_in_executor( None, _msrest_next, self.iter_content_func, ) if not chunk: raise _MsrestStopIteration() if self.user_callback and callable(self.user_callback): self.user_callback(chunk, self.response) return chunk except _MsrestStopIteration: self.response.close() raise StopAsyncIteration() except Exception as err: _LOGGER.warning("Unable to stream download: %s", err) self.response.close() raise class AsyncRequestsClientResponse(AsyncClientResponse, HTTPRequestsClientResponse): def stream_download(self, chunk_size: Optional[int] = None, callback: Optional[Callable] = None) -> AsyncIteratorType[bytes]: """Generator for streaming request body data. :param callback: Custom callback for monitoring progress. :param int chunk_size: """ return StreamDownloadGenerator( self.internal_response, callback, chunk_size ) # Trio support try: import trio class TrioStreamDownloadGenerator(AsyncIterator): def __init__(self, response: requests.Response, user_callback: Optional[Callable] = None, block: Optional[int] = None) -> None: self.response = response self.block = block or CONTENT_CHUNK_SIZE self.user_callback = user_callback self.iter_content_func = self.response.iter_content(self.block) async def __anext__(self): try: chunk = await trio.run_sync_in_worker_thread( _msrest_next, self.iter_content_func, ) if not chunk: raise _MsrestStopIteration() if self.user_callback and callable(self.user_callback): self.user_callback(chunk, self.response) return chunk except _MsrestStopIteration: self.response.close() raise StopAsyncIteration() except Exception as err: _LOGGER.warning("Unable to stream download: %s", err) self.response.close() raise class TrioAsyncRequestsClientResponse(AsyncClientResponse, HTTPRequestsClientResponse): def stream_download(self, chunk_size: Optional[int] = None, callback: Optional[Callable] = None) -> AsyncIteratorType[bytes]: """Generator for streaming request body data. :param callback: Custom callback for monitoring progress. :param int chunk_size: """ return TrioStreamDownloadGenerator( self.internal_response, callback, chunk_size ) class AsyncTrioBasicRequestsHTTPSender(BasicRequestsHTTPSender, AsyncHTTPSender): # type: ignore async def __aenter__(self): return super(AsyncTrioBasicRequestsHTTPSender, self).__enter__() async def __aexit__(self, *exc_details): # pylint: disable=arguments-differ return super(AsyncTrioBasicRequestsHTTPSender, self).__exit__() async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse: # type: ignore """Send the request using this HTTP sender. 
""" # It's not recommended to provide its own session, and is mostly # to enable some legacy code to plug correctly session = kwargs.pop('session', self.session) trio_limiter = kwargs.get("trio_limiter", None) future = trio.run_sync_in_worker_thread( functools.partial( session.request, request.method, request.url, **kwargs ), limiter=trio_limiter ) try: return TrioAsyncRequestsClientResponse( request, await future ) except requests.RequestException as err: msg = "Error occurred in request." raise_with_traceback(ClientRequestError, msg, err) class AsyncTrioRequestsHTTPSender(AsyncTrioBasicRequestsHTTPSender, RequestsHTTPSender): # type: ignore async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse: # type: ignore """Send the request using this HTTP sender. """ requests_kwargs = self._configure_send(request, **kwargs) return await super(AsyncTrioRequestsHTTPSender, self).send(request, **requests_kwargs) except ImportError: # trio not installed passmsrest-for-python-0.6.21/msrest/universal_http/requests.py000066400000000000000000000410641400412460500240320ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- """ This module is the requests implementation of Pipeline ABC """ from __future__ import absolute_import # we have a "requests" module that conflicts with "requests" on Py2.7 import contextlib import logging import threading from typing import TYPE_CHECKING, List, Callable, Iterator, Any, Union, Dict, Optional # pylint: disable=unused-import import warnings try: from configparser import NoOptionError except ImportError: from ConfigParser import NoOptionError # type: ignore from oauthlib import oauth2 import requests from requests.models import CONTENT_CHUNK_SIZE from urllib3 import Retry # Needs requests 2.16 at least to be safe from ..exceptions import ( TokenExpiredError, ClientRequestError, raise_with_traceback) from . import HTTPSender, HTTPClientResponse, ClientResponse, HTTPSenderConfiguration if TYPE_CHECKING: from . 
import ClientRequest # pylint: disable=unused-import _LOGGER = logging.getLogger(__name__) class HTTPRequestsClientResponse(HTTPClientResponse): def __init__(self, request, requests_response): super(HTTPRequestsClientResponse, self).__init__(request, requests_response) self.status_code = requests_response.status_code self.headers = requests_response.headers self.reason = requests_response.reason def body(self): return self.internal_response.content def text(self, encoding=None): if encoding: self.internal_response.encoding = encoding return self.internal_response.text def raise_for_status(self): self.internal_response.raise_for_status() class RequestsClientResponse(HTTPRequestsClientResponse, ClientResponse): def stream_download(self, chunk_size=None, callback=None): # type: (Optional[int], Optional[Callable]) -> Iterator[bytes] """Generator for streaming request body data. :param callback: Custom callback for monitoring progress. :param int chunk_size: """ chunk_size = chunk_size or CONTENT_CHUNK_SIZE with contextlib.closing(self.internal_response) as response: # https://github.com/PyCQA/pylint/issues/1437 for chunk in response.iter_content(chunk_size): # pylint: disable=no-member if not chunk: break if callback and callable(callback): callback(chunk, response=response) yield chunk class BasicRequestsHTTPSender(HTTPSender): """Implements a basic requests HTTP sender. Since requests team recommends to use one session per requests, you should not consider this class as thread-safe, since it will use one Session per instance. In this simple implementation: - You provide the configured session if you want to, or a basic session is created. - All kwargs received by "send" are sent to session.request directly """ def __init__(self, session=None): # type: (Optional[requests.Session]) -> None self.session = session or requests.Session() def __enter__(self): # type: () -> BasicRequestsHTTPSender return self def __exit__(self, *exc_details): # pylint: disable=arguments-differ self.close() def close(self): self.session.close() def send(self, request, **kwargs): # type: (ClientRequest, Any) -> ClientResponse """Send request object according to configuration. Allowed kwargs are: - session : will override the driver session and use yours. Should NOT be done unless really required. - anything else is sent straight to requests. :param ClientRequest request: The request object to be sent. """ # It's not recommended to provide its own session, and is mostly # to enable some legacy code to plug correctly session = kwargs.pop('session', self.session) try: response = session.request( request.method, request.url, **kwargs) except requests.RequestException as err: msg = "Error occurred in request." raise_with_traceback(ClientRequestError, msg, err) return RequestsClientResponse(request, response) def _patch_redirect(session): # type: (requests.Session) -> None """Whether redirect policy should be applied based on status code. HTTP spec says that on 301/302 not HEAD/GET, should NOT redirect. But requests does, to follow browser more than spec https://github.com/requests/requests/blob/f6e13ccfc4b50dc458ee374e5dba347205b9a2da/requests/sessions.py#L305-L314 This patches "requests" to be more HTTP compliant. Note that this is super dangerous, since technically this is not public API. 
""" def enforce_http_spec(resp, request): if resp.status_code in (301, 302) and \ request.method not in ['GET', 'HEAD']: return False return True redirect_logic = session.resolve_redirects def wrapped_redirect(resp, req, **kwargs): attempt = enforce_http_spec(resp, req) return redirect_logic(resp, req, **kwargs) if attempt else [] wrapped_redirect.is_msrest_patched = True # type: ignore session.resolve_redirects = wrapped_redirect # type: ignore class RequestsHTTPSender(BasicRequestsHTTPSender): """A requests HTTP sender that can consume a msrest.Configuration object. This instance will consume the following configuration attributes: - connection - proxies - retry_policy - redirect_policy - enable_http_logger - hooks - session_configuration_callback """ _protocols = ['http://', 'https://'] # Set of authorized kwargs at the operation level _REQUESTS_KWARGS = [ 'cookies', 'verify', 'timeout', 'allow_redirects', 'proxies', 'verify', 'cert' ] def __init__(self, config=None): # type: (Optional[RequestHTTPSenderConfiguration]) -> None self._session_mapping = threading.local() self.config = config or RequestHTTPSenderConfiguration() super(RequestsHTTPSender, self).__init__() @property # type: ignore def session(self): try: return self._session_mapping.session except AttributeError: self._session_mapping.session = requests.Session() self._init_session(self._session_mapping.session) return self._session_mapping.session @session.setter def session(self, value): self._init_session(value) self._session_mapping.session = value def _init_session(self, session): # type: (requests.Session) -> None """Init session level configuration of requests. This is initialization I want to do once only on a session. """ _patch_redirect(session) # Change max_retries in current all installed adapters max_retries = self.config.retry_policy() for protocol in self._protocols: session.adapters[protocol].max_retries = max_retries def _configure_send(self, request, **kwargs): # type: (ClientRequest, Any) -> Dict[str, str] """Configure the kwargs to use with requests. See "send" for kwargs details. :param ClientRequest request: The request object to be sent. :returns: The requests.Session.request kwargs :rtype: dict[str,str] """ requests_kwargs = {} # type: Any session = kwargs.pop('session', self.session) # If custom session was not create here if session is not self.session: self._init_session(session) session.max_redirects = int(self.config.redirect_policy()) session.trust_env = bool(self.config.proxies.use_env_settings) # Initialize requests_kwargs with "config" value requests_kwargs.update(self.config.connection()) requests_kwargs['allow_redirects'] = bool(self.config.redirect_policy) requests_kwargs['headers'] = self.config.headers.copy() proxies = self.config.proxies() if proxies: requests_kwargs['proxies'] = proxies # Replace by operation level kwargs # We allow some of them, since some like stream or json are controled by msrest for key in kwargs: if key in self._REQUESTS_KWARGS: requests_kwargs[key] = kwargs[key] # Hooks. Deprecated, should be a policy def make_user_hook_cb(user_hook, session): def user_hook_cb(r, *args, **kwargs): kwargs.setdefault("msrest", {})['session'] = session return user_hook(r, *args, **kwargs) return user_hook_cb hooks = [] for user_hook in self.config.hooks: hooks.append(make_user_hook_cb(user_hook, self.session)) if hooks: requests_kwargs['hooks'] = {'response': hooks} # Configuration callback. 
Deprecated, should be a policy output_kwargs = self.config.session_configuration_callback( session, self.config, kwargs, **requests_kwargs ) if output_kwargs is not None: requests_kwargs = output_kwargs # If custom session was not create here if session is not self.session: requests_kwargs['session'] = session ### Autorest forced kwargs now ### # If Autorest needs this response to be streamable. True for compat. requests_kwargs['stream'] = kwargs.get('stream', True) if request.files: requests_kwargs['files'] = request.files elif request.data: requests_kwargs['data'] = request.data requests_kwargs['headers'].update(request.headers) return requests_kwargs def send(self, request, **kwargs): # type: (ClientRequest, Any) -> ClientResponse """Send request object according to configuration. Available kwargs: - session : will override the driver session and use yours. Should NOT be done unless really required. - A subset of what requests.Session.request can receive: - cookies - verify - timeout - allow_redirects - proxies - verify - cert Everything else will be silently ignored. :param ClientRequest request: The request object to be sent. """ requests_kwargs = self._configure_send(request, **kwargs) return super(RequestsHTTPSender, self).send(request, **requests_kwargs) class ClientRetryPolicy(object): """Retry configuration settings. Container for retry policy object. """ safe_codes = [i for i in range(500) if i != 408] + [501, 505] def __init__(self): self.policy = Retry() self.policy.total = 3 self.policy.connect = 3 self.policy.read = 3 self.policy.backoff_factor = 0.8 self.policy.BACKOFF_MAX = 90 retry_codes = [i for i in range(999) if i not in self.safe_codes] self.policy.status_forcelist = retry_codes self.policy.method_whitelist = ['HEAD', 'TRACE', 'GET', 'PUT', 'OPTIONS', 'DELETE', 'POST', 'PATCH'] def __call__(self): # type: () -> Retry """Return configuration to be applied to connection.""" debug = ("Configuring retry: max_retries=%r, " "backoff_factor=%r, max_backoff=%r") _LOGGER.debug( debug, self.retries, self.backoff_factor, self.max_backoff) return self.policy @property def retries(self): # type: () -> int """Total number of allowed retries.""" return self.policy.total @retries.setter def retries(self, value): # type: (int) -> None self.policy.total = value self.policy.connect = value self.policy.read = value @property def backoff_factor(self): # type: () -> Union[int, float] """Factor by which back-off delay is incementally increased.""" return self.policy.backoff_factor @backoff_factor.setter def backoff_factor(self, value): # type: (Union[int, float]) -> None self.policy.backoff_factor = value @property def max_backoff(self): # type: () -> int """Max retry back-off delay.""" return self.policy.BACKOFF_MAX @max_backoff.setter def max_backoff(self, value): # type: (int) -> None self.policy.BACKOFF_MAX = value def default_session_configuration_callback(session, global_config, local_config, **kwargs): # pylint: disable=unused-argument # type: (requests.Session, RequestHTTPSenderConfiguration, Dict[str,str], str) -> Dict[str, str] """Configuration callback if you need to change default session configuration. :param requests.Session session: The session. :param Configuration global_config: The global configuration. :param dict[str,str] local_config: The on-the-fly configuration passed on the call. :param dict[str,str] kwargs: The current computed values for session.request method. :return: Must return kwargs, to be passed to session.request. 
If None is return, initial kwargs will be used. :rtype: dict[str,str] """ return kwargs class RequestHTTPSenderConfiguration(HTTPSenderConfiguration): """Requests specific HTTP sender configuration. :param str filepath: Path to existing config file (optional). """ def __init__(self, filepath=None): # type: (Optional[str]) -> None super(RequestHTTPSenderConfiguration, self).__init__() # Retry configuration self.retry_policy = ClientRetryPolicy() # Requests hooks. Must respect requests hook callback signature # Note that we will inject the following parameters: # - kwargs['msrest']['session'] with the current session self.hooks = [] # type: List[Callable[[requests.Response, str, str], None]] self.session_configuration_callback = default_session_configuration_callback if filepath: self.load(filepath) def save(self, filepath): """Save current configuration to file. :param str filepath: Path to file where settings will be saved. :raises: ValueError if supplied filepath cannot be written to. """ self._config.add_section("RetryPolicy") self._config.set("RetryPolicy", "retries", str(self.retry_policy.retries)) self._config.set("RetryPolicy", "backoff_factor", str(self.retry_policy.backoff_factor)) self._config.set("RetryPolicy", "max_backoff", str(self.retry_policy.max_backoff)) super(RequestHTTPSenderConfiguration, self).save(filepath) def load(self, filepath): try: self.retry_policy.retries = \ self._config.getint("RetryPolicy", "retries") self.retry_policy.backoff_factor = \ self._config.getfloat("RetryPolicy", "backoff_factor") self.retry_policy.max_backoff = \ self._config.getint("RetryPolicy", "max_backoff") except (ValueError, EnvironmentError, NoOptionError): error = "Supplied config file incompatible." raise_with_traceback(ValueError, error) finally: self._clear_config() super(RequestHTTPSenderConfiguration, self).load(filepath) msrest-for-python-0.6.21/msrest/version.py000066400000000000000000000025601400412460500205730ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- #: version of this package. 
Use msrest.__version__ instead msrest_version = "0.6.21" msrest-for-python-0.6.21/pylintrc000066400000000000000000000032101400412460500167770ustar00rootroot00000000000000[MASTER] ignore-patterns=test_* reports=no [MESSAGES CONTROL] # For all codes, run 'pylint --list-msgs' or go to 'https://pylint.readthedocs.io/en/latest/reference_guide/features.html' # locally-disabled: Warning locally suppressed using disable-msg # cyclic-import: because of https://github.com/PyCQA/pylint/issues/850 # too-many-arguments: Due to the nature of the CLI many commands have large arguments set which reflect in large arguments set in corresponding methods. disable=missing-docstring,locally-disabled,fixme,cyclic-import,too-many-arguments,invalid-name,duplicate-code [FORMAT] max-line-length=120 [VARIABLES] # Tells whether we should check for unused import in __init__ files. init-import=yes [DESIGN] # Maximum number of locals for function / method body max-locals=25 # Maximum number of branch for function / method body max-branches=20 [SIMILARITIES] min-similarity-lines=10 [BASIC] # Naming hints based on PEP 8 (https://www.python.org/dev/peps/pep-0008/#naming-conventions). # Consider these guidelines and not hard rules. Read PEP 8 for more details. # The invalid-name checker must be **enabled** for these hints to be used. include-naming-hint=yes module-name-hint=lowercase (keep short; underscores are discouraged) const-name-hint=UPPER_CASE_WITH_UNDERSCORES class-name-hint=CapitalizedWords class-attribute-name-hint=lower_case_with_underscores attr-name-hint=lower_case_with_underscores method-name-hint=lower_case_with_underscores function-name-hint=lower_case_with_underscores argument-name-hint=lower_case_with_underscores variable-name-hint=lower_case_with_underscores inlinevar-name-hint=lower_case_with_underscores (short is OK)msrest-for-python-0.6.21/setup.cfg000066400000000000000000000001471400412460500170370ustar00rootroot00000000000000[bdist_wheel] universal=1 [mypy] ignore_missing_imports = True [tool:pytest] addopts = --durations=10msrest-for-python-0.6.21/setup.py000066400000000000000000000052651400412460500167360ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
# # -------------------------------------------------------------------------- from setuptools import setup, find_packages setup( name='msrest', version='0.6.21', author='Microsoft Corporation', packages=find_packages(exclude=["tests", "tests.*"]), url=("https://github.com/Azure/msrest-for-python"), license='MIT License', description='AutoRest swagger generator Python client runtime.', long_description=open('README.rst').read(), classifiers=[ 'Development Status :: 4 - Beta', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development'], install_requires=[ "requests~=2.16", "requests_oauthlib>=0.5.0", "isodate>=0.6.0", "certifi>=2017.4.17", ], include_package_data=True, package_data={ 'pytyped': ['py.typed'], }, extras_require={ ":python_version<'3.4'": ['enum34>=1.0.4'], ":python_version<'3.5'": ['typing'], "async:python_version>='3.5'": [ 'aiohttp>=3.0', 'aiodns' ], } ) msrest-for-python-0.6.21/tests/000077500000000000000000000000001400412460500163565ustar00rootroot00000000000000msrest-for-python-0.6.21/tests/__init__.py000066400000000000000000000032161400412460500204710ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # -------------------------------------------------------------------------- import os from unittest import TestLoader, TextTestRunner import logging #logging.basicConfig(level=logging.DEBUG, filename="d:/log.txt") if __name__ == '__main__': runner = TextTestRunner(verbosity=2) test_dir = os.path.dirname(__file__) test_loader = TestLoader() suite = test_loader.discover(test_dir, pattern="unittest_*.py") runner.run(suite) msrest-for-python-0.6.21/tests/asynctests/000077500000000000000000000000001400412460500205565ustar00rootroot00000000000000msrest-for-python-0.6.21/tests/asynctests/test_async_client.py000066400000000000000000000150271400412460500246470ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- import io import asyncio import json import unittest try: from unittest import mock except ImportError: import mock import sys import pytest import requests from requests.adapters import HTTPAdapter from oauthlib import oauth2 from msrest.async_client import ServiceClientAsync from msrest.authentication import OAuthTokenAuthentication from msrest.configuration import Configuration from msrest import Configuration from msrest.exceptions import ClientRequestError, TokenExpiredError from msrest.universal_http import ClientRequest from msrest.universal_http.async_requests import AsyncRequestsClientResponse @unittest.skipIf(sys.version_info < (3, 5, 2), "Async tests only on 3.5.2 minimal") class TestServiceClient(object): @pytest.mark.asyncio async def test_client_send(self): cfg = Configuration("/") cfg.headers = {'Test': 'true'} cfg.credentials = mock.create_autospec(OAuthTokenAuthentication) client = ServiceClientAsync(cfg) req_response = requests.Response() req_response._content = br'{"real": true}' # Has to be valid bytes JSON req_response._content_consumed = True req_response.status_code = 200 def side_effect(*args, **kwargs): return req_response session = mock.create_autospec(requests.Session) session.request.side_effect = side_effect session.adapters = { "http://": HTTPAdapter(), "https://": HTTPAdapter(), } # Be sure the mock does not trick me assert not hasattr(session.resolve_redirects, 'is_msrest_patched') client.config.pipeline._sender.driver.session = session client.config.credentials.signed_session.return_value = session client.config.credentials.refresh_session.return_value = session request = ClientRequest('GET', '/') await client.async_send(request, stream=False) session.request.call_count = 0 session.request.assert_called_with( 'GET', '/', allow_redirects=True, cert=None, headers={ 'User-Agent': cfg.user_agent, 'Test': 'true' # From global config }, stream=False, timeout=100, verify=True ) assert session.resolve_redirects.is_msrest_patched request = client.get('/', headers={'id':'1234'}, content={'Test':'Data'}) await client.async_send(request, stream=False) session.request.assert_called_with( 'GET', '/', data='{"Test": "Data"}', allow_redirects=True, cert=None, headers={ 'User-Agent': cfg.user_agent, 'Content-Length': '16', 'id':'1234', 'Accept': 'application/json', 'Test': 'true' # From global config }, stream=False, timeout=100, 
verify=True ) assert session.request.call_count == 1 session.request.call_count = 0 assert session.resolve_redirects.is_msrest_patched request = client.get('/', headers={'id':'1234'}, content={'Test':'Data'}) session.request.side_effect = requests.RequestException("test") with pytest.raises(ClientRequestError): await client.async_send(request, test='value', stream=False) session.request.assert_called_with( 'GET', '/', data='{"Test": "Data"}', allow_redirects=True, cert=None, headers={ 'User-Agent': cfg.user_agent, 'Content-Length': '16', 'id':'1234', 'Accept': 'application/json', 'Test': 'true' # From global config }, stream=False, timeout=100, verify=True ) assert session.request.call_count == 1 session.request.call_count = 0 assert session.resolve_redirects.is_msrest_patched session.request.side_effect = oauth2.rfc6749.errors.InvalidGrantError("test") with pytest.raises(TokenExpiredError): await client.async_send(request, headers={'id':'1234'}, content={'Test':'Data'}, test='value') assert session.request.call_count == 2 session.request.call_count = 0 session.request.side_effect = ValueError("test") with pytest.raises(ValueError): await client.async_send(request, headers={'id':'1234'}, content={'Test':'Data'}, test='value') @pytest.mark.asyncio async def test_client_stream_download(self): req_response = requests.Response() req_response._content = "abc" req_response._content_consumed = True req_response.status_code = 200 client_response = AsyncRequestsClientResponse( None, req_response ) def user_callback(chunk, response): assert response is req_response assert chunk in ["a", "b", "c"] async_iterator = client_response.stream_download(1, user_callback) result = "" async for value in async_iterator: result += value assert result == "abc" if __name__ == '__main__': unittest.main()msrest-for-python-0.6.21/tests/asynctests/test_async_paging.py000066400000000000000000000136731400412460500246430ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# #-------------------------------------------------------------------------- import sys import unittest import pytest from msrest.paging import Paged class FakePaged(Paged): _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[str]'} } def __init__(self, *args, **kwargs): super(FakePaged, self).__init__(*args, **kwargs) class TestPaging(object): @pytest.mark.asyncio async def test_basic_paging(self): async def internal_paging(next_link=None, raw=False): if not next_link: return { 'nextLink': 'page2', 'value': ['value1.0', 'value1.1'] } else: return { 'nextLink': None, 'value': ['value2.0', 'value2.1'] } deserialized = FakePaged(None, {}, async_command=internal_paging) # 3.6 only : result_iterated = [obj async for obj in deserialized] result_iterated = [] async for obj in deserialized: result_iterated.append(obj) assert ['value1.0', 'value1.1', 'value2.0', 'value2.1'] == result_iterated @pytest.mark.asyncio async def test_advance_paging(self): async def internal_paging(next_link=None, raw=False): if not next_link: return { 'nextLink': 'page2', 'value': ['value1.0', 'value1.1'] } else: return { 'nextLink': None, 'value': ['value2.0', 'value2.1'] } deserialized = FakePaged(None, {}, async_command=internal_paging) page1 = await deserialized.async_advance_page() assert ['value1.0', 'value1.1'] == page1 page2 = await deserialized.async_advance_page() assert ['value2.0', 'value2.1'] == page2 with pytest.raises(StopAsyncIteration): await deserialized.async_advance_page() @pytest.mark.asyncio async def test_get_paging(self): async def internal_paging(next_link=None, raw=False): if not next_link: return { 'nextLink': 'page2', 'value': ['value1.0', 'value1.1'] } elif next_link == 'page2': return { 'nextLink': 'page3', 'value': ['value2.0', 'value2.1'] } else: return { 'nextLink': None, 'value': ['value3.0', 'value3.1'] } deserialized = FakePaged(None, {}, async_command=internal_paging) page2 = await deserialized.async_get('page2') assert ['value2.0', 'value2.1'] == page2 page3 = await deserialized.async_get('page3') assert ['value3.0', 'value3.1'] == page3 @pytest.mark.asyncio async def test_reset_paging(self): async def internal_paging(next_link=None, raw=False): if not next_link: return { 'nextLink': 'page2', 'value': ['value1.0', 'value1.1'] } else: return { 'nextLink': None, 'value': ['value2.0', 'value2.1'] } deserialized = FakePaged(None, {}, async_command=internal_paging) deserialized.reset() # 3.6 only : result_iterated = [obj async for obj in deserialized] result_iterated = [] async for obj in deserialized: result_iterated.append(obj) assert ['value1.0', 'value1.1', 'value2.0', 'value2.1'] == result_iterated deserialized = FakePaged(None, {}, async_command=internal_paging) # Push the iterator to the last element async for element in deserialized: if element == "value2.0": break deserialized.reset() # 3.6 only : result_iterated = [obj async for obj in deserialized] result_iterated = [] async for obj in deserialized: result_iterated.append(obj) assert ['value1.0', 'value1.1', 'value2.0', 'value2.1'] == result_iterated @pytest.mark.asyncio async def test_none_value(self): async def internal_paging(next_link=None, raw=False): return { 'nextLink': None, 'value': None } deserialized = FakePaged(None, {}, async_command=internal_paging) # 3.6 only : result_iterated = [obj async for obj in deserialized] result_iterated = [] async for obj in deserialized: result_iterated.append(obj) assert len(result_iterated) == 0 
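# ---------------------------------------------------------------------------
# A minimal, standalone sketch (not part of the original test suite) of how
# the FakePaged / async_command pattern exercised above might be driven with a
# plain event loop instead of pytest-asyncio. It assumes only what the tests
# above demonstrate: an async callable taking `next_link`, passed as
# `async_command`, and `async for` iteration over the Paged instance. The
# helper name and payload values below are illustrative only.
# ---------------------------------------------------------------------------
import asyncio


def _collect_all_pages_example():
    """Illustrative helper: gather every item of a two-page FakePaged."""

    async def example_paging(next_link=None, raw=False):
        # First call returns page 1 and points to 'page2'; second call ends.
        if not next_link:
            return {'nextLink': 'page2', 'value': ['a', 'b']}
        return {'nextLink': None, 'value': ['c']}

    async def gather_items():
        paged = FakePaged(None, {}, async_command=example_paging)
        items = []
        async for item in paged:
            items.append(item)
        return items

    loop = asyncio.get_event_loop()
    assert loop.run_until_complete(gather_items()) == ['a', 'b', 'c']
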
msrest-for-python-0.6.21/tests/asynctests/test_pipeline.py000066400000000000000000000104271400412460500240000ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- import sys from msrest.universal_http import ( ClientRequest, ) from msrest.universal_http.async_requests import ( AsyncRequestsHTTPSender, AsyncTrioRequestsHTTPSender, ) from msrest.pipeline import ( AsyncPipeline, AsyncHTTPSender, SansIOHTTPPolicy ) from msrest.pipeline.async_requests import AsyncPipelineRequestsHTTPSender from msrest.pipeline.universal import UserAgentPolicy from msrest.pipeline.aiohttp import AioHTTPSender from msrest.configuration import Configuration import trio import pytest @pytest.mark.asyncio async def test_sans_io_exception(): class BrokenSender(AsyncHTTPSender): async def send(self, request, **config): raise ValueError("Broken") async def __aexit__(self, exc_type, exc_value, traceback): """Raise any exception triggered within the runtime context.""" return None pipeline = AsyncPipeline([SansIOHTTPPolicy()], BrokenSender()) req = ClientRequest('GET', '/') with pytest.raises(ValueError): await pipeline.run(req) class SwapExec(SansIOHTTPPolicy): def on_exception(self, requests, **kwargs): exc_type, exc_value, exc_traceback = sys.exc_info() raise NotImplementedError(exc_value) pipeline = AsyncPipeline([SwapExec()], BrokenSender()) with pytest.raises(NotImplementedError): await pipeline.run(req) @pytest.mark.asyncio async def test_basic_aiohttp(): request = ClientRequest("GET", "http://bing.com") policies = [ UserAgentPolicy("myusergant") ] async with AsyncPipeline(policies) as pipeline: response = await pipeline.run(request) assert pipeline._sender.driver._session.closed assert response.http_response.status_code == 200 @pytest.mark.asyncio async def test_basic_async_requests(): request = ClientRequest("GET", "http://bing.com") policies = [ UserAgentPolicy("myusergant") ] async with AsyncPipeline(policies, AsyncPipelineRequestsHTTPSender()) as pipeline: response = await pipeline.run(request) assert response.http_response.status_code == 200 @pytest.mark.asyncio async def test_conf_async_requests(): conf = Configuration("http://bing.com/") request = ClientRequest("GET", "http://bing.com/") policies = [ UserAgentPolicy("myusergant") ] async with 
AsyncPipeline(policies, AsyncPipelineRequestsHTTPSender(AsyncRequestsHTTPSender(conf))) as pipeline: response = await pipeline.run(request) assert response.http_response.status_code == 200 def test_conf_async_trio_requests(): async def do(): conf = Configuration("http://bing.com/") request = ClientRequest("GET", "http://bing.com/") policies = [ UserAgentPolicy("myusergant") ] async with AsyncPipeline(policies, AsyncPipelineRequestsHTTPSender(AsyncTrioRequestsHTTPSender(conf))) as pipeline: return await pipeline.run(request) response = trio.run(do) assert response.http_response.status_code == 200msrest-for-python-0.6.21/tests/asynctests/test_polling.py000066400000000000000000000127601400412460500236410ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- import asyncio try: from unittest import mock except ImportError: import mock import pytest from msrest.polling.async_poller import * from msrest.async_client import ServiceClientAsync from msrest.serialization import Model from msrest.configuration import Configuration @pytest.mark.asyncio async def test_abc_polling(): abc_polling = AsyncPollingMethod() with pytest.raises(NotImplementedError): abc_polling.initialize(None, None, None) with pytest.raises(NotImplementedError): await abc_polling.run() with pytest.raises(NotImplementedError): abc_polling.status() with pytest.raises(NotImplementedError): abc_polling.finished() with pytest.raises(NotImplementedError): abc_polling.resource() @pytest.mark.asyncio async def test_no_polling(): no_polling = AsyncNoPolling() initial_response = "initial response" def deserialization_cb(response): assert response == initial_response return "Treated: "+response no_polling.initialize(None, initial_response, deserialization_cb) await no_polling.run() # Should no raise and do nothing assert no_polling.status() == "succeeded" assert no_polling.finished() assert no_polling.resource() == "Treated: "+initial_response class PollingTwoSteps(AsyncPollingMethod): """An empty poller that returns the deserialized initial response. 
""" def __init__(self, sleep=0): self._initial_response = None self._deserialization_callback = None self._sleep = sleep def initialize(self, _, initial_response, deserialization_callback): self._initial_response = initial_response self._deserialization_callback = deserialization_callback self._finished = False async def run(self): """Empty run, no polling. """ self._finished = True await asyncio.sleep(self._sleep) # Give me time to add callbacks! def status(self): """Return the current status as a string. :rtype: str """ return "succeeded" if self._finished else "running" def finished(self): """Is this polling finished? :rtype: bool """ return self._finished def resource(self): return self._deserialization_callback(self._initial_response) @pytest.fixture def client(): # We need a ServiceClientAsync instance, but the poller itself don't use it, so we don't need # Something functional return ServiceClientAsync(Configuration("http://example.org")) @pytest.mark.asyncio async def test_poller(client): # Same the poller itself doesn't care about the initial_response, and there is no type constraint here initial_response = "Initial response" # Same for deserialization_callback, just pass to the polling_method def deserialization_callback(response): assert response == initial_response return "Treated: "+response method = AsyncNoPolling() result = await async_poller(client, initial_response, deserialization_callback, method) assert result == "Treated: "+initial_response # Test with a basic Model class MockedModel(Model): called = False @classmethod def deserialize(cls, data): assert data == initial_response cls.called = True result = await async_poller(client, initial_response, MockedModel, method) assert MockedModel.called # Test poller that method do a run method = PollingTwoSteps(sleep=2) result = await async_poller(client, initial_response, deserialization_callback, method) assert result == "Treated: "+initial_response @pytest.mark.asyncio async def test_broken_poller(client): with pytest.raises(ValueError): await async_poller(None, None, None, None) class NoPollingError(PollingTwoSteps): async def run(self): raise ValueError("Something bad happened") initial_response = "Initial response" def deserialization_callback(response): return "Treated: "+response method = NoPollingError() with pytest.raises(ValueError) as excinfo: await async_poller(client, initial_response, deserialization_callback, method) assert "Something bad happened" in str(excinfo.value) msrest-for-python-0.6.21/tests/asynctests/test_universal_http.py000066400000000000000000000057221400412460500252440ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- import sys from msrest.universal_http import ( ClientRequest, AsyncHTTPSender, ) from msrest.universal_http.aiohttp import AioHTTPSender from msrest.universal_http.async_requests import ( AsyncBasicRequestsHTTPSender, AsyncRequestsHTTPSender, AsyncTrioRequestsHTTPSender, ) from msrest.configuration import Configuration import trio import pytest @pytest.mark.asyncio async def test_basic_aiohttp(): request = ClientRequest("GET", "http://bing.com") async with AioHTTPSender() as sender: response = await sender.send(request) assert response.body() is not None assert sender._session.closed assert response.status_code == 200 @pytest.mark.asyncio async def test_basic_async_requests(): request = ClientRequest("GET", "http://bing.com") async with AsyncBasicRequestsHTTPSender() as sender: response = await sender.send(request) assert response.body() is not None assert response.status_code == 200 @pytest.mark.asyncio async def test_conf_async_requests(): conf = Configuration("http://bing.com/") request = ClientRequest("GET", "http://bing.com/") async with AsyncRequestsHTTPSender(conf) as sender: response = await sender.send(request) assert response.body() is not None assert response.status_code == 200 def test_conf_async_trio_requests(): async def do(): conf = Configuration("http://bing.com/") request = ClientRequest("GET", "http://bing.com/") async with AsyncTrioRequestsHTTPSender(conf) as sender: return await sender.send(request) assert response.body() is not None response = trio.run(do) assert response.status_code == 200msrest-for-python-0.6.21/tests/conftest.py000066400000000000000000000026551400412460500205650ustar00rootroot00000000000000# -------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
# # -------------------------------------------------------------------------- import sys # Ignore collection of async tests for Python 2 collect_ignore = [] if sys.version_info < (3, 5): collect_ignore.append("asynctests") msrest-for-python-0.6.21/tests/storage_models/000077500000000000000000000000001400412460500213655ustar00rootroot00000000000000msrest-for-python-0.6.21/tests/storage_models/__init__.py000066400000000000000000000052651400412460500235060ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from .storage_account_check_name_availability_parameters import StorageAccountCheckNameAvailabilityParameters from .check_name_availability_result import CheckNameAvailabilityResult from .sku import Sku from .custom_domain import CustomDomain from .encryption_service import EncryptionService from .encryption_services import EncryptionServices from .encryption import Encryption from .storage_account_create_parameters import StorageAccountCreateParameters from .endpoints import Endpoints from .storage_account import StorageAccount from .storage_account_key import StorageAccountKey from .storage_account_list_keys_result import StorageAccountListKeysResult from .storage_account_regenerate_key_parameters import StorageAccountRegenerateKeyParameters from .storage_account_update_parameters import StorageAccountUpdateParameters from .usage_name import UsageName from .usage import Usage from .resource import Resource from .account_sas_parameters import AccountSasParameters from .list_account_sas_response import ListAccountSasResponse from .service_sas_parameters import ServiceSasParameters from .list_service_sas_response import ListServiceSasResponse from .storage_account_paged import StorageAccountPaged from .usage_paged import UsagePaged from .storage_management_client_enums import ( Reason, SkuName, SkuTier, AccessTier, Kind, ProvisioningState, AccountStatus, KeyPermission, UsageUnit, HttpProtocol, ) __all__ = [ 'StorageAccountCheckNameAvailabilityParameters', 'CheckNameAvailabilityResult', 'Sku', 'CustomDomain', 'EncryptionService', 'EncryptionServices', 'Encryption', 'StorageAccountCreateParameters', 'Endpoints', 'StorageAccount', 'StorageAccountKey', 'StorageAccountListKeysResult', 'StorageAccountRegenerateKeyParameters', 'StorageAccountUpdateParameters', 'UsageName', 'Usage', 'Resource', 'AccountSasParameters', 'ListAccountSasResponse', 'ServiceSasParameters', 'ListServiceSasResponse', 'StorageAccountPaged', 'UsagePaged', 'Reason', 'SkuName', 'SkuTier', 'AccessTier', 'Kind', 'ProvisioningState', 'AccountStatus', 'KeyPermission', 'UsageUnit', 'HttpProtocol', ] msrest-for-python-0.6.21/tests/storage_models/account_sas_parameters.py000066400000000000000000000073701400412460500264730ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class AccountSasParameters(Model): """The parameters to list SAS credentials of a storage account. :param services: The signed services accessible with the account SAS. Possible values include: Blob (b), Queue (q), Table (t), File (f). Possible values include: 'b', 'q', 't', 'f' :type services: str or :class:`enum ` :param resource_types: The signed resource types that are accessible with the account SAS. Service (s): Access to service-level APIs; Container (c): Access to container-level APIs; Object (o): Access to object-level APIs for blobs, queue messages, table entities, and files. Possible values include: 's', 'c', 'o' :type resource_types: str or :class:`enum ` :param permissions: The signed permissions for the account SAS. Possible values include: Read (r), Write (w), Delete (d), List (l), Add (a), Create (c), Update (u) and Process (p). Possible values include: 'r', 'd', 'w', 'l', 'a', 'c', 'u', 'p' :type permissions: str or :class:`enum ` :param ip_address_or_range: An IP address or a range of IP addresses from which to accept requests. :type ip_address_or_range: str :param protocols: The protocol permitted for a request made with the account SAS. Possible values include: 'https,http', 'https' :type protocols: str or :class:`HttpProtocol ` :param shared_access_start_time: The time at which the SAS becomes valid. :type shared_access_start_time: datetime :param shared_access_expiry_time: The time at which the shared access signature becomes invalid. :type shared_access_expiry_time: datetime :param key_to_sign: The key to sign the account SAS token with. :type key_to_sign: str """ _validation = { 'services': {'required': True}, 'resource_types': {'required': True}, 'permissions': {'required': True}, 'shared_access_expiry_time': {'required': True}, } _attribute_map = { 'services': {'key': 'signedServices', 'type': 'str'}, 'resource_types': {'key': 'signedResourceTypes', 'type': 'str'}, 'permissions': {'key': 'signedPermission', 'type': 'str'}, 'ip_address_or_range': {'key': 'signedIp', 'type': 'str'}, 'protocols': {'key': 'signedProtocol', 'type': 'HttpProtocol'}, 'shared_access_start_time': {'key': 'signedStart', 'type': 'iso-8601'}, 'shared_access_expiry_time': {'key': 'signedExpiry', 'type': 'iso-8601'}, 'key_to_sign': {'key': 'keyToSign', 'type': 'str'}, } def __init__(self, services, resource_types, permissions, shared_access_expiry_time, ip_address_or_range=None, protocols=None, shared_access_start_time=None, key_to_sign=None): self.services = services self.resource_types = resource_types self.permissions = permissions self.ip_address_or_range = ip_address_or_range self.protocols = protocols self.shared_access_start_time = shared_access_start_time self.shared_access_expiry_time = shared_access_expiry_time self.key_to_sign = key_to_sign msrest-for-python-0.6.21/tests/storage_models/check_name_availability_result.py000066400000000000000000000035251400412460500301510ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. 
# -------------------------------------------------------------------------- from msrest.serialization import Model class CheckNameAvailabilityResult(Model): """The CheckNameAvailability operation response. Variables are only populated by the server, and will be ignored when sending a request. :ivar name_available: Gets a boolean value that indicates whether the name is available for you to use. If true, the name is available. If false, the name has already been taken or is invalid and cannot be used. :vartype name_available: bool :ivar reason: Gets the reason that a storage account name could not be used. The Reason element is only returned if NameAvailable is false. Possible values include: 'AccountNameInvalid', 'AlreadyExists' :vartype reason: str or :class:`Reason ` :ivar message: Gets an error message explaining the Reason value in more detail. :vartype message: str """ _validation = { 'name_available': {'readonly': True}, 'reason': {'readonly': True}, 'message': {'readonly': True}, } _attribute_map = { 'name_available': {'key': 'nameAvailable', 'type': 'bool'}, 'reason': {'key': 'reason', 'type': 'Reason'}, 'message': {'key': 'message', 'type': 'str'}, } def __init__(self): self.name_available = None self.reason = None self.message = None msrest-for-python-0.6.21/tests/storage_models/custom_domain.py000066400000000000000000000024101400412460500245750ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class CustomDomain(Model): """The custom domain assigned to this storage account. This can be set via Update. :param name: Gets or sets the custom domain name assigned to the storage account. Name is the CNAME source. :type name: str :param use_sub_domain: Indicates whether indirect CName validation is enabled. Default value is false. This should only be set on updates. :type use_sub_domain: bool """ _validation = { 'name': {'required': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'use_sub_domain': {'key': 'useSubDomain', 'type': 'bool'}, } def __init__(self, name, use_sub_domain=None): self.name = name self.use_sub_domain = use_sub_domain msrest-for-python-0.6.21/tests/storage_models/encryption.py000066400000000000000000000026221400412460500241330ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class Encryption(Model): """The encryption settings on the storage account. Variables are only populated by the server, and will be ignored when sending a request. :param services: List of services which support encryption. 
:type services: :class:`EncryptionServices ` :ivar key_source: The encryption keySource (provider). Possible values (case-insensitive): Microsoft.Storage. Default value: "Microsoft.Storage" . :vartype key_source: str """ _validation = { 'key_source': {'required': True, 'constant': True}, } _attribute_map = { 'services': {'key': 'services', 'type': 'EncryptionServices'}, 'key_source': {'key': 'keySource', 'type': 'str'}, } key_source = "Microsoft.Storage" def __init__(self, services=None): self.services = services msrest-for-python-0.6.21/tests/storage_models/encryption_service.py000066400000000000000000000027551400412460500256620ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class EncryptionService(Model): """A service that allows server-side encryption to be used. Variables are only populated by the server, and will be ignored when sending a request. :param enabled: A boolean indicating whether or not the service encrypts the data as it is stored. :type enabled: bool :ivar last_enabled_time: Gets a rough estimate of the date/time when the encryption was last enabled by the user. Only returned when encryption is enabled. There might be some unencrypted blobs which were written after this time, as it is just a rough estimate. :vartype last_enabled_time: datetime """ _validation = { 'last_enabled_time': {'readonly': True}, } _attribute_map = { 'enabled': {'key': 'enabled', 'type': 'bool'}, 'last_enabled_time': {'key': 'lastEnabledTime', 'type': 'iso-8601'}, } def __init__(self, enabled=None): self.enabled = enabled self.last_enabled_time = None msrest-for-python-0.6.21/tests/storage_models/encryption_services.py000066400000000000000000000036371400412460500260450ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class EncryptionServices(Model): """A list of services that support encryption. Variables are only populated by the server, and will be ignored when sending a request. :param blob: The encryption function of the blob storage service. :type blob: :class:`EncryptionService ` :param file: The encryption function of the file storage service. :type file: :class:`EncryptionService ` :ivar table: The encryption function of the table storage service. :vartype table: :class:`EncryptionService ` :ivar queue: The encryption function of the queue storage service. 
:vartype queue: :class:`EncryptionService ` """ _validation = { 'table': {'readonly': True}, 'queue': {'readonly': True}, } _attribute_map = { 'blob': {'key': 'blob', 'type': 'EncryptionService'}, 'file': {'key': 'file', 'type': 'EncryptionService'}, 'table': {'key': 'table', 'type': 'EncryptionService'}, 'queue': {'key': 'queue', 'type': 'EncryptionService'}, } def __init__(self, blob=None, file=None): self.blob = blob self.file = file self.table = None self.queue = None msrest-for-python-0.6.21/tests/storage_models/endpoints.py000066400000000000000000000027701400412460500237500ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class Endpoints(Model): """The URIs that are used to perform a retrieval of a public blob, queue, or table object. Variables are only populated by the server, and will be ignored when sending a request. :ivar blob: Gets the blob endpoint. :vartype blob: str :ivar queue: Gets the queue endpoint. :vartype queue: str :ivar table: Gets the table endpoint. :vartype table: str :ivar file: Gets the file endpoint. :vartype file: str """ _validation = { 'blob': {'readonly': True}, 'queue': {'readonly': True}, 'table': {'readonly': True}, 'file': {'readonly': True}, } _attribute_map = { 'blob': {'key': 'blob', 'type': 'str'}, 'queue': {'key': 'queue', 'type': 'str'}, 'table': {'key': 'table', 'type': 'str'}, 'file': {'key': 'file', 'type': 'str'}, } def __init__(self): self.blob = None self.queue = None self.table = None self.file = None msrest-for-python-0.6.21/tests/storage_models/list_account_sas_response.py000066400000000000000000000020411400412460500272070ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class ListAccountSasResponse(Model): """The List SAS credentials operation response. Variables are only populated by the server, and will be ignored when sending a request. :ivar account_sas_token: List SAS credentials of storage account. :vartype account_sas_token: str """ _validation = { 'account_sas_token': {'readonly': True}, } _attribute_map = { 'account_sas_token': {'key': 'accountSasToken', 'type': 'str'}, } def __init__(self): self.account_sas_token = None msrest-for-python-0.6.21/tests/storage_models/list_service_sas_response.py000066400000000000000000000020701400412460500272150ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. 
# # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class ListServiceSasResponse(Model): """The List service SAS credentials operation response. Variables are only populated by the server, and will be ignored when sending a request. :ivar service_sas_token: List service SAS credentials of specific resource. :vartype service_sas_token: str """ _validation = { 'service_sas_token': {'readonly': True}, } _attribute_map = { 'service_sas_token': {'key': 'serviceSasToken', 'type': 'str'}, } def __init__(self): self.service_sas_token = None msrest-for-python-0.6.21/tests/storage_models/resource.py000066400000000000000000000031421400412460500235660ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class Resource(Model): """Describes a storage resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource Id :vartype id: str :ivar name: Resource name :vartype name: str :ivar type: Resource type :vartype type: str :param location: Resource location :type location: str :param tags: Tags assigned to a resource; can be used for viewing and grouping a resource (across resource groups). :type tags: dict """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, } def __init__(self, location=None, tags=None): self.id = None self.name = None self.type = None self.location = location self.tags = tags msrest-for-python-0.6.21/tests/storage_models/service_sas_parameters.py000066400000000000000000000133761400412460500265020ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class ServiceSasParameters(Model): """The parameters to list service SAS credentials of a specific resource. :param canonicalized_resource: The canonical path to the signed resource. :type canonicalized_resource: str :param resource: The signed services accessible with the service SAS. Possible values include: Blob (b), Container (c), File (f), Share (s). Possible values include: 'b', 'c', 'f', 's' :type resource: str or :class:`enum ` :param permissions: The signed permissions for the service SAS. 
Possible values include: Read (r), Write (w), Delete (d), List (l), Add (a), Create (c), Update (u) and Process (p). Possible values include: 'r', 'd', 'w', 'l', 'a', 'c', 'u', 'p' :type permissions: str or :class:`enum ` :param ip_address_or_range: An IP address or a range of IP addresses from which to accept requests. :type ip_address_or_range: str :param protocols: The protocol permitted for a request made with the account SAS. Possible values include: 'https,http', 'https' :type protocols: str or :class:`HttpProtocol ` :param shared_access_start_time: The time at which the SAS becomes valid. :type shared_access_start_time: datetime :param shared_access_expiry_time: The time at which the shared access signature becomes invalid. :type shared_access_expiry_time: datetime :param identifier: A unique value up to 64 characters in length that correlates to an access policy specified for the container, queue, or table. :type identifier: str :param partition_key_start: The start of partition key. :type partition_key_start: str :param partition_key_end: The end of partition key. :type partition_key_end: str :param row_key_start: The start of row key. :type row_key_start: str :param row_key_end: The end of row key. :type row_key_end: str :param key_to_sign: The key to sign the account SAS token with. :type key_to_sign: str :param cache_control: The response header override for cache control. :type cache_control: str :param content_disposition: The response header override for content disposition. :type content_disposition: str :param content_encoding: The response header override for content encoding. :type content_encoding: str :param content_language: The response header override for content language. :type content_language: str :param content_type: The response header override for content type. 
:type content_type: str """ _validation = { 'canonicalized_resource': {'required': True}, 'resource': {'required': True}, 'identifier': {'max_length': 64}, } _attribute_map = { 'canonicalized_resource': {'key': 'canonicalizedResource', 'type': 'str'}, 'resource': {'key': 'signedResource', 'type': 'str'}, 'permissions': {'key': 'signedPermission', 'type': 'str'}, 'ip_address_or_range': {'key': 'signedIp', 'type': 'str'}, 'protocols': {'key': 'signedProtocol', 'type': 'HttpProtocol'}, 'shared_access_start_time': {'key': 'signedStart', 'type': 'iso-8601'}, 'shared_access_expiry_time': {'key': 'signedExpiry', 'type': 'iso-8601'}, 'identifier': {'key': 'signedIdentifier', 'type': 'str'}, 'partition_key_start': {'key': 'startPk', 'type': 'str'}, 'partition_key_end': {'key': 'endPk', 'type': 'str'}, 'row_key_start': {'key': 'startRk', 'type': 'str'}, 'row_key_end': {'key': 'endRk', 'type': 'str'}, 'key_to_sign': {'key': 'keyToSign', 'type': 'str'}, 'cache_control': {'key': 'rscc', 'type': 'str'}, 'content_disposition': {'key': 'rscd', 'type': 'str'}, 'content_encoding': {'key': 'rsce', 'type': 'str'}, 'content_language': {'key': 'rscl', 'type': 'str'}, 'content_type': {'key': 'rsct', 'type': 'str'}, } def __init__(self, canonicalized_resource, resource, permissions=None, ip_address_or_range=None, protocols=None, shared_access_start_time=None, shared_access_expiry_time=None, identifier=None, partition_key_start=None, partition_key_end=None, row_key_start=None, row_key_end=None, key_to_sign=None, cache_control=None, content_disposition=None, content_encoding=None, content_language=None, content_type=None): self.canonicalized_resource = canonicalized_resource self.resource = resource self.permissions = permissions self.ip_address_or_range = ip_address_or_range self.protocols = protocols self.shared_access_start_time = shared_access_start_time self.shared_access_expiry_time = shared_access_expiry_time self.identifier = identifier self.partition_key_start = partition_key_start self.partition_key_end = partition_key_end self.row_key_start = row_key_start self.row_key_end = row_key_end self.key_to_sign = key_to_sign self.cache_control = cache_control self.content_disposition = content_disposition self.content_encoding = content_encoding self.content_language = content_language self.content_type = content_type msrest-for-python-0.6.21/tests/storage_models/sku.py000066400000000000000000000030511400412460500225400ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class Sku(Model): """The SKU of the storage account. Variables are only populated by the server, and will be ignored when sending a request. :param name: Gets or sets the sku name. Required for account creation; optional for update. Note that in older versions, sku name was called accountType. Possible values include: 'Standard_LRS', 'Standard_GRS', 'Standard_RAGRS', 'Standard_ZRS', 'Premium_LRS' :type name: str or :class:`SkuName ` :ivar tier: Gets the sku tier. This is based on the SKU name. 
Possible values include: 'Standard', 'Premium' :vartype tier: str or :class:`SkuTier ` """ _validation = { 'name': {'required': True}, 'tier': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'SkuName'}, 'tier': {'key': 'tier', 'type': 'SkuTier'}, } def __init__(self, name): self.name = name self.tier = None msrest-for-python-0.6.21/tests/storage_models/storage_account.py000066400000000000000000000171251400412460500251250ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from .resource import Resource class StorageAccount(Resource): """The storage account. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource Id :vartype id: str :ivar name: Resource name :vartype name: str :ivar type: Resource type :vartype type: str :param location: Resource location :type location: str :param tags: Tags assigned to a resource; can be used for viewing and grouping a resource (across resource groups). :type tags: dict :ivar sku: Gets the SKU. :vartype sku: :class:`Sku ` :ivar kind: Gets the Kind. Possible values include: 'Storage', 'BlobStorage' :vartype kind: str or :class:`Kind ` :ivar provisioning_state: Gets the status of the storage account at the time the operation was called. Possible values include: 'Creating', 'ResolvingDNS', 'Succeeded' :vartype provisioning_state: str or :class:`ProvisioningState ` :ivar primary_endpoints: Gets the URLs that are used to perform a retrieval of a public blob, queue, or table object. Note that Standard_ZRS and Premium_LRS accounts only return the blob endpoint. :vartype primary_endpoints: :class:`Endpoints ` :ivar primary_location: Gets the location of the primary data center for the storage account. :vartype primary_location: str :ivar status_of_primary: Gets the status indicating whether the primary location of the storage account is available or unavailable. Possible values include: 'available', 'unavailable' :vartype status_of_primary: str or :class:`AccountStatus ` :ivar last_geo_failover_time: Gets the timestamp of the most recent instance of a failover to the secondary location. Only the most recent timestamp is retained. This element is not returned if there has never been a failover instance. Only available if the accountType is Standard_GRS or Standard_RAGRS. :vartype last_geo_failover_time: datetime :ivar secondary_location: Gets the location of the geo-replicated secondary for the storage account. Only available if the accountType is Standard_GRS or Standard_RAGRS. :vartype secondary_location: str :ivar status_of_secondary: Gets the status indicating whether the secondary location of the storage account is available or unavailable. Only available if the SKU name is Standard_GRS or Standard_RAGRS. Possible values include: 'available', 'unavailable' :vartype status_of_secondary: str or :class:`AccountStatus ` :ivar creation_time: Gets the creation date and time of the storage account in UTC. :vartype creation_time: datetime :ivar custom_domain: Gets the custom domain the user assigned to this storage account. 
:vartype custom_domain: :class:`CustomDomain ` :ivar secondary_endpoints: Gets the URLs that are used to perform a retrieval of a public blob, queue, or table object from the secondary location of the storage account. Only available if the SKU name is Standard_RAGRS. :vartype secondary_endpoints: :class:`Endpoints ` :ivar encryption: Gets the encryption settings on the account. If unspecified, the account is unencrypted. :vartype encryption: :class:`Encryption ` :ivar access_tier: Required for storage accounts where kind = BlobStorage. The access tier used for billing. Possible values include: 'Hot', 'Cool' :vartype access_tier: str or :class:`AccessTier ` :param enable_https_traffic_only: Allows https traffic only to storage service if sets to true. Default value: False . :type enable_https_traffic_only: bool """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'sku': {'readonly': True}, 'kind': {'readonly': True}, 'provisioning_state': {'readonly': True}, 'primary_endpoints': {'readonly': True}, 'primary_location': {'readonly': True}, 'status_of_primary': {'readonly': True}, 'last_geo_failover_time': {'readonly': True}, 'secondary_location': {'readonly': True}, 'status_of_secondary': {'readonly': True}, 'creation_time': {'readonly': True}, 'custom_domain': {'readonly': True}, 'secondary_endpoints': {'readonly': True}, 'encryption': {'readonly': True}, 'access_tier': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'sku', 'type': 'Sku'}, 'kind': {'key': 'kind', 'type': 'Kind'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'}, 'primary_endpoints': {'key': 'properties.primaryEndpoints', 'type': 'Endpoints'}, 'primary_location': {'key': 'properties.primaryLocation', 'type': 'str'}, 'status_of_primary': {'key': 'properties.statusOfPrimary', 'type': 'AccountStatus'}, 'last_geo_failover_time': {'key': 'properties.lastGeoFailoverTime', 'type': 'iso-8601'}, 'secondary_location': {'key': 'properties.secondaryLocation', 'type': 'str'}, 'status_of_secondary': {'key': 'properties.statusOfSecondary', 'type': 'AccountStatus'}, 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, 'custom_domain': {'key': 'properties.customDomain', 'type': 'CustomDomain'}, 'secondary_endpoints': {'key': 'properties.secondaryEndpoints', 'type': 'Endpoints'}, 'encryption': {'key': 'properties.encryption', 'type': 'Encryption'}, 'access_tier': {'key': 'properties.accessTier', 'type': 'AccessTier'}, 'enable_https_traffic_only': {'key': 'properties.supportsHttpsTrafficOnly', 'type': 'bool'}, } def __init__(self, location=None, tags=None, enable_https_traffic_only=False): super(StorageAccount, self).__init__(location=location, tags=tags) self.sku = None self.kind = None self.provisioning_state = None self.primary_endpoints = None self.primary_location = None self.status_of_primary = None self.last_geo_failover_time = None self.secondary_location = None self.status_of_secondary = None self.creation_time = None self.custom_domain = None self.secondary_endpoints = None self.encryption = None self.access_tier = None self.enable_https_traffic_only = enable_https_traffic_only 
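# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the generated model code): it shows
# how a model such as StorageAccount above is typically round-tripped through
# msrest's Serializer/Deserializer. The sample payload, the client_models
# mapping and the module placement are assumptions for demonstration only;
# the sketch assumes it lives in its own module inside this storage_models
# test package.
# ---------------------------------------------------------------------------
from msrest import Deserializer, Serializer

from .storage_account import StorageAccount
from .storage_management_client_enums import ProvisioningState

client_models = {
    'StorageAccount': StorageAccount,
    'ProvisioningState': ProvisioningState,
}
deserialize = Deserializer(client_models)
serialize = Serializer(client_models)

# 'properties.provisioningState' in _attribute_map means the value is read
# from a nested 'properties' object; readonly attributes are populated on
# deserialization and dropped again when building a request body.
raw = {'location': 'westus', 'properties': {'provisioningState': 'Succeeded'}}
account = deserialize('StorageAccount', raw)
assert account.location == 'westus'
assert account.provisioning_state == ProvisioningState.succeeded

body = serialize.body(account, 'StorageAccount')
assert body['location'] == 'westus'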
msrest-for-python-0.6.21/tests/storage_models/storage_account_check_name_availability_parameters.py000066400000000000000000000023301400412460500342270ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class StorageAccountCheckNameAvailabilityParameters(Model): """The parameters used to check the availability of the storage account name. Variables are only populated by the server, and will be ignored when sending a request. :param name: :type name: str :ivar type: Default value: "Microsoft.Storage/storageAccounts" . :vartype type: str """ _validation = { 'name': {'required': True}, 'type': {'required': True, 'constant': True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } type = "Microsoft.Storage/storageAccounts" def __init__(self, name): self.name = name msrest-for-python-0.6.21/tests/storage_models/storage_account_create_parameters.py000066400000000000000000000076331400412460500306760ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class StorageAccountCreateParameters(Model): """The parameters used when creating a storage account. :param sku: Required. Gets or sets the sku name. :type sku: :class:`Sku ` :param kind: Required. Indicates the type of storage account. Possible values include: 'Storage', 'BlobStorage' :type kind: str or :class:`Kind ` :param location: Required. Gets or sets the location of the resource. This will be one of the supported and registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). The geo region of a resource cannot be changed once it is created, but if an identical geo region is specified on update, the request will succeed. :type location: str :param tags: Gets or sets a list of key value pairs that describe the resource. These tags can be used for viewing and grouping this resource (across resource groups). A maximum of 15 tags can be provided for a resource. Each tag must have a key with a length no greater than 128 characters and a value with a length no greater than 256 characters. :type tags: dict :param custom_domain: User domain assigned to the storage account. Name is the CNAME source. Only one custom domain is supported per storage account at this time. To clear the existing custom domain, use an empty string for the custom domain name property. :type custom_domain: :class:`CustomDomain ` :param encryption: Provides the encryption settings on the account. If left unspecified the account encryption settings will remain the same. The default setting is unencrypted. 
:type encryption: :class:`Encryption ` :param access_tier: Required for storage accounts where kind = BlobStorage. The access tier used for billing. Possible values include: 'Hot', 'Cool' :type access_tier: str or :class:`AccessTier ` :param enable_https_traffic_only: Allows https traffic only to storage service if sets to true. Default value: False . :type enable_https_traffic_only: bool """ _validation = { 'sku': {'required': True}, 'kind': {'required': True}, 'location': {'required': True}, } _attribute_map = { 'sku': {'key': 'sku', 'type': 'Sku'}, 'kind': {'key': 'kind', 'type': 'Kind'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'custom_domain': {'key': 'properties.customDomain', 'type': 'CustomDomain'}, 'encryption': {'key': 'properties.encryption', 'type': 'Encryption'}, 'access_tier': {'key': 'properties.accessTier', 'type': 'AccessTier'}, 'enable_https_traffic_only': {'key': 'properties.supportsHttpsTrafficOnly', 'type': 'bool'}, } def __init__(self, sku, kind, location, tags=None, custom_domain=None, encryption=None, access_tier=None, enable_https_traffic_only=False): self.sku = sku self.kind = kind self.location = location self.tags = tags self.custom_domain = custom_domain self.encryption = encryption self.access_tier = access_tier self.enable_https_traffic_only = enable_https_traffic_only msrest-for-python-0.6.21/tests/storage_models/storage_account_key.py000066400000000000000000000030111400412460500257620ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class StorageAccountKey(Model): """An access key for the storage account. Variables are only populated by the server, and will be ignored when sending a request. :ivar key_name: Name of the key. :vartype key_name: str :ivar value: Base 64-encoded value of the key. :vartype value: str :ivar permissions: Permissions for the key -- read-only or full permissions. Possible values include: 'Read', 'Full' :vartype permissions: str or :class:`KeyPermission ` """ _validation = { 'key_name': {'readonly': True}, 'value': {'readonly': True}, 'permissions': {'readonly': True}, } _attribute_map = { 'key_name': {'key': 'keyName', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, 'permissions': {'key': 'permissions', 'type': 'KeyPermission'}, } def __init__(self): self.key_name = None self.value = None self.permissions = None msrest-for-python-0.6.21/tests/storage_models/storage_account_list_keys_result.py000066400000000000000000000022001400412460500305750ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. 
# -------------------------------------------------------------------------- from msrest.serialization import Model class StorageAccountListKeysResult(Model): """The response from the ListKeys operation. Variables are only populated by the server, and will be ignored when sending a request. :ivar keys: Gets the list of storage account keys and their properties for the specified storage account. :vartype keys: list of :class:`StorageAccountKey ` """ _validation = { 'keys': {'readonly': True}, } _attribute_map = { 'keys': {'key': 'keys', 'type': '[StorageAccountKey]'}, } def __init__(self): self.keys = None msrest-for-python-0.6.21/tests/storage_models/storage_account_paged.py000066400000000000000000000017021400412460500262570ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.paging import Paged class StorageAccountPaged(Paged): """ A paging container for iterating over a list of :class:`StorageAccount ` object """ _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[StorageAccount]'} } def __init__(self, *args, **kwargs): super(StorageAccountPaged, self).__init__(*args, **kwargs) msrest-for-python-0.6.21/tests/storage_models/storage_account_regenerate_key_parameters.py000066400000000000000000000016131400412460500324140ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class StorageAccountRegenerateKeyParameters(Model): """The parameters used to regenerate the storage account key. :param key_name: :type key_name: str """ _validation = { 'key_name': {'required': True}, } _attribute_map = { 'key_name': {'key': 'keyName', 'type': 'str'}, } def __init__(self, key_name): self.key_name = key_name msrest-for-python-0.6.21/tests/storage_models/storage_account_update_parameters.py000066400000000000000000000061721400412460500307120ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class StorageAccountUpdateParameters(Model): """The parameters that can be provided when updating the storage account properties. :param sku: Gets or sets the SKU name. 
Note that the SKU name cannot be updated to Standard_ZRS or Premium_LRS, nor can accounts of those sku names be updated to any other value. :type sku: :class:`Sku ` :param tags: Gets or sets a list of key value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags can be provided for a resource. Each tag must have a key no greater in length than 128 characters and a value no greater in length than 256 characters. :type tags: dict :param custom_domain: Custom domain assigned to the storage account by the user. Name is the CNAME source. Only one custom domain is supported per storage account at this time. To clear the existing custom domain, use an empty string for the custom domain name property. :type custom_domain: :class:`CustomDomain ` :param encryption: Provides the encryption settings on the account. The default setting is unencrypted. :type encryption: :class:`Encryption ` :param access_tier: Required for storage accounts where kind = BlobStorage. The access tier used for billing. Possible values include: 'Hot', 'Cool' :type access_tier: str or :class:`AccessTier ` :param enable_https_traffic_only: Allows https traffic only to storage service if sets to true. Default value: False . :type enable_https_traffic_only: bool """ _attribute_map = { 'sku': {'key': 'sku', 'type': 'Sku'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'custom_domain': {'key': 'properties.customDomain', 'type': 'CustomDomain'}, 'encryption': {'key': 'properties.encryption', 'type': 'Encryption'}, 'access_tier': {'key': 'properties.accessTier', 'type': 'AccessTier'}, 'enable_https_traffic_only': {'key': 'properties.supportsHttpsTrafficOnly', 'type': 'bool'}, } def __init__(self, sku=None, tags=None, custom_domain=None, encryption=None, access_tier=None, enable_https_traffic_only=False): self.sku = sku self.tags = tags self.custom_domain = custom_domain self.encryption = encryption self.access_tier = access_tier self.enable_https_traffic_only = enable_https_traffic_only msrest-for-python-0.6.21/tests/storage_models/storage_management_client_enums.py000066400000000000000000000030361400412460500303460ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. 
# -------------------------------------------------------------------------- from enum import Enum class Reason(Enum): account_name_invalid = "AccountNameInvalid" already_exists = "AlreadyExists" class SkuName(Enum): standard_lrs = "Standard_LRS" standard_grs = "Standard_GRS" standard_ragrs = "Standard_RAGRS" standard_zrs = "Standard_ZRS" premium_lrs = "Premium_LRS" class SkuTier(Enum): standard = "Standard" premium = "Premium" class AccessTier(Enum): hot = "Hot" cool = "Cool" class Kind(Enum): storage = "Storage" blob_storage = "BlobStorage" class ProvisioningState(Enum): creating = "Creating" resolving_dns = "ResolvingDNS" succeeded = "Succeeded" class AccountStatus(Enum): available = "available" unavailable = "unavailable" class KeyPermission(Enum): read = "Read" full = "Full" class UsageUnit(Enum): count = "Count" bytes = "Bytes" seconds = "Seconds" percent = "Percent" counts_per_second = "CountsPerSecond" bytes_per_second = "BytesPerSecond" class HttpProtocol(Enum): httpshttp = "https,http" https = "https" msrest-for-python-0.6.21/tests/storage_models/usage.py000066400000000000000000000035731400412460500230530ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class Usage(Model): """Describes Storage Resource Usage. Variables are only populated by the server, and will be ignored when sending a request. :ivar unit: Gets the unit of measurement. Possible values include: 'Count', 'Bytes', 'Seconds', 'Percent', 'CountsPerSecond', 'BytesPerSecond' :vartype unit: str or :class:`UsageUnit ` :ivar current_value: Gets the current count of the allocated resources in the subscription. :vartype current_value: int :ivar limit: Gets the maximum count of the resources that can be allocated in the subscription. :vartype limit: int :ivar name: Gets the name of the type of usage. :vartype name: :class:`UsageName ` """ _validation = { 'unit': {'readonly': True}, 'current_value': {'readonly': True}, 'limit': {'readonly': True}, 'name': {'readonly': True}, } _attribute_map = { 'unit': {'key': 'unit', 'type': 'UsageUnit'}, 'current_value': {'key': 'currentValue', 'type': 'int'}, 'limit': {'key': 'limit', 'type': 'int'}, 'name': {'key': 'name', 'type': 'UsageName'}, } def __init__(self): self.unit = None self.current_value = None self.limit = None self.name = None msrest-for-python-0.6.21/tests/storage_models/usage_name.py000066400000000000000000000023731400412460500240500ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class UsageName(Model): """The usage names that can be used; currently limited to StorageAccount. 
Variables are only populated by the server, and will be ignored when sending a request. :ivar value: Gets a string describing the resource name. :vartype value: str :ivar localized_value: Gets a localized string describing the resource name. :vartype localized_value: str """ _validation = { 'value': {'readonly': True}, 'localized_value': {'readonly': True}, } _attribute_map = { 'value': {'key': 'value', 'type': 'str'}, 'localized_value': {'key': 'localizedValue', 'type': 'str'}, } def __init__(self): self.value = None self.localized_value = None msrest-for-python-0.6.21/tests/storage_models/usage_paged.py000066400000000000000000000016251400412460500242070ustar00rootroot00000000000000# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.paging import Paged class UsagePaged(Paged): """ A paging container for iterating over a list of :class:`Usage ` object """ _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[Usage]'} } def __init__(self, *args, **kwargs): super(UsagePaged, self).__init__(*args, **kwargs) msrest-for-python-0.6.21/tests/test_auth.py000066400000000000000000000115421400412460500207330ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# #-------------------------------------------------------------------------- import os import sys import json import isodate from datetime import datetime import base64 from base64 import b64decode import unittest try: from unittest import mock except ImportError: import mock from msrest.authentication import ( BasicAuthentication, BasicTokenAuthentication, OAuthTokenAuthentication, ApiKeyCredentials, CognitiveServicesCredentials, TopicCredentials, DomainCredentials ) from requests import Request, PreparedRequest class TestAuthentication(unittest.TestCase): def setUp(self): self.request = mock.create_autospec(Request) self.request.headers = {} self.request.cookies = {} self.request.auth = None self.request.url = "http://my_endpoint.com" self.request.method = 'GET' self.request.files = None self.request.data = None self.request.json = None self.request.params = {} self.request.hooks = {} return super(TestAuthentication, self).setUp() def test_basic_auth(self): basic = BasicAuthentication("username", "password") session = basic.signed_session() req = session.auth(self.request) self.assertTrue('Authorization' in req.headers) self.assertTrue(req.headers['Authorization'].startswith('Basic ')) def test_basic_token_auth(self): token = { 'access_token': '123456789' } basic = BasicTokenAuthentication(token) basic.set_token() # Just check that this does not raise session = basic.signed_session() req = session.prepare_request(self.request) assert 'Authorization' in req.headers assert req.headers['Authorization'] == 'Bearer 123456789' def test_token_auth(self): token = { 'access_token': '123456789' } auth = OAuthTokenAuthentication("client_id", token) session = auth.signed_session() request = PreparedRequest() request.prepare("GET", "https://example.org") session.auth(request) assert request.headers == {'Authorization': 'Bearer 123456789'} def test_apikey_auth(self): auth = ApiKeyCredentials( in_headers={ 'testheader' : 'testheadervalue' } ) session = auth.signed_session() prep_req = session.prepare_request(self.request) self.assertDictContainsSubset({'testheader' : 'testheadervalue'}, prep_req.headers) auth = ApiKeyCredentials( in_query={ 'testquery' : 'testparamvalue' } ) session = auth.signed_session() prep_req = session.prepare_request(self.request) assert "testquery=testparamvalue" in prep_req.path_url def test_cs_auth(self): auth = CognitiveServicesCredentials("mysubkey") session = auth.signed_session() prep_req = session.prepare_request(self.request) self.assertDictContainsSubset({'Ocp-Apim-Subscription-Key' : 'mysubkey'}, prep_req.headers) def test_eventgrid_auth(self): auth = TopicCredentials("mytopickey") session = auth.signed_session() prep_req = session.prepare_request(self.request) self.assertDictContainsSubset({'aeg-sas-key' : 'mytopickey'}, prep_req.headers) def test_eventgrid_domain_auth(self): auth = DomainCredentials("mydomainkey") session = auth.signed_session() prep_req = session.prepare_request(self.request) self.assertDictContainsSubset({'aeg-sas-key' : 'mydomainkey'}, prep_req.headers) if __name__ == '__main__': unittest.main() msrest-for-python-0.6.21/tests/test_client.py000066400000000000000000000366611400412460500212610ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- import io import json import unittest try: from unittest import mock except ImportError: import mock import sys import requests from requests.adapters import HTTPAdapter from oauthlib import oauth2 from msrest import ServiceClient, SDKClient from msrest.universal_http import ( ClientRequest, ClientResponse ) from msrest.universal_http.requests import ( RequestsHTTPSender, RequestsClientResponse ) from msrest.pipeline import ( HTTPSender, Response, ) from msrest.authentication import OAuthTokenAuthentication, Authentication from msrest import Configuration from msrest.exceptions import ClientRequestError, TokenExpiredError class TestServiceClient(unittest.TestCase): def setUp(self): self.cfg = Configuration("https://my_endpoint.com") self.cfg.headers = {'Test': 'true'} self.creds = mock.create_autospec(OAuthTokenAuthentication) self.cfg.credentials = self.creds return super(TestServiceClient, self).setUp() def test_deprecated_creds(self): """Test that creds parameters gets populated correctly. 
https://github.com/Azure/msrest-for-python/issues/135 """ cfg = Configuration("http://127.0.0.1/") assert cfg.credentials is None creds = Authentication() client = SDKClient(creds, cfg) assert cfg.credentials is creds def test_sdk_context_manager(self): cfg = Configuration("http://127.0.0.1/") class Creds(Authentication): def __init__(self): self.first_session = None self.called = 0 def signed_session(self, session=None): self.called += 1 assert session is not None if self.first_session: assert self.first_session is session else: self.first_session = session cfg.credentials = Creds() with SDKClient(None, cfg) as client: assert cfg.keep_alive req = client._client.get('/') try: # Will fail, I don't care, that's not the point of the test client._client.send(req, timeout=0) except Exception: pass try: # Will fail, I don't care, that's not the point of the test client._client.send(req, timeout=0) except Exception: pass assert not cfg.keep_alive assert cfg.credentials.called == 2 def test_context_manager(self): cfg = Configuration("http://127.0.0.1/") class Creds(Authentication): def __init__(self): self.first_session = None self.called = 0 def signed_session(self, session=None): self.called += 1 assert session is not None if self.first_session: assert self.first_session is session else: self.first_session = session cfg.credentials = Creds() with ServiceClient(None, cfg) as client: assert cfg.keep_alive req = client.get('/') try: # Will fail, I don't care, that's not the point of the test client.send(req, timeout=0) except Exception: pass try: # Will fail, I don't care, that's not the point of the test client.send(req, timeout=0) except Exception: pass assert not cfg.keep_alive assert cfg.credentials.called == 2 def test_keep_alive(self): cfg = Configuration("http://127.0.0.1/") cfg.keep_alive = True class Creds(Authentication): def __init__(self): self.first_session = None self.called = 0 def signed_session(self, session=None): self.called += 1 assert session is not None if self.first_session: assert self.first_session is session else: self.first_session = session cfg.credentials = Creds() client = ServiceClient(None, cfg) req = client.get('/') try: # Will fail, I don't care, that's not the point of the test client.send(req, timeout=0) except Exception: pass try: # Will fail, I don't care, that's not the point of the test client.send(req, timeout=0) except Exception: pass assert cfg.credentials.called == 2 # Manually close the client in "keep_alive" mode client.close() def test_client_request(self): cfg = Configuration("http://127.0.0.1/") client = ServiceClient(self.creds, cfg) obj = client.get('/') self.assertEqual(obj.method, 'GET') self.assertEqual(obj.url, "http://127.0.0.1/") obj = client.get("/service", {'param':"testing"}) self.assertEqual(obj.method, 'GET') self.assertEqual(obj.url, "http://127.0.0.1/service?param=testing") obj = client.get("service 2") self.assertEqual(obj.method, 'GET') self.assertEqual(obj.url, "http://127.0.0.1/service 2") cfg.base_url = "https://127.0.0.1/" obj = client.get("//service3") self.assertEqual(obj.method, 'GET') self.assertEqual(obj.url, "https://127.0.0.1/service3") obj = client.put('/') self.assertEqual(obj.method, 'PUT') obj = client.post('/') self.assertEqual(obj.method, 'POST') obj = client.head('/') self.assertEqual(obj.method, 'HEAD') obj = client.merge('/') self.assertEqual(obj.method, 'MERGE') obj = client.patch('/') self.assertEqual(obj.method, 'PATCH') obj = client.delete('/') self.assertEqual(obj.method, 'DELETE') def test_format_url(self): 
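# Exercises ServiceClient.format_url as shown below: relative paths are joined
# onto config.base_url, {placeholders} in the base_url are filled from the
# keyword arguments, and absolute URLs are returned unchanged.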
url = "/bool/test true" client = mock.create_autospec(ServiceClient) client.config = mock.Mock(base_url="http://localhost:3000") formatted = ServiceClient.format_url(client, url) self.assertEqual(formatted, "http://localhost:3000/bool/test true") client.config = mock.Mock(base_url="http://localhost:3000/") formatted = ServiceClient.format_url(client, url, foo=123, bar="value") self.assertEqual(formatted, "http://localhost:3000/bool/test true") url = "https://absolute_url.com/my/test/path" formatted = ServiceClient.format_url(client, url) self.assertEqual(formatted, "https://absolute_url.com/my/test/path") formatted = ServiceClient.format_url(client, url, foo=123, bar="value") self.assertEqual(formatted, "https://absolute_url.com/my/test/path") url = "test" formatted = ServiceClient.format_url(client, url) self.assertEqual(formatted, "http://localhost:3000/test") client.config = mock.Mock(base_url="http://{hostname}:{port}/{foo}/{bar}") formatted = ServiceClient.format_url(client, url, hostname="localhost", port="3000", foo=123, bar="value") self.assertEqual(formatted, "http://localhost:3000/123/value/test") client.config = mock.Mock(base_url="https://my_endpoint.com/") formatted = ServiceClient.format_url(client, url, foo=123, bar="value") self.assertEqual(formatted, "https://my_endpoint.com/test") def test_client_send(self): current_ua = self.cfg.user_agent client = ServiceClient(self.creds, self.cfg) client.config.keep_alive = True req_response = requests.Response() req_response._content = br'{"real": true}' # Has to be valid bytes JSON req_response._content_consumed = True req_response.status_code = 200 def side_effect(*args, **kwargs): return req_response session = mock.create_autospec(requests.Session) session.request.side_effect = side_effect session.adapters = { "http://": HTTPAdapter(), "https://": HTTPAdapter(), } # Be sure the mock does not trick me assert not hasattr(session.resolve_redirects, 'is_msrest_patched') client.config.pipeline._sender.driver.session = session client.config.credentials.signed_session.return_value = session client.config.credentials.refresh_session.return_value = session request = ClientRequest('GET', '/') client.send(request, stream=False) session.request.call_count = 0 session.request.assert_called_with( 'GET', '/', allow_redirects=True, cert=None, headers={ 'User-Agent': current_ua, 'Test': 'true' # From global config }, stream=False, timeout=100, verify=True ) assert session.resolve_redirects.is_msrest_patched client.send(request, headers={'id':'1234'}, content={'Test':'Data'}, stream=False) session.request.assert_called_with( 'GET', '/', data='{"Test": "Data"}', allow_redirects=True, cert=None, headers={ 'User-Agent': current_ua, 'Content-Length': '16', 'id':'1234', 'Test': 'true' # From global config }, stream=False, timeout=100, verify=True ) self.assertEqual(session.request.call_count, 1) session.request.call_count = 0 assert session.resolve_redirects.is_msrest_patched session.request.side_effect = requests.RequestException("test") with self.assertRaises(ClientRequestError): client.send(request, headers={'id':'1234'}, content={'Test':'Data'}, test='value', stream=False) session.request.assert_called_with( 'GET', '/', data='{"Test": "Data"}', allow_redirects=True, cert=None, headers={ 'User-Agent': current_ua, 'Content-Length': '16', 'id':'1234', 'Test': 'true' # From global config }, stream=False, timeout=100, verify=True ) self.assertEqual(session.request.call_count, 1) session.request.call_count = 0 assert 
session.resolve_redirects.is_msrest_patched session.request.side_effect = oauth2.rfc6749.errors.InvalidGrantError("test") with self.assertRaises(TokenExpiredError): client.send(request, headers={'id':'1234'}, content={'Test':'Data'}, test='value') self.assertEqual(session.request.call_count, 2) session.request.call_count = 0 session.request.side_effect = ValueError("test") with self.assertRaises(ValueError): client.send(request, headers={'id':'1234'}, content={'Test':'Data'}, test='value') @mock.patch.object(ClientRequest, "_format_data") def test_client_formdata_add(self, format_data): format_data.return_value = "formatted" request = ClientRequest('GET', '/') request.add_formdata() assert request.files == {} request = ClientRequest('GET', '/') request.add_formdata({'Test':'Data'}) assert request.files == {'Test':'formatted'} request = ClientRequest('GET', '/') request.headers = {'Content-Type':'1234'} request.add_formdata({'1':'1', '2':'2'}) assert request.files == {'1':'formatted', '2':'formatted'} request = ClientRequest('GET', '/') request.headers = {'Content-Type':'1234'} request.add_formdata({'1':'1', '2':None}) assert request.files == {'1':'formatted'} request = ClientRequest('GET', '/') request.headers = {'Content-Type':'application/x-www-form-urlencoded'} request.add_formdata({'1':'1', '2':'2'}) assert request.files is None assert request.data == {'1':'1', '2':'2'} request = ClientRequest('GET', '/') request.headers = {'Content-Type':'application/x-www-form-urlencoded'} request.add_formdata({'1':'1', '2':None}) assert request.files is None assert request.data == {'1':'1'} def test_format_data(self): data = ClientRequest._format_data(None) self.assertEqual(data, (None, None)) data = ClientRequest._format_data("Test") self.assertEqual(data, (None, "Test")) mock_stream = mock.create_autospec(io.BytesIO) data = ClientRequest._format_data(mock_stream) self.assertEqual(data, (None, mock_stream, "application/octet-stream")) mock_stream.name = "file_name" data = ClientRequest._format_data(mock_stream) self.assertEqual(data, ("file_name", mock_stream, "application/octet-stream")) def test_client_stream_download(self): req_response = requests.Response() req_response._content = "abc" req_response._content_consumed = True req_response.status_code = 200 client_response = RequestsClientResponse( None, req_response ) def user_callback(chunk, response): assert response is req_response assert chunk in ["a", "b", "c"] sync_iterator = client_response.stream_download(1, user_callback) result = "" for value in sync_iterator: result += value assert result == "abc" def test_request_builder(self): client = ServiceClient(self.creds, self.cfg) req = client.get('http://127.0.0.1/') assert req.method == 'GET' assert req.url == 'http://127.0.0.1/' assert req.headers == {'Accept': 'application/json'} assert req.data is None assert req.files is None req = client.put("http://127.0.0.1/", content={'creation': True}) assert req.method == 'PUT' assert req.url == "http://127.0.0.1/" assert req.headers == {'Content-Length': '18', 'Accept': 'application/json'} assert req.data == '{"creation": true}' assert req.files is None if __name__ == '__main__': unittest.main() msrest-for-python-0.6.21/tests/test_exceptions.py000066400000000000000000000076421400412460500221610ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- import json import unittest try: from unittest import mock except ImportError: import mock import requests from msrest.serialization import Model, Deserializer from msrest.exceptions import HttpOperationError class TestExceptions(unittest.TestCase): def test_request_exception(self): def raise_for_status(): raise requests.RequestException() deserializer = Deserializer() response = mock.create_autospec(requests.Response) response.raise_for_status = raise_for_status response.reason = "TESTING" excep = HttpOperationError(deserializer, response) self.assertIn("TESTING", str(excep)) self.assertIn("Operation returned an invalid status code", str(excep)) def test_custom_exception(self): class ErrorResponse(Model): _attribute_map = { 'error': {'key': 'error', 'type': 'ErrorDetails'}, } def __init__(self, error=None): self.error = error class ErrorResponseException(HttpOperationError): def __init__(self, deserialize, response, *args): super(ErrorResponseException, self).__init__(deserialize, response, 'ErrorResponse', *args) class ErrorDetails(Model): _validation = { 'code': {'readonly': True}, 'message': {'readonly': True}, 'target': {'readonly': True}, } _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'target': {'key': 'target', 'type': 'str'}, } def __init__(self): self.code = None self.message = None self.target = None deserializer = Deserializer({ 'ErrorResponse': ErrorResponse, 'ErrorDetails': ErrorDetails }) response = requests.Response() response._content_consumed = True response._content = json.dumps( { "error": { "code": "NotOptedIn", "message": "You are not allowed to download invoices. Please contact your account administrator to turn on access in the management portal for allowing to download invoices through the API." } } ).encode('utf-8') response.headers = {"content-type": "application/json; charset=utf8"} excep = ErrorResponseException(deserializer, response) self.assertIn("NotOptedIn", str(excep)) self.assertIn("You are not allowed to download invoices", str(excep)) msrest-for-python-0.6.21/tests/test_paging.py000066400000000000000000000125321400412460500212370ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- import unittest from msrest.paging import Paged class FakePaged(Paged): _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[str]'} } def __init__(self, *args, **kwargs): super(FakePaged, self).__init__(*args, **kwargs) class TestPaging(unittest.TestCase): def test_basic_paging(self): def internal_paging(next_link=None, raw=False): if not next_link: return { 'nextLink': 'page2', 'value': ['value1.0', 'value1.1'] } else: return { 'nextLink': None, 'value': ['value2.0', 'value2.1'] } deserialized = FakePaged(internal_paging, {}) result_iterated = list(deserialized) self.assertListEqual( ['value1.0', 'value1.1', 'value2.0', 'value2.1'], result_iterated ) def test_advance_paging(self): def internal_paging(next_link=None, raw=False): if not next_link: return { 'nextLink': 'page2', 'value': ['value1.0', 'value1.1'] } else: return { 'nextLink': None, 'value': ['value2.0', 'value2.1'] } deserialized = FakePaged(internal_paging, {}) page1 = deserialized.advance_page() self.assertListEqual( ['value1.0', 'value1.1'], page1 ) page2 = deserialized.advance_page() self.assertListEqual( ['value2.0', 'value2.1'], page2 ) with self.assertRaises(StopIteration): deserialized.advance_page() def test_get_paging(self): def internal_paging(next_link=None, raw=False): if not next_link: return { 'nextLink': 'page2', 'value': ['value1.0', 'value1.1'] } elif next_link == 'page2': return { 'nextLink': 'page3', 'value': ['value2.0', 'value2.1'] } else: return { 'nextLink': None, 'value': ['value3.0', 'value3.1'] } deserialized = FakePaged(internal_paging, {}) page2 = deserialized.get('page2') self.assertListEqual( ['value2.0', 'value2.1'], page2 ) page3 = deserialized.get('page3') self.assertListEqual( ['value3.0', 'value3.1'], page3 ) def test_reset_paging(self): def internal_paging(next_link=None, raw=False): if not next_link: return { 'nextLink': 'page2', 'value': ['value1.0', 'value1.1'] } else: return { 'nextLink': None, 'value': ['value2.0', 'value2.1'] } deserialized = FakePaged(internal_paging, {}) deserialized.reset() result_iterated = list(deserialized) self.assertListEqual( ['value1.0', 'value1.1', 'value2.0', 'value2.1'], result_iterated ) deserialized = FakePaged(internal_paging, {}) # Push the iterator to the last element for element in deserialized: if element == "value2.0": break deserialized.reset() 
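# After the partial iteration above, reset() rewinds the pager so iterating
# again yields the full sequence starting from the first page.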
result_iterated = list(deserialized) self.assertListEqual( ['value1.0', 'value1.1', 'value2.0', 'value2.1'], result_iterated ) def test_none_value(self): def internal_paging(next_link=None, raw=False): return { 'nextLink': None, 'value': None } deserialized = FakePaged(internal_paging, {}) result_iterated = list(deserialized) self.assertEqual(len(result_iterated), 0) msrest-for-python-0.6.21/tests/test_pipeline.py000066400000000000000000000076641400412460500216110ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- import json import requests import datetime from enum import Enum import unittest try: from unittest import mock except ImportError: import mock import xml.etree.ElementTree as ET import sys import pytest from msrest.universal_http import ( ClientRequest, ) from msrest.pipeline import ( ClientRawResponse, SansIOHTTPPolicy, Pipeline, HTTPSender ) from msrest import Configuration def test_sans_io_exception(): class BrokenSender(HTTPSender): def send(self, request, **config): raise ValueError("Broken") def __exit__(self, exc_type, exc_value, traceback): """Raise any exception triggered within the runtime context.""" return None pipeline = Pipeline([SansIOHTTPPolicy()], BrokenSender()) req = ClientRequest('GET', '/') with pytest.raises(ValueError): pipeline.run(req) class SwapExec(SansIOHTTPPolicy): def on_exception(self, requests, **kwargs): exc_type, exc_value, exc_traceback = sys.exc_info() raise NotImplementedError(exc_value) pipeline = Pipeline([SwapExec()], BrokenSender()) with pytest.raises(NotImplementedError): pipeline.run(req) class TestClientRequest(unittest.TestCase): def test_request_data(self): request = ClientRequest('GET', '/') data = "Lots of dataaaa" request.add_content(data) self.assertEqual(request.data, json.dumps(data)) self.assertEqual(request.headers.get('Content-Length'), '17') def test_request_xml(self): request = ClientRequest('GET', '/') data = ET.Element("root") request.add_content(data) assert request.data == b"\n" def test_request_url_with_params(self): request = ClientRequest('GET', '/') request.url = "a/b/c?t=y" request.format_parameters({'g': 'h'}) self.assertIn(request.url, [ 'a/b/c?g=h&t=y', 'a/b/c?t=y&g=h' ]) class TestClientResponse(unittest.TestCase): class Colors(Enum): red = 'red' blue = 
'blue' def test_raw_response(self): response = mock.create_autospec(requests.Response) response.headers = {} response.headers["my-test"] = '1999-12-31T23:59:59-23:59' response.headers["colour"] = "red" raw = ClientRawResponse([], response) raw.add_headers({'my-test': 'iso-8601', 'another_header': 'str', 'colour': TestClientResponse.Colors}) self.assertIsInstance(raw.headers['my-test'], datetime.datetime) if __name__ == '__main__': unittest.main() msrest-for-python-0.6.21/tests/test_polling.py000066400000000000000000000134461400412460500214430ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- import time try: from unittest import mock except ImportError: import mock import pytest from msrest.polling import * from msrest.service_client import ServiceClient from msrest.serialization import Model from msrest.configuration import Configuration def test_abc_polling(): abc_polling = PollingMethod() with pytest.raises(NotImplementedError): abc_polling.initialize(None, None, None) with pytest.raises(NotImplementedError): abc_polling.run() with pytest.raises(NotImplementedError): abc_polling.status() with pytest.raises(NotImplementedError): abc_polling.finished() with pytest.raises(NotImplementedError): abc_polling.resource() def test_no_polling(): no_polling = NoPolling() initial_response = "initial response" def deserialization_cb(response): assert response == initial_response return "Treated: "+response no_polling.initialize(None, initial_response, deserialization_cb) no_polling.run() # Should no raise and do nothing assert no_polling.status() == "succeeded" assert no_polling.finished() assert no_polling.resource() == "Treated: "+initial_response class PollingTwoSteps(PollingMethod): """An empty poller that returns the deserialized initial response. """ def __init__(self, sleep=0): self._initial_response = None self._deserialization_callback = None self._sleep = sleep def initialize(self, _, initial_response, deserialization_callback): self._initial_response = initial_response self._deserialization_callback = deserialization_callback self._finished = False def run(self): """Empty run, no polling. """ self._finished = True time.sleep(self._sleep) # Give me time to add callbacks! def status(self): """Return the current status as a string. 
:rtype: str """ return "succeeded" if self._finished else "running" def finished(self): """Is this polling finished? :rtype: bool """ return self._finished def resource(self): return self._deserialization_callback(self._initial_response) @pytest.fixture def client(): # We need a ServiceClient instance, but the poller itself don't use it, so we don't need # Something functional return ServiceClient(None, Configuration("http://example.org")) def test_poller(client): # Same the poller itself doesn't care about the initial_response, and there is no type constraint here initial_response = "Initial response" # Same for deserialization_callback, just pass to the polling_method def deserialization_callback(response): assert response == initial_response return "Treated: "+response method = NoPolling() poller = LROPoller(client, initial_response, deserialization_callback, method) done_cb = mock.MagicMock() poller.add_done_callback(done_cb) result = poller.result() assert poller.done() assert result == "Treated: "+initial_response assert poller.status() == "succeeded" done_cb.assert_called_once_with(method) # Test with a basic Model poller = LROPoller(client, initial_response, Model, method) assert poller._polling_method._deserialization_callback == Model.deserialize # Test poller that method do a run method = PollingTwoSteps(sleep=1) poller = LROPoller(client, initial_response, deserialization_callback, method) done_cb = mock.MagicMock() done_cb2 = mock.MagicMock() poller.add_done_callback(done_cb) poller.remove_done_callback(done_cb2) result = poller.result() assert result == "Treated: "+initial_response assert poller.status() == "succeeded" done_cb.assert_called_once_with(method) done_cb2.assert_not_called() with pytest.raises(ValueError) as excinfo: poller.remove_done_callback(done_cb) assert "Process is complete" in str(excinfo.value) def test_broken_poller(client): with pytest.raises(ValueError): LROPoller(None, None, None, None) class NoPollingError(PollingTwoSteps): def run(self): raise ValueError("Something bad happened") initial_response = "Initial response" def deserialization_callback(response): return "Treated: "+response method = NoPollingError() poller = LROPoller(client, initial_response, deserialization_callback, method) with pytest.raises(ValueError) as excinfo: poller.result() assert "Something bad happened" in str(excinfo.value) msrest-for-python-0.6.21/tests/test_requests_universal.py000066400000000000000000000102341400412460500237320ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- import concurrent.futures from requests.adapters import HTTPAdapter from msrest.universal_http import ( ClientRequest ) from msrest.universal_http.requests import ( BasicRequestsHTTPSender, RequestsHTTPSender, RequestHTTPSenderConfiguration ) def test_session_callback(): cfg = RequestHTTPSenderConfiguration() with RequestsHTTPSender(cfg) as driver: def callback(session, global_config, local_config, **kwargs): assert session is driver.session assert global_config is cfg assert local_config["test"] my_kwargs = kwargs.copy() my_kwargs.update({'used_callback': True}) return my_kwargs cfg.session_configuration_callback = callback request = ClientRequest('GET', 'http://127.0.0.1/') output_kwargs = driver._configure_send(request, **{"test": True}) assert output_kwargs['used_callback'] def test_max_retries_on_default_adapter(): # max_retries must be applied only on the default adapters of requests # If the user adds their own adapter, don't touch it cfg = RequestHTTPSenderConfiguration() max_retries = cfg.retry_policy() with RequestsHTTPSender(cfg) as driver: request = ClientRequest('GET', '/') driver.session.mount('"http://127.0.0.1/"', HTTPAdapter()) driver._configure_send(request) assert driver.session.adapters["http://"].max_retries is max_retries assert driver.session.adapters["https://"].max_retries is max_retries assert driver.session.adapters['"http://127.0.0.1/"'].max_retries is not max_retries def test_threading_basic_requests(): # The basic sender shares the same session across all threads, which is why it's not recommended sender = BasicRequestsHTTPSender() main_thread_session = sender.session def thread_body(local_sender): # Should be the same session assert local_sender.session is main_thread_session return True with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: future = executor.submit(thread_body, sender) assert future.result() def test_threading_cfg_requests(): cfg = RequestHTTPSenderConfiguration() # The one with a configuration, however, should get one session per thread automatically sender = RequestsHTTPSender(cfg) main_thread_session = sender.session # Check that this main session is patched assert main_thread_session.resolve_redirects.is_msrest_patched def thread_body(local_sender): # Should have its own session assert local_sender.session is not main_thread_session # But should be patched like the main thread session assert local_sender.session.resolve_redirects.is_msrest_patched return True with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: future = executor.submit(thread_body, sender) assert future.result() msrest-for-python-0.6.21/tests/test_runtime.py000066400000000000000000000403611400412460500214560ustar00rootroot00000000000000#-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved.
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- import json import httpretty try: from http.server import( HTTPServer, BaseHTTPRequestHandler) except ImportError: from BaseHTTPServer import HTTPServer from BaseHTTPServer import BaseHTTPRequestHandler import os import requests import re import unittest try: from unittest import mock except ImportError: import mock from msrest.authentication import ( Authentication, OAuthTokenAuthentication) from msrest.universal_http import ( ClientRequest ) from msrest import ( ServiceClient, Configuration) from msrest.exceptions import ( TokenExpiredError, ClientRequestError) import pytest class TestRuntime(unittest.TestCase): @httpretty.activate def test_credential_headers(self): httpretty.register_uri(httpretty.GET, "https://my_service.com/get_endpoint", body='[{"title": "Test Data"}]', content_type="application/json") token = { 'access_token': 'eswfld123kjhn1v5423', 'refresh_token': 'asdfkljh23490sdf', 'token_type': 'Bearer', 'expires_in': '3600', } cfg = Configuration("https://my_service.com") cfg.credentials = OAuthTokenAuthentication("client_id", token) client = ServiceClient(None, cfg) url = client.format_url("/get_endpoint") request = client.get(url, {'check':True}) response = client.send(request) assert 'Authorization' in response.request.headers assert response.request.headers['Authorization'] == 'Bearer eswfld123kjhn1v5423' httpretty.has_request() assert response.json() == [{"title": "Test Data"}] # Expiration test token['expires_in'] = '-30' cfg.credentials = OAuthTokenAuthentication("client_id", token) client = ServiceClient(None, cfg) url = client.format_url("/get_endpoint") request = client.get(url, {'check':True}) with pytest.raises(TokenExpiredError): response = client.send(request) @mock.patch.object(requests, 'Session') def test_request_fail(self, mock_requests): mock_requests.return_value.request.return_value = mock.Mock(text="text") cfg = Configuration("https://my_service.com") cfg.credentials = Authentication() client = ServiceClient(None, cfg) url = client.format_url("/get_endpoint") request = client.get(url, {'check':True}) response = client.send(request) assert response.text == "text" mock_requests.return_value.request.side_effect = requests.RequestException with self.assertRaises(ClientRequestError): client.send(request) @httpretty.activate def test_request_proxy(self): # Note that this test requires requests >= 2.8.0 to accept host on 
proxy cfg = Configuration("http://my_service.com") cfg.proxies.add("http://my_service.com", 'http://localhost:57979') cfg.credentials = Authentication() httpretty.register_uri(httpretty.GET, "http://localhost:57979/get_endpoint?check=True", body='"Mocked body"', content_type="application/json", status=200) client = ServiceClient(None, cfg) url = client.format_url("/get_endpoint") request = client.get(url, {'check':True}) response = client.send(request) assert response.json() == "Mocked body" with mock.patch.dict('os.environ', {'HTTP_PROXY': "http://localhost:1987"}): httpretty.register_uri(httpretty.GET, "http://localhost:1987/get_endpoint?check=True", body='"Mocked body"', content_type="application/json", status=200) cfg = Configuration("http://my_service.com") client = ServiceClient(None, cfg) url = client.format_url("/get_endpoint") request = client.get(url, {'check':True}) response = client.send(request) assert response.json() == "Mocked body" class TestRedirect(unittest.TestCase): def setUp(self): cfg = Configuration("https://my_service.com") cfg.retry_policy.backoff_factor=0 cfg.redirect_policy.max_redirects=2 cfg.credentials = Authentication() self.client = ServiceClient(None, cfg) return super(TestRedirect, self).setUp() @httpretty.activate def test_request_redirect_post(self): url = self.client.format_url("/get_endpoint") request = self.client.post(url, {'check':True}) httpretty.register_uri(httpretty.GET, 'https://my_service.com/http/success/get/200', status=200) httpretty.register_uri(httpretty.POST, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="", status=303, method='POST', location='/http/success/get/200'), ]) response = self.client.send(request) assert response.status_code == 200, "Should redirect with GET on 303 with location header" assert response.request.method == 'GET' assert response.history[0].status_code == 303 assert response.history[0].is_redirect httpretty.reset() httpretty.register_uri(httpretty.POST, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="", status=303, method='POST'), ]) response = self.client.send(request) assert response.status_code == 303, "Should not redirect on 303 without location header" assert response.history == [] assert not response.is_redirect @httpretty.activate def test_request_redirect_head(self): url = self.client.format_url("/get_endpoint") request = self.client.head(url, {'check':True}) httpretty.register_uri(httpretty.HEAD, 'https://my_service.com/http/success/200', status=200) httpretty.register_uri(httpretty.HEAD, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="", status=307, method='HEAD', location='/http/success/200'), ]) response = self.client.send(request) assert response.status_code == 200, "Should redirect on 307 with location header" assert response.request.method == 'HEAD' assert response.history[0].status_code == 307 assert response.history[0].is_redirect httpretty.reset() httpretty.register_uri(httpretty.HEAD, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="", status=307, method='HEAD'), ]) response = self.client.send(request) assert response.status_code == 307, "Should not redirect on 307 without location header" assert response.history == [] assert not response.is_redirect @httpretty.activate def test_request_redirect_delete(self): url = self.client.format_url("/get_endpoint") request = self.client.delete(url, {'check':True}) httpretty.register_uri(httpretty.DELETE, 'https://my_service.com/http/success/200', 
status=200) httpretty.register_uri(httpretty.DELETE, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="", status=307, method='DELETE', location='/http/success/200'), ]) response = self.client.send(request) assert response.status_code == 200, "Should redirect on 307 with location header" assert response.request.method == 'DELETE' assert response.history[0].status_code == 307 assert response.history[0].is_redirect httpretty.reset() httpretty.register_uri(httpretty.DELETE, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="", status=307, method='DELETE'), ]) response = self.client.send(request) assert response.status_code == 307, "Should not redirect on 307 without location header" assert response.history == [] assert not response.is_redirect @httpretty.activate def test_request_redirect_put(self): url = self.client.format_url("/get_endpoint") request = self.client.put(url, {'check':True}) httpretty.register_uri(httpretty.PUT, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="", status=305, method='PUT', location='/http/success/200'), ]) response = self.client.send(request) assert response.status_code == 305, "Should not redirect on 305" assert response.history == [] assert not response.is_redirect @httpretty.activate def test_request_redirect_get(self): url = self.client.format_url("/get_endpoint") request = self.client.get(url, {'check':True}) httpretty.register_uri(httpretty.GET, "https://my_service.com/http/finished", responses=[ httpretty.Response(body="", status=200, method='GET'), ]) httpretty.register_uri(httpretty.GET, "https://my_service.com/http/redirect3", responses=[ httpretty.Response(body="", status=307, method='GET', location='/http/finished'), ]) httpretty.register_uri(httpretty.GET, "https://my_service.com/http/redirect2", responses=[ httpretty.Response(body="", status=307, method='GET', location='/http/redirect3'), ]) httpretty.register_uri(httpretty.GET, "https://my_service.com/http/redirect1", responses=[ httpretty.Response(body="", status=307, method='GET', location='/http/redirect2'), ]) httpretty.register_uri(httpretty.GET, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="", status=307, method='GET', location='/http/redirect1'), ]) with self.assertRaises(ClientRequestError, msg="Should exceed maximum redirects"): self.client.send(request) class TestRuntimeRetry(unittest.TestCase): def setUp(self): cfg = Configuration("https://my_service.com") cfg.retry_policy.backoff_factor=0 creds = Authentication() self.client = ServiceClient(creds, cfg) url = self.client.format_url("/get_endpoint") self.request = self.client.get(url, {'check':True}) return super(TestRuntimeRetry, self).setUp() @httpretty.activate def test_request_retry_502(self): httpretty.register_uri(httpretty.GET, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="retry response", status=502), httpretty.Response(body='success response', status=202), ]) response = self.client.send(self.request) self.assertEqual(response.status_code, 202, msg="Should retry on 502") @httpretty.activate def test_request_retry_408(self): httpretty.register_uri(httpretty.GET, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="retry response", status=408), httpretty.Response(body='success response', status=202), ]) response = self.client.send(self.request) self.assertEqual(response.status_code, 202, msg="Should retry on 408") @httpretty.activate def test_request_retry_3_times(self): 
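# Three consecutive 502 responses followed by a 202 should still succeed, while a fourth consecutive failure (see test_request_retry_max below) exhausts the retries and raises ClientRequestError.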
httpretty.register_uri(httpretty.GET, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="retry response", status=502), httpretty.Response(body="retry response", status=502), httpretty.Response(body="retry response", status=502), httpretty.Response(body='success response', status=202), ]) response = self.client.send(self.request) self.assertEqual(response.status_code, 202, msg="Should retry 3 times") @httpretty.activate def test_request_retry_max(self): httpretty.register_uri(httpretty.GET, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="retry response", status=502), httpretty.Response(body="retry response", status=502), httpretty.Response(body="retry response", status=502), httpretty.Response(body="retry response", status=502), ]) with self.assertRaises(ClientRequestError, msg="Max retries reached"): self.client.send(self.request) @httpretty.activate def test_request_retry_404(self): httpretty.register_uri(httpretty.GET, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="retry response", status=404), httpretty.Response(body='success response', status=202), ]) response = self.client.send(self.request) self.assertEqual(response.status_code, 404, msg="Shouldn't retry on 404") @httpretty.activate def test_request_retry_501(self): httpretty.register_uri(httpretty.GET, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="retry response", status=501), httpretty.Response(body='success response', status=202), ]) response = self.client.send(self.request) self.assertEqual(response.status_code, 501, msg="Shouldn't retry on 501") @httpretty.activate def test_request_retry_505(self): httpretty.register_uri(httpretty.GET, "https://my_service.com/get_endpoint", responses=[ httpretty.Response(body="retry response", status=505), httpretty.Response(body='success response', status=202), ]) response = self.client.send(self.request) self.assertEqual(response.status_code, 505, msg="Shouldn't retry on 505") if __name__ == '__main__': unittest.main()msrest-for-python-0.6.21/tests/test_serialization.py000066400000000000000000002463301400412460500226540ustar00rootroot00000000000000# -*- coding: utf-8 -*- #-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# #-------------------------------------------------------------------------- from decimal import Decimal import sys import json import isodate import logging import pickle from enum import Enum from datetime import datetime, timedelta, date, time import unittest import xml.etree.ElementTree as ET from requests import Response from msrest.serialization import Model, last_restapi_key_transformer, full_restapi_key_transformer, rest_key_extractor from msrest import Serializer, Deserializer from msrest.exceptions import SerializationError, DeserializationError, ValidationError from . import storage_models import pytest class Resource(Model): """Resource :param str id: Resource Id :param str name: Resource name :param str type: Resource type :param str location: Resource location :param dict tags: Resource tags """ _validation = { 'location': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, } def __init__(self, location, id=None, name=None, type=None, tags=None, **kwargs): super(Resource, self).__init__(**kwargs) self.id = id self.name = name self.type = type self.location = location self.tags = tags class GenericResource(Resource): """ Resource information. :param str id: Resource Id :param str name: Resource name :param str type: Resource type :param str location: Resource location :param dict tags: Resource tags :param Plan plan: Gets or sets the plan of the resource. :param object properties: Gets or sets the resource properties. """ _validation = {} _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'plan': {'key': 'plan', 'type': 'Plan'}, 'properties': {'key': 'properties', 'type': 'object'}, } def __init__(self, location, id=None, name=None, type=None, tags=None, plan=None, properties=None): super(GenericResource, self).__init__(location, id=id, name=name, type=type, tags=tags) self.plan = plan self.properties = properties class TestModelDeserialization(unittest.TestCase): def setUp(self): self.d = Deserializer({'Resource':Resource, 'GenericResource':GenericResource}) return super(TestModelDeserialization, self).setUp() def test_model_kwargs(self): class MyModel(Model): _validation = { 'id': {'readonly': True}, 'name': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, } def __init__(self, **kwargs): super(MyModel, self).__init__(**kwargs) self.id = None self.name = kwargs.get('name', None) self.location = kwargs.get('location', None) validation = MyModel().validate() self.assertEqual(str(validation[0]), "Parameter 'MyModel.name' can not be None.") @unittest.skipIf(sys.version_info < (3,4), "assertLogs not supported before 3.4") def test_model_kwargs_logs(self): class MyModel(Model): _validation = { 'id': {'readonly': True}, 'name': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, } def __init__(self, **kwargs): super(MyModel, self).__init__(**kwargs) self.id = None self.name = kwargs.get('name', None) self.location = kwargs.get('location', None) with self.assertLogs('msrest.serialization', 
level='WARNING') as cm: MyModel(name="test", id="123") # Should log that id is readonly self.assertEqual(len(cm.output), 1) self.assertIn("attribute id", cm.output[0]) self.assertIn("Readonly", cm.output[0]) with self.assertLogs('msrest.serialization', level='WARNING') as cm: MyModel(something="ioprez") # Should log that this is unknown self.assertEqual(len(cm.output), 1) self.assertIn("not a known attribute", cm.output[0]) @unittest.skipIf(sys.version_info < (3,4), "assertLogs not supported before 3.4") def test_empty_enum_logs(self): class StatusType(str, Enum): success = "success" failed = "failed" d = Deserializer({"StatusType": StatusType}) with self.assertRaises(AssertionError): with self.assertLogs('msrest.serialization', level='WARNING') as cm: result = d(StatusType, "failed") self.assertEqual(len(cm.output), 0) self.assertEqual(result, StatusType.failed) with self.assertRaises(AssertionError): with self.assertLogs('msrest.serialization', level='WARNING') as cm: result = d(StatusType, None) self.assertEqual(len(cm.output), 0) self.assertEqual(result, None) with self.assertLogs('msrest.serialization', level='WARNING') as cm: result = d(StatusType, "aborted") self.assertEqual(result, 'aborted') self.assertEqual(len(cm.output), 1) self.assertTrue("Deserializer is not able to find aborted as valid enum" in cm.output[0]) def test_response(self): data = { "properties": { "platformUpdateDomainCount": 5, "platformFaultDomainCount": 3, "virtualMachines": [] }, "id": "/subscriptions/abc-def-ghi-jklmnop/resourceGroups/test_mgmt_resource_test_resourcesea/providers/Microsoft.Compute/availabilitySets/pytest", "name": "pytest", "type": "Microsoft.Compute/availabilitySets", "location": "westus" } model = self.d('GenericResource', json.dumps(data), 'application/json') self.assertEqual(model.properties['platformFaultDomainCount'], 3) self.assertEqual(model.location, 'westus') class TestRuntimeSerialized(unittest.TestCase): class TestObj(Model): _attribute_map = { 'attr_a': {'key':'id', 'type':'str'}, 'attr_b': {'key':'AttrB', 'type':'int'}, 'attr_c': {'key':'Key_C', 'type': 'bool'}, 'attr_d': {'key':'AttrD', 'type':'[int]'}, 'attr_e': {'key':'AttrE', 'type': '{float}'}, 'attr_f': {'key':'AttrF', 'type': 'duration'}, 'attr_g': {'key':'properties.AttrG', 'type':'str'}, } def __init__(self, attr_a=None, attr_b=None, attr_c=None, attr_d=None, attr_e=None, attr_f=None, attr_g=None): self.attr_a = attr_a self.attr_b = attr_b self.attr_c = attr_c self.attr_d = attr_d self.attr_e = attr_e self.attr_f = attr_f self.attr_g = attr_g def __str__(self): return "Test_Object" def setUp(self): self.s = Serializer({'TestObj': self.TestObj}) return super(TestRuntimeSerialized, self).setUp() def test_validation_type(self): # https://github.com/Azure/msrest-for-python/issues/85 s = Serializer() s.query("filter", 186, "int", maximum=666) s.query("filter", "186", "int", maximum=666) class TestValidationObj(Model): _attribute_map = { 'attr_a': {'key':'id', 'type':'int'}, } _validation = { 'attr_a': {'maximum': 4294967295, 'minimum': 1}, } test_obj = TestValidationObj() test_obj.attr_a = 186 errors_found = test_obj.validate() assert not errors_found test_obj.attr_a = '186' errors_found = test_obj.validate() assert not errors_found def test_validation_flag(self): s = Serializer() s.client_side_validation = True with self.assertRaises(ValidationError): s.query("filter", "", "str", min_length=666) with self.assertRaises(ValidationError): s.url("filter", "", "str", min_length=666) with self.assertRaises(ValidationError): 
s.header("filter", "", "str", min_length=666) test_obj = self.TestObj() self.TestObj._validation = { 'attr_b': {'required': True}, } test_obj.attr_b = None with self.assertRaises(ValidationError): self.s.body(test_obj, 'TestObj') s.client_side_validation = False s.query("filter", "", "str", min_length=666) s.url("filter", "", "str", min_length=666) s.header("filter", "", "str", min_length=666) s.body(test_obj, 'TestObj') def test_serialize_query(self): s = Serializer() assert s.query("filter", "boo", "str") == "boo" assert s.query("filter", "boo,bar", "str", skip_quote=True) == "boo,bar" assert s.query("filter", 12, "int") == "12" assert s.query("filter", [1, 2, 3], "[int]", div=",") == "1,2,3" assert s.query("filter", ['a', 'b', 'c'], "[str]", div=",") == "a,b,c" assert s.query("filter", ['a', None, 'c'], "[str]", div=",") == "a,,c" assert s.query("filter", [',', ',', ','], "[str]", div=",") == "%2C,%2C,%2C" assert s.query("filter", [',', ',', ','], "[str]", div="|", skip_quote=True) == ",|,|," def test_serialize_custom_model(self): class CustomSample(Model): _validation = { 'a': {'required': True}, } _attribute_map = { 'a': {'key': 'a', 'type': 'str'}, } def __init__(self, a): self.a = a s = Serializer() model = CustomSample("helloworld") serialized = s._serialize(model) assert serialized is not None assert isinstance(serialized, dict) assert serialized['a'] == "helloworld" def test_serialize_direct_model(self): testobj = self.TestObj() testobj.attr_a = "myid" testobj.attr_b = 42 testobj.attr_c = True testobj.attr_d = [1,2,3] testobj.attr_e = {"pi": 3.14} testobj.attr_f = timedelta(1) testobj.attr_g = "RecursiveObject" serialized = testobj.serialize() expected = { "id": "myid", "AttrB": 42, "Key_C": True, "AttrD": [1,2,3], "AttrE": {"pi": 3.14}, "AttrF": "P1D", "properties": { "AttrG": "RecursiveObject" } } self.assertDictEqual(expected, serialized) jsonable = json.dumps(testobj.as_dict()) expected = { "attr_a": "myid", "attr_b": 42, "attr_c": True, "attr_d": [1,2,3], "attr_e": {"pi": 3.14}, "attr_f": "P1D", "attr_g": "RecursiveObject" } self.assertDictEqual(expected, json.loads(jsonable)) jsonable = json.dumps(testobj.as_dict(key_transformer=last_restapi_key_transformer)) expected = { "id": "myid", "AttrB": 42, "Key_C": True, "AttrD": [1,2,3], "AttrE": {"pi": 3.14}, "AttrF": "P1D", "AttrG": "RecursiveObject" } self.assertDictEqual(expected, json.loads(jsonable)) jsonable = json.dumps(testobj.as_dict(key_transformer=lambda x,y,z: (x+"XYZ", z))) expected = { "attr_aXYZ": "myid", "attr_bXYZ": 42, "attr_cXYZ": True, "attr_dXYZ": [1,2,3], "attr_eXYZ": {"pi": 3.14}, "attr_fXYZ": "P1D", "attr_gXYZ": "RecursiveObject" } self.assertDictEqual(expected, json.loads(jsonable)) def value_override(attr, attr_desc, value): key, value = last_restapi_key_transformer(attr, attr_desc, value) if key == "AttrB": value += 1 return key, value jsonable = json.dumps(testobj.as_dict(key_transformer=value_override)) expected = { "id": "myid", "AttrB": 43, "Key_C": True, "AttrD": [1,2,3], "AttrE": {"pi": 3.14}, "AttrF": "P1D", "AttrG": "RecursiveObject" } self.assertDictEqual(expected, json.loads(jsonable)) def test_validate(self): # Assert not necessary, should not raise exception self.s.validate("simplestring", "StringForLog", pattern="^[a-z]+$") self.s.validate(u"UTF8ééééé", "StringForLog", pattern=r"^[\w]+$") def test_model_validate(self): class TestObj(Model): _validation = { 'name': {'min_length': 3}, 'display_names': {'min_items': 2}, } _attribute_map = { 'name': {'key':'name', 'type':'str'}, 'rec_list': 
{'key':'rec_list', 'type':'[[TestObj]]'}, 'rec_dict': {'key':'rec_dict', 'type':'{{TestObj}}'}, 'display_names': {'key': 'display_names', 'type': '[str]'}, 'obj': {'key':'obj', 'type':'TestObj'}, } def __init__(self, name): self.name = name self.rec_list = None self.rec_dict = None self.display_names = None self.obj = None obj = TestObj("ab") obj.rec_list = [[TestObj("bc")]] obj.rec_dict = {"key": {"key": TestObj("bc")}} obj.display_names = ["ab"] obj.obj = TestObj("ab") broken_rules = obj.validate() self.assertEqual(5, len(broken_rules)) str_broken_rules = [str(v) for v in broken_rules] self.assertIn( "Parameter 'TestObj.name' must have length greater than 3.", str_broken_rules ) self.assertIn( "Parameter 'TestObj.display_names' must contain at least 2 items.", str_broken_rules ) def test_obj_serialize_none(self): """Test that serialize None in object is still None. """ obj = self.s.serialize_object({'test': None}) self.assertIsNone(obj['test']) def test_obj_with_malformed_map(self): """ Test serializing an object with a malformed attribute_map. """ test_obj = type("BadTestObj", (Model,), {"_attribute_map":None}) with self.assertRaises(SerializationError): self.s._serialize(test_obj) test_obj._attribute_map = {"attr":"val"} with self.assertRaises(SerializationError): self.s._serialize(test_obj) test_obj._attribute_map = {"attr":{"val":1}} with self.assertRaises(SerializationError): self.s._serialize(test_obj) def test_obj_with_mismatched_map(self): """ Test serializing an object with mismatching attributes and map. """ test_obj = type("BadTestObj", (Model,), {"_attribute_map":None}) test_obj._attribute_map = {"abc":{"key":"ABC", "type":"str"}} with self.assertRaises(SerializationError): self.s._serialize(test_obj) def test_attr_enum(self): """ Test serializing with Enum. """ test_obj = type("TestEnumObj", (Model,), {"_attribute_map":None}) test_obj._attribute_map = { "abc":{"key":"ABC", "type":"TestEnum"} } class TestEnum(Enum): val = "Value" t = test_obj() t.abc = TestEnum.val serialized = self.s._serialize(t) expected = { "ABC": "Value" } self.assertEqual(expected, serialized) class TestEnum2(Enum): val2 = "Value2" t.abc = TestEnum2.val2 serializer = Serializer({ 'TestEnum': TestEnum, 'TestEnum2': TestEnum2 }) with self.assertRaises(SerializationError): serializer._serialize(t) serializer = Serializer({ 'TestEnumObj': test_obj, 'TestEnum': TestEnum }) serialized = serializer.body({ 'abc': TestEnum.val }, 'TestEnumObj') expected = { 'ABC': 'Value' } self.assertEqual(expected, serialized) # model-as-string=True test_obj._attribute_map = { "abc":{"key":"ABC", "type":"str"} } serialized = serializer.body({ 'abc': TestEnum.val }, 'TestEnumObj') expected = { 'ABC': 'Value' } self.assertEqual(expected, serialized) def test_attr_none(self): """ Test serializing an object with None attributes. """ test_obj = self.TestObj() message = self.s._serialize(test_obj) self.assertIsInstance(message, dict) self.assertFalse('id' in message) def test_attr_int(self): """ Test serializing an object with Int attributes. 
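An int attribute accepts both an int and a numeric string; a non-numeric string raises SerializationError, and a missing required value fails validation.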
""" test_obj = self.TestObj() self.TestObj._validation = { 'attr_b': {'required': True}, } test_obj.attr_b = None with self.assertRaises(ValidationError): self.s.body(test_obj, 'TestObj') validation_errors = test_obj.validate() self.assertEqual(len(validation_errors), 1) test_obj.attr_b = 25 message = self.s._serialize(test_obj) self.assertEqual(message['AttrB'], int(test_obj.attr_b)) test_obj.attr_b = "34534" message = self.s._serialize(test_obj) self.assertEqual(message['AttrB'], int(test_obj.attr_b)) test_obj.attr_b = "NotANumber" with self.assertRaises(SerializationError): self.s._serialize(test_obj) self.TestObj._validation = {} def test_attr_str(self): """ Test serializing an object with Str attributes. """ test_obj = self.TestObj() self.TestObj._validation = { 'attr_a': {'required': True}, } test_obj.attr_a = None with self.assertRaises(ValidationError): self.s.body(test_obj, 'TestObj') validation_errors = test_obj.validate() self.assertEqual(len(validation_errors), 1) self.TestObj._validation = {} test_obj.attr_a = "TestString" message = self.s._serialize(test_obj) self.assertEqual(message['id'], str(test_obj.attr_a)) test_obj.attr_a = 1234 message = self.s._serialize(test_obj) self.assertEqual(message['id'], str(test_obj.attr_a)) test_obj.attr_a = list() message = self.s._serialize(test_obj) self.assertEqual(message['id'], str(test_obj.attr_a)) test_obj.attr_a = [1] message = self.s._serialize(test_obj) self.assertEqual(message['id'], str(test_obj.attr_a)) def test_attr_bool(self): """ Test serializing an object with bool attributes. """ test_obj = self.TestObj() test_obj.attr_c = True message = self.s._serialize(test_obj) self.assertEqual(message['Key_C'], True) test_obj.attr_c = "" message = self.s._serialize(test_obj) self.assertTrue('Key_C' in message) test_obj.attr_c = None message = self.s._serialize(test_obj) self.assertFalse('Key_C' in message) test_obj.attr_c = "NotEmpty" message = self.s._serialize(test_obj) self.assertEqual(message['Key_C'], True) def test_attr_sequence(self): """ Test serializing a sequence. 
""" test_obj = ["A", "B", "C"] output = self.s._serialize(test_obj, '[str]', div='|') self.assertEqual(output, "|".join(test_obj)) test_obj = [1,2,3] output = self.s._serialize(test_obj, '[str]', div=',') self.assertEqual(output, ",".join([str(i) for i in test_obj])) def test_attr_duration(self): """ Test serializing a duration """ test_obj = self.TestObj() test_obj.attr_f = timedelta(days=1) message = self.s._serialize(test_obj) self.assertEqual("P1D", message["AttrF"]) test_obj = self.TestObj() test_obj.attr_f = isodate.parse_duration("P3Y6M4DT12H30M5S") message = self.s.body({ "attr_f": isodate.parse_duration("P3Y6M4DT12H30M5S")}, 'TestObj') self.assertEqual("P3Y6M4DT12H30M5S", message["AttrF"]) def test_attr_list_simple(self): """ Test serializing an object with simple-typed list attributes """ test_obj = self.TestObj() test_obj.attr_d = [] message = self.s._serialize(test_obj) self.assertEqual(message['AttrD'], test_obj.attr_d) test_obj.attr_d = [1,2,3] message = self.s._serialize(test_obj) self.assertEqual(message['AttrD'], test_obj.attr_d) test_obj.attr_d = ["1","2","3"] message = self.s._serialize(test_obj) self.assertEqual(message['AttrD'], [int(i) for i in test_obj.attr_d]) test_obj.attr_d = ["test","test2","test3"] with self.assertRaises(SerializationError): self.s._serialize(test_obj) test_obj.attr_d = "NotAList" with self.assertRaises(SerializationError): self.s._serialize(test_obj) def test_empty_list(self): input = [] output = self.s._serialize(input, '[str]') self.assertEqual(output, input) def test_attr_list_complex(self): """ Test serializing an object with a list of complex objects as an attribute. """ list_obj = type("ListObj", (Model,), {"_attribute_map":None, "_validation":{}, "abc":None}) list_obj._attribute_map = {"abc":{"key":"ABC", "type":"int"}} list_obj.abc = "123" test_obj = type("CmplxTestObj", (Model,), {"_attribute_map":None, "_validation":{}, "test_list":None}) test_obj._attribute_map = {"test_list":{"key":"_list", "type":"[ListObj]"}} test_obj.test_list = [list_obj] message = self.s._serialize(test_obj) self.assertEqual(message, {'_list':[{'ABC':123}]}) list_obj = type("BadListObj", (Model,), {"map":None}) test_obj._attribute_map = {"test_list":{"key":"_list", "type":"[BadListObj]"}} test_obj.test_list = [list_obj] s = self.s._serialize(test_obj) self.assertEqual(s, {'_list':[{}]}) def test_attr_dict_simple(self): """ Test serializing an object with a simple dictionary attribute. 
""" test_obj = self.TestObj() test_obj.attr_e = {"value": 3.14} message = self.s._serialize(test_obj) self.assertEqual(message['AttrE']['value'], float(test_obj.attr_e["value"])) test_obj.attr_e = {1: "3.14"} message = self.s._serialize(test_obj) self.assertEqual(message['AttrE']['1'], float(test_obj.attr_e[1])) test_obj.attr_e = "NotADict" with self.assertRaises(SerializationError): self.s._serialize(test_obj) with pytest.raises(ValidationError) as err: test_obj.validate() assert "Parameter 'attr_e' must be of type 'dict[str, float]'" in str(err.value) test_obj.attr_e = {"value": "NotAFloat"} with self.assertRaises(SerializationError): self.s._serialize(test_obj) def test_serialize_datetime(self): date_obj = isodate.parse_datetime('2015-01-01T00:00:00') date_str = Serializer.serialize_iso(date_obj) self.assertEqual(date_str, '2015-01-01T00:00:00.000Z') date_obj = isodate.parse_datetime('1999-12-31T23:59:59-12:00') date_str = Serializer.serialize_iso(date_obj) self.assertEqual(date_str, '2000-01-01T11:59:59.000Z') with self.assertRaises(SerializationError): date_obj = isodate.parse_datetime('9999-12-31T23:59:59-12:00') date_str = Serializer.serialize_iso(date_obj) with self.assertRaises(SerializationError): date_obj = isodate.parse_datetime('0001-01-01T00:00:00+23:59') date_str = Serializer.serialize_iso(date_obj) date_obj = isodate.parse_datetime("2015-06-01T16:10:08.0121-07:00") date_str = Serializer.serialize_iso(date_obj) self.assertEqual(date_str, '2015-06-01T23:10:08.0121Z') date_obj = datetime.min date_str = Serializer.serialize_iso(date_obj) self.assertEqual(date_str, '0001-01-01T00:00:00.000Z') date_obj = datetime.max date_str = Serializer.serialize_iso(date_obj) self.assertEqual(date_str, '9999-12-31T23:59:59.999999Z') date_obj = isodate.parse_datetime('2012-02-24T00:53:52.000001Z') date_str = Serializer.serialize_iso(date_obj) assert date_str == '2012-02-24T00:53:52.000001Z' date_obj = isodate.parse_datetime('2012-02-24T00:53:52.780Z') date_str = Serializer.serialize_iso(date_obj) assert date_str == '2012-02-24T00:53:52.780Z' def test_serialize_time(self): time_str = Serializer.serialize_time(time(11,22,33)) assert time_str == "11:22:33" time_str = Serializer.serialize_time(time(11,22,33,444)) assert time_str == "11:22:33.444" def test_serialize_primitive_types(self): a = self.s.serialize_data(1, 'int') self.assertEqual(a, 1) b = self.s.serialize_data(True, 'bool') self.assertEqual(b, True) c = self.s.serialize_data('True', 'str') self.assertEqual(c, 'True') d = self.s.serialize_data(100.0123, 'float') self.assertEqual(d, 100.0123) def test_serialize_object(self): a = self.s.body(1, 'object') self.assertEqual(a, 1) b = self.s.body(True, 'object') self.assertEqual(b, True) c = self.s.serialize_data('True', 'object') self.assertEqual(c, 'True') d = self.s.serialize_data(100.0123, 'object') self.assertEqual(d, 100.0123) e = self.s.serialize_data({}, 'object') self.assertEqual(e, {}) f = self.s.body({"test":"data"}, 'object') self.assertEqual(f, {"test":"data"}) g = self.s.body({"test":{"value":"data"}}, 'object') self.assertEqual(g, {"test":{"value":"data"}}) h = self.s.serialize_data({"test":self.TestObj('id')}, 'object') self.assertEqual(h, {"test":{'id': 'id'}}) i = self.s.serialize_data({"test":[1,2,3,4,5]}, 'object') self.assertEqual(i, {"test":[1,2,3,4,5]}) def test_serialize_empty_iter(self): a = self.s.serialize_dict({}, 'int') self.assertEqual(a, {}) b = self.s.serialize_iter([], 'int') self.assertEqual(b, []) def test_serialize_str_as_iter(self): with 
self.assertRaises(SerializationError): self.s.serialize_iter("I am a string", 'str') def test_serialize_int_as_iter_with_div(self): # https://github.com/Azure/azure-sdk-for-python/issues/4501 assert self.s.serialize_iter([1,2,3,4], "int", ",") == "1,2,3,4" def test_serialize_from_dict_datetime(self): class DateTimeTest(Model): _attribute_map = { 'birthday':{'key':'birthday','type':'iso-8601'}, } def __init__(self, birthday): self.birthday = birthday serializer = Serializer({ 'DateTimeTest': DateTimeTest }) mydate = serializer.body( {'birthday': datetime(1980, 12, 27)}, 'DateTimeTest' ) assert mydate["birthday"] == "1980-12-27T00:00:00.000Z" def test_serialize_json_obj(self): class ComplexId(Model): _validation = {} _attribute_map = {'id':{'key':'id','type':'int'}, 'name':{'key':'name','type':'str'}, 'age':{'key':'age','type':'float'}, 'male':{'key':'male','type':'bool'}, 'birthday':{'key':'birthday','type':'iso-8601'}, 'anniversary':{'key':'anniversary', 'type':'iso-8601'}} id = 1 name = "Joey" age = 23.36 male = True birthday = '1992-01-01T00:00:00.000Z' anniversary = isodate.parse_datetime('2013-12-08T00:00:00') class ComplexJson(Model): _validation = {} _attribute_map = {'p1':{'key':'p1','type':'str'}, 'p2':{'key':'p2','type':'str'}, 'top_date':{'key':'top_date', 'type':'iso-8601'}, 'top_dates':{'key':'top_dates', 'type':'[iso-8601]'}, 'insider':{'key':'insider','type':'{iso-8601}'}, 'top_complex':{'key':'top_complex','type':'ComplexId'}} p1 = 'value1' p2 = 'value2' top_date = isodate.parse_datetime('2014-01-01T00:00:00') top_dates = [isodate.parse_datetime('1900-01-01T00:00:00'), isodate.parse_datetime('1901-01-01T00:00:00')] insider = { 'k1': isodate.parse_datetime('2015-01-01T00:00:00'), 'k2': isodate.parse_datetime('2016-01-01T00:00:00'), 'k3': isodate.parse_datetime('2017-01-01T00:00:00')} top_complex = ComplexId() message =self.s._serialize(ComplexJson()) output = { 'p1': 'value1', 'p2': 'value2', 'top_date': '2014-01-01T00:00:00.000Z', 'top_dates': [ '1900-01-01T00:00:00.000Z', '1901-01-01T00:00:00.000Z' ], 'insider': { 'k1': '2015-01-01T00:00:00.000Z', 'k2': '2016-01-01T00:00:00.000Z', 'k3': '2017-01-01T00:00:00.000Z' }, 'top_complex': { 'id': 1, 'name': 'Joey', 'age': 23.36, 'male': True, 'birthday': '1992-01-01T00:00:00.000Z', 'anniversary': '2013-12-08T00:00:00.000Z', } } self.maxDiff = None self.assertEqual(message, output) message = ComplexJson().serialize() self.assertEqual(message, output) def test_polymorphic_serialization(self): self.maxDiff = None class Zoo(Model): _attribute_map = { "animals":{"key":"Animals", "type":"[Animal]"}, } def __init__(self, animals=None): self.animals = animals class Animal(Model): _attribute_map = { "name":{"key":"Name", "type":"str"}, "d_type":{"key":"dType", "type":"str"} } _subtype_map = { 'd_type': {"cat":"Cat", "dog":"Dog"} } def __init__(self, name=None): self.name = name class Dog(Animal): _attribute_map = { "name":{"key":"Name", "type":"str"}, "likes_dog_food":{"key":"likesDogFood","type":"bool"}, "d_type":{"key":"dType", "type":"str"} } def __init__(self, name=None, likes_dog_food=None): self.likes_dog_food = likes_dog_food super(Dog, self).__init__(name) self.d_type = 'dog' class Cat(Animal): _attribute_map = { "name":{"key":"Name", "type":"str"}, "likes_mice":{"key":"likesMice","type":"bool"}, "dislikes":{"key":"dislikes","type":"Animal"}, "d_type":{"key":"dType", "type":"str"} } _subtype_map = { "d_type":{"siamese":"Siamese"} } def __init__(self, name=None, likes_mice=None, dislikes = None): self.likes_mice = likes_mice 
self.dislikes = dislikes super(Cat, self).__init__(name) self.d_type = 'cat' class Siamese(Cat): _attribute_map = { "name":{"key":"Name", "type":"str"}, "likes_mice":{"key":"likesMice","type":"bool"}, "dislikes":{"key":"dislikes","type":"Animal"}, "color":{"key":"Color", "type":"str"}, "d_type":{"key":"dType", "type":"str"} } def __init__(self, name=None, likes_mice=None, dislikes = None, color=None): self.color = color super(Siamese, self).__init__(name, likes_mice, dislikes) self.d_type = 'siamese' message = { "Animals": [ { "dType": "dog", "likesDogFood": True, "Name": "Fido" }, { "dType": "cat", "likesMice": False, "dislikes": { "dType": "dog", "likesDogFood": True, "Name": "Angry" }, "Name": "Felix" }, { "dType": "siamese", "Color": "grey", "likesMice": True, "Name": "Finch" }]} zoo = Zoo() angry = Dog() angry.name = "Angry" angry.likes_dog_food = True fido = Dog() fido.name = "Fido" fido.likes_dog_food = True felix = Cat() felix.name = "Felix" felix.likes_mice = False felix.dislikes = angry finch = Siamese() finch.name = "Finch" finch.color = "grey" finch.likes_mice = True zoo.animals = [fido, felix, finch] serialized = self.s._serialize(zoo) self.assertEqual(serialized, message) old_dependencies = self.s.dependencies self.s.dependencies = { 'Zoo': Zoo, 'Animal': Animal, 'Dog': Dog, 'Cat': Cat, 'Siamese': Siamese } serialized = self.s.body({ "animals": [{ "dType": "dog", "likes_dog_food": True, "name": "Fido" },{ "dType": "cat", "likes_mice": False, "dislikes": { "dType": "dog", "likes_dog_food": True, "name": "Angry" }, "name": "Felix" },{ "dType": "siamese", "color": "grey", "likes_mice": True, "name": "Finch" }] }, "Zoo") self.assertEqual(serialized, message) self.s.dependencies = old_dependencies def test_key_type(self): class TestKeyTypeObj(Model): _validation = {} _attribute_map = { 'attr_a': {'key':'attr_a', 'type':'int'}, 'attr_b': {'key':'id', 'type':'int'}, 'attr_c': {'key':'KeyC', 'type': 'int'}, 'attr_d': {'key':'properties.KeyD', 'type': 'int'}, } old_dependencies = self.s.dependencies self.s.dependencies = { 'TestKeyTypeObj': TestKeyTypeObj, } serialized = self.s.body({ "attr_a": 1, "id": 2, "keyc": 3, "keyd": 4 }, "TestKeyTypeObj") message = { "attr_a": 1, "id": 2, "KeyC": 3, "properties": { "KeyD": 4 } } self.assertEqual(serialized, message) self.s.dependencies = old_dependencies def test_additional_properties_no_send(self): class AdditionalTest(Model): _attribute_map = { "name": {"key":"Name", "type":"str"} } def __init__(self, name=None): self.name = name o = AdditionalTest( name='test' ) o.additional_properties={ "PropInt": 2, "PropStr": "AdditionalProperty", "PropArray": [1,2,3], "PropDict": {"a": "b"} } expected_message = { "Name": "test" } s = Serializer({'AdditionalTest': AdditionalTest}) serialized = s.body(o, 'AdditionalTest') self.assertEqual(serialized, expected_message) def test_additional_properties_manual(self): class AdditionalTest(Model): _attribute_map = { "name": {"key":"Name", "type":"str"} } def __init__(self, name=None): self.name = name AdditionalTest.enable_additional_properties_sending() o = AdditionalTest( name='test' ) o.additional_properties={ "PropInt": 2, "PropStr": "AdditionalProperty", "PropArray": [1,2,3], "PropDict": {"a": "b"} } expected_message = { "Name": "test", "PropInt": 2, "PropStr": "AdditionalProperty", "PropArray": [1,2,3], "PropDict": {"a": "b"} } s = Serializer({'AdditionalTest': AdditionalTest}) serialized = s.body(o, 'AdditionalTest') self.assertEqual(serialized, expected_message) def test_additional_properties(self): 
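# Declaring 'additional_properties' with an empty key ('') serializes its entries at the top level of the payload, next to the regular attributes.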
class AdditionalTest(Model): _attribute_map = { "name": {"key":"Name", "type":"str"}, 'additional_properties': {'key': '', 'type': '{object}'} } def __init__(self, name=None, additional_properties=None): self.name = name self.additional_properties = additional_properties o = AdditionalTest( name='test', additional_properties={ "PropInt": 2, "PropStr": "AdditionalProperty", "PropArray": [1,2,3], "PropDict": {"a": "b"} } ) expected_message = { "Name": "test", "PropInt": 2, "PropStr": "AdditionalProperty", "PropArray": [1,2,3], "PropDict": {"a": "b"} } s = Serializer({'AdditionalTest': AdditionalTest}) serialized = s.body(o, 'AdditionalTest') self.assertEqual(serialized, expected_message) def test_additional_properties_with_auto_model(self): class AdditionalTest(Model): _attribute_map = { "name": {"key":"Name", "type":"str"}, "display_name": {"key":"DisplayName", "type":"str"}, 'additional_properties': {'key': '', 'type': '{object}'} } o = { 'name': 'test', 'display_name': "display_name" } expected_message = { "Name": "test", "DisplayName": "display_name", } s = Serializer({'AdditionalTest': AdditionalTest}) serialized = s.body(o, 'AdditionalTest') self.assertEqual(serialized, expected_message) def test_additional_properties_declared(self): class AdditionalTest(Model): _attribute_map = { "name": {"key":"Name", "type":"str"}, 'additional_properties': {'key': 'AddProp', 'type': '{object}'} } def __init__(self, name=None, additional_properties=None): self.name = name self.additional_properties = additional_properties o = AdditionalTest( name='test', additional_properties={ "PropInt": 2, "PropStr": "AdditionalProperty", "PropArray": [1,2,3], "PropDict": {"a": "b"} } ) expected_message = { "Name": "test", "AddProp": { "PropInt": 2, "PropStr": "AdditionalProperty", "PropArray": [1,2,3], "PropDict": {"a": "b"} } } s = Serializer({'AdditionalTest': AdditionalTest}) serialized = s.body(o, 'AdditionalTest') self.assertEqual(serialized, expected_message) # Make it declared as a property AND readonly AdditionalTest._validation = { 'additional_properties': {'readonly': True} } expected_message = { "Name": "test" } s = Serializer({'AdditionalTest': AdditionalTest}) serialized = s.body(o, 'AdditionalTest') self.assertEqual(serialized, expected_message) def test_long_as_type_object(self): """Test irrelevant on Python 3. But still doing it to test regresssion. https://github.com/Azure/msrest-for-python/pull/121 """ try: long_type = long except NameError: long_type = int s = Serializer() assert s.serialize_data(long_type(1), 'object') == long_type(1) class TestModel(Model): _attribute_map = {'data': {'key': 'data', 'type': 'object'}} m = TestModel(data = {'id': long_type(1)}) serialized = m.serialize() assert serialized == { 'data': {'id': long_type(1)} } def test_unicode_as_type_object(self): """Test irrelevant on Python 3. But still doing it to test regresssion. 
https://github.com/Azure/msrest-for-python/issue/221 """ s = Serializer() assert s.serialize_data(u"\ua015", 'object') == u"\ua015" class TestModel(Model): _attribute_map = {'data': {'key': 'data', 'type': 'object'}} m = TestModel(data = {'id': u"\ua015"}) serialized = m.serialize() assert serialized == { 'data': {'id': u"\ua015"} } def test_datetime_types_as_type_object(self): """https://github.com/Azure/msrest-for-python/issues/223 """ class TestModel(Model): _attribute_map = {'data': {'key': 'data', 'type': 'object'}} m = TestModel(data = { 'datetime': isodate.parse_datetime('2012-02-24T00:53:52.780Z'), 'date': date(2019,5,1), 'time': time(11,12,13), 'timedelta': timedelta(56) }) serialized = m.serialize() assert serialized['data'] == { 'datetime': '2012-02-24T00:53:52.780Z', 'date': '2019-05-01', 'time': '11:12:13', 'timedelta': 'P56D' } def test_decimal_types_as_type_object(self): """https://github.com/Azure/msrest-for-python/issues/223 """ class TestModel(Model): _attribute_map = {'data': {'key': 'data', 'type': 'object'}} m = TestModel(data = { 'decimal': Decimal('1.1'), }) serialized = m.serialize() assert serialized['data'] == { 'decimal': 1.1 } def test_json_with_xml_map(self): basic_json = {'age': 37, 'country': 'france'} class XmlModel(Model): _attribute_map = { 'age': {'key': 'age', 'type': 'int', 'xml':{'name': 'Age'}}, 'country': {'key': 'country', 'type': 'str', 'xml':{'name': 'country', 'attr': True}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( age=37, country="france", ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body(mymodel, 'XmlModel', is_xml=False) assert rawxml==basic_json class TestRuntimeDeserialized(unittest.TestCase): class TestObj(Model): _validation = {} _attribute_map = { 'attr_a': {'key':'id', 'type':'str'}, 'attr_b': {'key':'AttrB', 'type':'int'}, 'attr_c': {'key':'Key_C', 'type': 'bool'}, 'attr_d': {'key':'AttrD', 'type':'[int]'}, 'attr_e': {'key':'AttrE', 'type': '{float}'}, 'attr_f': {'key':'AttrF', 'type': '[[str]]'} } _header_map = { 'client_request_id': {'key': 'client-request-id', 'type':'str'}, 'e_tag': {'key': 'etag', 'type':'str'}, } _response_map = { 'status_code': {'key':'status_code', 'type':'str'} } def setUp(self): self.d = Deserializer() return super(TestRuntimeDeserialized, self).setUp() def test_cls_method_deserialization(self): json_data = { 'id': 'myid', 'AttrB': 42, 'Key_C': True, 'AttrD': [1,2,3], 'AttrE': {'pi': 3.14}, 'AttrF': [['internal', 'list', 'of', 'strings']] } def assert_model(inst): self.assertEqual(inst.attr_a, 'myid') self.assertEqual(inst.attr_b, 42) self.assertEqual(inst.attr_c, True) self.assertEqual(inst.attr_d, [1,2,3]) self.assertEqual(inst.attr_e, {'pi': 3.14}) self.assertEqual(inst.attr_f, [['internal', 'list', 'of', 'strings']]) model_instance = self.TestObj.from_dict(json_data) assert_model(model_instance) # Get an attribute version of this model attr_data = { 'attr_a': 'myid', 'attr_b': 42, 'attr_c': True, 'attr_d': [1,2,3], 'attr_e': {'pi': 3.14}, 'attr_f': [['internal', 'list', 'of', 'strings']] } self.TestObj.from_dict(attr_data) assert_model(model_instance) def test_twice_key_scenario(self): # Te reproduce the initial bug, you need a attribute named after the last part # of a flattening JSON from another attribute (here type) # https://github.com/Azure/azure-sdk-for-python/issues/11422 # Issue happend where searching for "type2", since we found a match in both "type2" and "type" keys class LocalModel(Model): _attribute_map = { 'id': {'key': 'id', 'type': 'int'}, 'type': {'key': 
'type_dont_matter_not_used', 'type': 'str'}, 'type2': {'key': 'properties.type', 'type': 'str'}, } def __init__(self, **kwargs): super(LocalModel, self).__init__(**kwargs) raw = { 'id': 42, 'type': "type", 'type2': "type2" } m = LocalModel.from_dict(raw) assert m.id == 42 assert m.type == "type" assert m.type2 == "type2" def test_array_deserialize(self): result = self.d('[str]', ["a","b"]) assert result == ['a','b'] def test_personalize_deserialization(self): class TestDurationObj(Model): _attribute_map = { 'attr_a': {'key':'attr_a', 'type':'duration'}, } with self.assertRaises(DeserializationError): obj = TestDurationObj.from_dict({ "attr_a": "00:00:10" }) def duration_rest_key_extractor(attr, attr_desc, data): value = rest_key_extractor(attr, attr_desc, data) if attr == "attr_a": # Stupid parsing, this is just a test return "PT"+value[-2:]+"S" obj = TestDurationObj.from_dict( {"attr_a": "00:00:10"}, key_extractors=[duration_rest_key_extractor] ) self.assertEqual(timedelta(seconds=10), obj.attr_a) def test_robust_deserialization(self): class TestKeyTypeObj(Model): _validation = {} _attribute_map = { 'attr_a': {'key':'attr_a', 'type':'int'}, 'attr_b': {'key':'id', 'type':'int'}, 'attr_c': {'key':'KeyC', 'type': 'int'}, 'attr_d': {'key':'properties.KeyD', 'type': 'int'}, } obj = TestKeyTypeObj.from_dict({ "attr_a": 1, "id": 2, "keyc": 3, "keyd": 4 }) self.assertEqual(1, obj.attr_a) self.assertEqual(2, obj.attr_b) self.assertEqual(3, obj.attr_c) self.assertEqual(4, obj.attr_d) obj = TestKeyTypeObj.from_dict({ "attr_a": 1, "id": 2, "keyc": 3, "properties": { "KeyD": 4 } }) self.assertEqual(1, obj.attr_a) self.assertEqual(2, obj.attr_b) self.assertEqual(3, obj.attr_c) self.assertEqual(4, obj.attr_d) # This one used to raise an exception, but after https://github.com/Azure/msrest-for-python/pull/204 # we decide to accept it with log warning obj = TestKeyTypeObj.from_dict({ "attr_a": 1, "attr_b": 12, # Conflict with "id" "id": 14, # Conflict with "attr_b" "keyc": 3, "keyd": 4 }) self.assertEqual(1, obj.attr_a) self.assertEqual(12, obj.attr_b) # from_dict will prioritize attribute syntax self.assertEqual(3, obj.attr_c) self.assertEqual(4, obj.attr_d) def test_basic_deserialization(self): class TestObj(Model): _validation = { 'name': {'min_length': 3}, } _attribute_map = { 'name': {'key':'RestName', 'type':'str'}, } def __init__(self, name): self.name = name obj = TestObj.from_dict({'name': 'ab'}) self.assertEqual('ab', obj.name) def test_deserialize_flattening(self): # https://github.com/Azure/msrest-for-python/issues/197 json_body = { "properties" : { "properties": None } } class ComputeResource(Model): _attribute_map = { 'properties': {'key': 'properties', 'type': 'VirtualMachine'}, } def __init__(self, properties=None, **kwargs): self.properties = properties class VirtualMachine(Model): _attribute_map = { 'virtual_machine_size': {'key': 'properties.virtualMachineSize', 'type': 'str'}, 'ssh_port': {'key': 'properties.sshPort', 'type': 'int'}, 'address': {'key': 'properties.address', 'type': 'str'}, 'administrator_account': {'key': 'properties.administratorAccount', 'type': 'VirtualMachineSshCredentials'}, } def __init__(self, **kwargs): super(VirtualMachine, self).__init__(**kwargs) self.virtual_machine_size = kwargs.get('virtual_machine_size', None) self.ssh_port = kwargs.get('ssh_port', None) self.address = kwargs.get('address', None) self.administrator_account = kwargs.get('administrator_account', None) d = Deserializer({ 'ComputeResource': ComputeResource, 'VirtualMachine': VirtualMachine, 
}) response = d(ComputeResource, json.dumps(json_body), 'application/json') def test_deserialize_storage(self): StorageAccount = storage_models.StorageAccount json_storage = { 'id': '/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/test_mgmt_storage_test_storage_accounts43b8102a/providers/Microsoft.Storage/storageAccounts/pyarmstorage43b8102a', 'kind': 'Storage', 'location': 'westus', 'name': 'pyarmstorage43b8102a', 'properties': { 'creationTime': '2017-07-19T23:19:21.7640412Z', 'primaryEndpoints': {'blob': 'https://pyarmstorage43b8102a.blob.core.windows.net/', 'file': 'https://pyarmstorage43b8102a.file.core.windows.net/', 'queue': 'https://pyarmstorage43b8102a.queue.core.windows.net/', 'table': 'https://pyarmstorage43b8102a.table.core.windows.net/'}, 'primaryLocation': 'westus', 'provisioningState': 'Succeeded', 'statusOfPrimary': 'available', 'supportsHttpsTrafficOnly': False}, 'sku': {'name': 'Standard_LRS', 'tier': 'Standard'}, 'tags': {}, 'type': 'Microsoft.Storage/storageAccounts'} storage_account = StorageAccount.deserialize(json_storage) self.assertEqual(storage_account.id, json_storage['id']) # basic self.assertEqual(storage_account.sku.name, storage_models.SkuName(json_storage['sku']['name'])) # Nested + enum self.assertEqual(storage_account.primary_location, json_storage['properties']['primaryLocation']) # Flatten json_storage_output = storage_account.serialize() self.assertEqual(len(json_storage_output), 3) # Only 3 keys are not readonly json_storage_output = storage_account.as_dict(key_transformer=full_restapi_key_transformer) self.assertListEqual( sorted(list(json_storage_output.keys())), sorted(list(json_storage.keys())) ) json_storage_output = storage_account.as_dict(keep_readonly=False, key_transformer=full_restapi_key_transformer) self.assertListEqual( sorted(list(json_storage_output.keys())), ['location', 'properties', 'tags'] ) def test_invalid_json(self): """ Test invalid JSON """ with self.assertRaises(DeserializationError): self.d("[str]", '["tata"]]', 'application/json') def test_non_obj_deserialization(self): """ Test direct deserialization of simple types. """ with self.assertRaises(DeserializationError): self.d("[str]", '', 'application/json') with self.assertRaises(DeserializationError): self.d("[str]", json.dumps(''), 'application/json') with self.assertRaises(DeserializationError): self.d("[str]", json.dumps({}), 'application/json') message = ["a","b","b"] response = self.d("[str]", json.dumps(message), 'application/json') self.assertEqual(response, message) with self.assertRaises(DeserializationError): self.d("[str]", json.dumps(12345), 'application/json') response = self.d('bool', json.dumps('true'), 'application/json') self.assertEqual(response, True) response = self.d('bool', json.dumps(1), 'application/json') self.assertEqual(response, True) with self.assertRaises(DeserializationError): self.d('bool', json.dumps("true1"), 'application/json') def test_obj_with_no_attr(self): """ Test deserializing an object with no attributes. """ class EmptyResponse(Model): _attribute_map = {} _header_map = {} deserialized = self.d(EmptyResponse, json.dumps({"a":"b"}), 'application/json') self.assertIsInstance(deserialized, EmptyResponse) def test_obj_with_malformed_map(self): """ Test deserializing an object with a malformed attributes_map. 
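A map that is None, maps an attribute to a plain string, or maps it to a dict without 'key'/'type' entries raises DeserializationError.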
""" class BadResponse(Model): _attribute_map = None def __init__(*args, **kwargs): pass with self.assertRaises(DeserializationError): self.d(BadResponse, json.dumps({"a":"b"}), 'application/json') class BadResponse(Model): _attribute_map = {"attr":"val"} def __init__(*args, **kwargs): pass with self.assertRaises(DeserializationError): self.d(BadResponse, json.dumps({"a":"b"}), 'application/json') class BadResponse(Model): _attribute_map = {"attr":{"val":1}} def __init__(*args, **kwargs): pass with self.assertRaises(DeserializationError): self.d(BadResponse, json.dumps({"a":"b"}), 'application/json') def test_attr_none(self): """ Test serializing an object with None attributes. """ response = self.d(self.TestObj, 'null', 'application/json') self.assertIsNone(response) def test_attr_int(self): """ Test deserializing an object with Int attributes. """ message = {'AttrB':'1234'} response = self.d(self.TestObj, json.dumps(message), 'application/json') self.assertTrue(hasattr(response, 'attr_b')) self.assertEqual(response.attr_b, int(message['AttrB'])) with self.assertRaises(DeserializationError): response = self.d(self.TestObj, json.dumps({'AttrB':'NotANumber'}), 'application/json') def test_attr_str(self): """ Test deserializing an object with Str attributes. """ message = {'id':'InterestingValue'} response = self.d(self.TestObj, json.dumps(message), 'application/json') self.assertTrue(hasattr(response, 'attr_a')) self.assertEqual(response.attr_a, message['id']) message = {'id':1234} response = self.d(self.TestObj, json.dumps(message), 'application/json') self.assertEqual(response.attr_a, str(message['id'])) message = {'id':list()} response = self.d(self.TestObj, json.dumps(message), 'application/json') self.assertEqual(response.attr_a, str(message['id'])) response = self.d(self.TestObj, json.dumps({'id':None}), 'application/json') self.assertEqual(response.attr_a, None) def test_attr_bool(self): """ Test deserializing an object with bool attributes. 
""" response = self.d(self.TestObj, json.dumps({'Key_C':True}), 'application/json') self.assertTrue(hasattr(response, 'attr_c')) self.assertEqual(response.attr_c, True) with self.assertRaises(DeserializationError): response = self.d(self.TestObj, json.dumps({'Key_C':[]}), 'application/json') response = self.d(self.TestObj, json.dumps({'Key_C':0}), 'application/json') self.assertEqual(response.attr_c, False) with self.assertRaises(DeserializationError): response = self.d(self.TestObj, json.dumps({'Key_C':"value"}), 'application/json') def test_attr_list_simple(self): """ Test deserializing an object with simple-typed list attributes """ response = self.d(self.TestObj, json.dumps({'AttrD': []}), 'application/json') deserialized_list = [d for d in response.attr_d] self.assertEqual(deserialized_list, []) message = {'AttrD': [1,2,3]} response = self.d(self.TestObj, json.dumps(message), 'application/json') deserialized_list = [d for d in response.attr_d] self.assertEqual(deserialized_list, message['AttrD']) message = {'AttrD': ["1","2","3"]} response = self.d(self.TestObj, json.dumps(message), 'application/json') deserialized_list = [d for d in response.attr_d] self.assertEqual(deserialized_list, [int(i) for i in message['AttrD']]) with self.assertRaises(DeserializationError): response = self.d(self.TestObj, json.dumps({'AttrD': ["test","test2","test3"]}), 'application/json') deserialized_list = [d for d in response.attr_d] with self.assertRaises(DeserializationError): response = self.d(self.TestObj, json.dumps({'AttrD': "NotAList"}), 'application/json') deserialized_list = [d for d in response.attr_d] self.assertListEqual(sorted(self.d("[str]", ["a", "b", "c"])), ["a", "b", "c"]) self.assertListEqual(sorted(self.d("[str]", {"a", "b", "c"})), ["a", "b", "c"]) def test_attr_list_in_list(self): """ Test deserializing a list of lists """ response = self.d(self.TestObj, json.dumps({'AttrF':[]}), 'application/json') self.assertTrue(hasattr(response, 'attr_f')) self.assertEqual(response.attr_f, []) response = self.d(self.TestObj, json.dumps({'AttrF':None}), 'application/json') self.assertTrue(hasattr(response, 'attr_f')) self.assertEqual(response.attr_f, None) response = self.d(self.TestObj, json.dumps({}), 'application/json') self.assertTrue(hasattr(response, 'attr_f')) self.assertEqual(response.attr_f, None) message = {'AttrF':[[]]} response = self.d(self.TestObj, json.dumps(message), 'application/json') self.assertTrue(hasattr(response, 'attr_f')) self.assertEqual(response.attr_f, message['AttrF']) message = {'AttrF':[[1,2,3], ['a','b','c']]} response = self.d(self.TestObj, json.dumps(message), 'application/json') self.assertTrue(hasattr(response, 'attr_f')) self.assertEqual(response.attr_f, [[str(i) for i in k] for k in message['AttrF']]) with self.assertRaises(DeserializationError): response = self.d(self.TestObj, json.dumps({'AttrF':[1,2,3]}), 'application/json') def test_attr_list_complex(self): """ Test deserializing an object with a list of complex objects as an attribute. 
""" class ListObj(Model): _attribute_map = {"abc":{"key":"ABC", "type":"int"}} class CmplxTestObj(Model): _response_map = {} _attribute_map = {'attr_a': {'key':'id', 'type':'[ListObj]'}} d = Deserializer({'ListObj':ListObj}) response = d(CmplxTestObj, json.dumps({"id":[{"ABC": "123"}]}), 'application/json') deserialized_list = list(response.attr_a) self.assertIsInstance(deserialized_list[0], ListObj) self.assertEqual(deserialized_list[0].abc, 123) def test_deserialize_object(self): a = self.d('object', 1) self.assertEqual(a, 1) b = self.d('object', True) self.assertEqual(b, True) c = self.d('object', 'True') self.assertEqual(c, 'True') d = self.d('object', 100.0123) self.assertEqual(d, 100.0123) e = self.d('object', {}) self.assertEqual(e, {}) f = self.d('object', {"test":"data"}) self.assertEqual(f, {"test":"data"}) g = self.d('object', {"test":{"value":"data"}}) self.assertEqual(g, {"test":{"value":"data"}}) with self.assertRaises(DeserializationError): self.d('object', {"test":self.TestObj()}) h = self.d('object', {"test":[1,2,3,4,5]}) self.assertEqual(h, {"test":[1,2,3,4,5]}) def test_deserialize_date(self): # https://github.com/OAI/OpenAPI-Specification/blob/4d5a749c365682e6718f5a78f113a64391911647/versions/2.0.md#data-types a = Deserializer.deserialize_date('2018-12-27') self.assertEqual(date(2018,12,27), a) with self.assertRaises(DeserializationError): a = Deserializer.deserialize_date('201O-18-90') def test_deserialize_time(self): a = Deserializer.deserialize_time('11:22:33') assert time(11,22,33) == a with self.assertRaises(DeserializationError): Deserializer.deserialize_time('1O:22:33') def test_deserialize_datetime(self): a = Deserializer.deserialize_iso('9999-12-31T23:59:59+23:59') utc = a.utctimetuple() self.assertEqual(utc.tm_year, 9999) self.assertEqual(utc.tm_mon, 12) self.assertEqual(utc.tm_mday, 31) self.assertEqual(utc.tm_hour, 0) self.assertEqual(utc.tm_min, 0) self.assertEqual(utc.tm_sec, 59) self.assertEqual(a.microsecond, 0) with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso('9999-12-31T23:59:59-23:59') a = Deserializer.deserialize_iso('1999-12-31T23:59:59-23:59') utc = a.utctimetuple() self.assertEqual(utc.tm_year, 2000) self.assertEqual(utc.tm_mon, 1) self.assertEqual(utc.tm_mday, 1) self.assertEqual(utc.tm_hour, 23) self.assertEqual(utc.tm_min, 58) self.assertEqual(utc.tm_sec, 59) self.assertEqual(a.microsecond, 0) a = Deserializer.deserialize_iso('0001-01-01T23:59:00+23:59') utc = a.utctimetuple() self.assertEqual(utc.tm_year, 1) self.assertEqual(utc.tm_mon, 1) self.assertEqual(utc.tm_mday, 1) self.assertEqual(utc.tm_hour, 0) self.assertEqual(utc.tm_min, 0) self.assertEqual(utc.tm_sec, 0) self.assertEqual(a.microsecond, 0) # Only supports microsecond precision up to 6 digits, and chop off the rest a = Deserializer.deserialize_iso('2018-01-20T18:35:24.666666312345Z') utc = a.utctimetuple() self.assertEqual(utc.tm_year, 2018) self.assertEqual(utc.tm_mon, 1) self.assertEqual(utc.tm_mday, 20) self.assertEqual(utc.tm_hour, 18) self.assertEqual(utc.tm_min, 35) self.assertEqual(utc.tm_sec, 24) self.assertEqual(a.microsecond, 666666) #with self.assertRaises(DeserializationError): # a = Deserializer.deserialize_iso('1996-01-01T23:01:54-22:66') #TODO with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso('1996-01-01T23:01:54-24:30') with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso('1996-01-01T23:01:78+00:30') with self.assertRaises(DeserializationError): a = 
Deserializer.deserialize_iso('1996-01-01T23:60:01+00:30') with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso('1996-01-01T24:01:01+00:30') #with self.assertRaises(DeserializationError): # a = Deserializer.deserialize_iso('1996-01-01t01:01:01/00:30') #TODO with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso('1996-01-01F01:01:01+00:30') with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso('2015-02-32') with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso('2015-22-01') with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso('2010-13-31') with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso('99999-12-31') with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso(True) with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso(2010) with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso(None) with self.assertRaises(DeserializationError): a = Deserializer.deserialize_iso('Happy New Year 2016') a = Deserializer.deserialize_iso('2012-02-24T00:53:52.780Z') utc = a.utctimetuple() self.assertEqual(utc.tm_year, 2012) self.assertEqual(utc.tm_mon, 2) self.assertEqual(utc.tm_mday, 24) self.assertEqual(utc.tm_hour, 0) self.assertEqual(utc.tm_min, 53) self.assertEqual(utc.tm_sec, 52) self.assertEqual(a.microsecond, 780000) def test_deserialize_datetime_rfc(self): a = Deserializer.deserialize_rfc("Mon, 20 Nov 1995 19:12:08 -0500") utc = a.utctimetuple() # UTC: 21 Nov, 00:12:08 self.assertEqual(utc.tm_year, 1995) self.assertEqual(utc.tm_mon, 11) self.assertEqual(utc.tm_mday, 21) self.assertEqual(utc.tm_hour, 0) self.assertEqual(utc.tm_min, 12) self.assertEqual(utc.tm_sec, 8) self.assertEqual(a.microsecond, 0) a = Deserializer.deserialize_rfc("Mon, 20 Nov 1995 19:12:08 CDT") utc = a.utctimetuple() # UTC: 21 Nov, 00:12:08 self.assertEqual(utc.tm_year, 1995) self.assertEqual(utc.tm_mon, 11) self.assertEqual(utc.tm_mday, 21) self.assertEqual(utc.tm_hour, 0) self.assertEqual(utc.tm_min, 12) self.assertEqual(utc.tm_sec, 8) self.assertEqual(a.microsecond, 0) a = Deserializer.deserialize_rfc("Mon, 20 Nov 1995 19:12:08") utc = a.utctimetuple() # UTC: No info is considered UTC self.assertEqual(utc.tm_year, 1995) self.assertEqual(utc.tm_mon, 11) self.assertEqual(utc.tm_mday, 20) self.assertEqual(utc.tm_hour, 19) self.assertEqual(utc.tm_min, 12) self.assertEqual(utc.tm_sec, 8) self.assertEqual(a.microsecond, 0) a = Deserializer.deserialize_rfc("Mon, 20 Nov 1995 19:12:08 GMT") utc = a.utctimetuple() self.assertEqual(utc.tm_year, 1995) self.assertEqual(utc.tm_mon, 11) self.assertEqual(utc.tm_mday, 20) self.assertEqual(utc.tm_hour, 19) self.assertEqual(utc.tm_min, 12) self.assertEqual(utc.tm_sec, 8) self.assertEqual(a.microsecond, 0) def test_rfc_pickable(self): """Check datetime created by RFC parser are pickable. 
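
        A hedged sketch of the parser whose output must survive pickling
        (illustrative date, GMT so no offset is applied):

            from msrest.serialization import Deserializer

            dt = Deserializer.deserialize_rfc('Wed, 01 Jan 2020 10:00:00 GMT')
            assert (dt.year, dt.hour) == (2020, 10)
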
See https://github.com/Azure/msrest-for-python/issues/205 """ datetime_rfc = "Mon, 25 May 2020 11:00:00 GMT" datetime1 = Deserializer.deserialize_rfc(datetime_rfc) pickled = pickle.dumps(datetime1) datetime2 = pickle.loads(pickled) assert datetime1 == datetime2 def test_polymorphic_deserialization(self): class Zoo(Model): _attribute_map = { "animals":{"key":"Animals", "type":"[Animal]"}, } def __init__(self, animals=None): self.animals = animals class Animal(Model): _attribute_map = { "name":{"key":"Name", "type":"str"}, "d_type":{"key":"dType", "type":"str"} } _subtype_map = { 'd_type': {"cat":"Cat", "dog":"Dog"} } def __init__(self, name=None): self.name = name class Dog(Animal): _attribute_map = { "name":{"key":"Name", "type":"str"}, "likes_dog_food":{"key":"likesDogFood","type":"bool"}, "d_type":{"key":"dType", "type":"str"} } def __init__(self, name=None, likes_dog_food=None): self.likes_dog_food = likes_dog_food super(Dog, self).__init__(name) self.d_type = 'dog' class Cat(Animal): _attribute_map = { "name":{"key":"Name", "type":"str"}, "likes_mice":{"key":"likesMice","type":"bool"}, "dislikes":{"key":"dislikes","type":"Animal"}, "d_type":{"key":"dType", "type":"str"} } _subtype_map = { "d_type":{"siamese":"Siamese"} } def __init__(self, name=None, likes_mice=None, dislikes = None): self.likes_mice = likes_mice self.dislikes = dislikes super(Cat, self).__init__(name) self.d_type = 'cat' class Siamese(Cat): _attribute_map = { "name":{"key":"Name", "type":"str"}, "likes_mice":{"key":"likesMice","type":"bool"}, "dislikes":{"key":"dislikes","type":"Animal"}, "color":{"key":"Color", "type":"str"}, "d_type":{"key":"dType", "type":"str"} } def __init__(self, name=None, likes_mice=None, dislikes = None, color=None): self.color = color super(Siamese, self).__init__(name, likes_mice, dislikes) self.d_type = 'siamese' message = { "Animals": [{ "dType": "dog", "likesDogFood": True, "Name": "Fido" },{ "dType": "cat", "likesMice": False, "dislikes": { "dType": "dog", "likesDogFood": True, "Name": "Angry" }, "Name": "Felix" },{ "dType": "siamese", "Color": "grey", "likesMice": True, "Name": "Finch" }] } self.d.dependencies = { 'Zoo':Zoo, 'Animal':Animal, 'Dog':Dog, 'Cat':Cat, 'Siamese':Siamese} zoo = self.d(Zoo, message) animals = [a for a in zoo.animals] self.assertEqual(len(animals), 3) self.assertIsInstance(animals[0], Dog) self.assertTrue(animals[0].likes_dog_food) self.assertEqual(animals[0].name, message['Animals'][0]["Name"]) self.assertIsInstance(animals[1], Cat) self.assertFalse(animals[1].likes_mice) self.assertIsInstance(animals[1].dislikes, Dog) self.assertEqual(animals[1].dislikes.name, message['Animals'][1]["dislikes"]["Name"]) self.assertEqual(animals[1].name, message['Animals'][1]["Name"]) self.assertIsInstance(animals[2], Siamese) self.assertEqual(animals[2].color, message['Animals'][2]["Color"]) self.assertTrue(animals[2].likes_mice) message = { "Name": "Didier", "dType": "Animal" } animal = self.d(Animal, message) self.assertIsInstance(animal, Animal) self.assertEqual(animal.name, "Didier") @unittest.skipIf(sys.version_info < (3,4), "assertLogs not supported before 3.4") def test_polymorphic_missing_info(self): class Animal(Model): _attribute_map = { "name":{"key":"Name", "type":"str"}, "d_type":{"key":"dType", "type":"str"} } _subtype_map = { 'd_type': {} } def __init__(self, name=None): self.name = name message = { "Name": "Didier" } with self.assertLogs('msrest.serialization', level="WARNING"): animal = self.d(Animal, message) self.assertEqual(animal.name, "Didier") message = 
{ "dType": "Penguin", "likesDogFood": True, "Name": "Fido" } with self.assertLogs('msrest.serialization', level="WARNING"): animal = self.d(Animal, message) self.assertEqual(animal.name, "Fido") def test_polymorphic_deserialization_with_escape(self): class Animal(Model): _attribute_map = { "name":{"key":"Name", "type":"str"}, "d_type":{"key":"odata\\.type", "type":"str"} } _subtype_map = { 'd_type': {"dog":"Dog"} } def __init__(self, name=None): self.name = name class Dog(Animal): _attribute_map = { "name":{"key":"Name", "type":"str"}, "likes_dog_food":{"key":"likesDogFood","type":"bool"}, "d_type":{"key":"odata\\.type", "type":"str"} } def __init__(self, name=None, likes_dog_food=None): self.likes_dog_food = likes_dog_food super(Dog, self).__init__(name) self.d_type = 'dog' message = { "odata.type": "dog", "likesDogFood": True, "Name": "Fido" } self.d.dependencies = { 'Animal':Animal, 'Dog':Dog} animal = self.d('Animal', message) self.assertIsInstance(animal, Dog) self.assertTrue(animal.likes_dog_food) def test_additional_properties(self): class AdditionalTest(Model): _attribute_map = { "name": {"key":"Name", "type":"str"}, "display_name": {"key":"DisplayName", "type":"str"}, 'additional_properties': {'key': '', 'type': '{object}'} } message = { "Name": "test", "DisplayName": "diplay_name", "PropInt": 2, "PropStr": "AdditionalProperty", "PropArray": [1,2,3], "PropDict": {"a": "b"} } d = Deserializer({'AdditionalTest': AdditionalTest}) m = d('AdditionalTest', message) self.assertEqual(m.name, "test") self.assertEqual(m.display_name, "diplay_name") self.assertEqual(m.additional_properties['PropInt'], 2) self.assertEqual(m.additional_properties['PropStr'], "AdditionalProperty") self.assertEqual(m.additional_properties['PropArray'], [1,2,3]) self.assertEqual(m.additional_properties['PropDict'], {"a": "b"}) def test_additional_properties_declared(self): class AdditionalTest(Model): _attribute_map = { "name": {"key":"Name", "type":"str"}, 'additional_properties': {'key': 'AddProp', 'type': '{object}'} } def __init__(self, name=None, additional_properties=None): self.name = name self.additional_properties = additional_properties message = { "Name": "test", "AddProp": { "PropInt": 2, "PropStr": "AdditionalProperty", "PropArray": [1,2,3], "PropDict": {"a": "b"} } } d = Deserializer({'AdditionalTest': AdditionalTest}) m = d('AdditionalTest', message) self.assertEqual(m.name, "test") self.assertEqual(m.additional_properties['PropInt'], 2) self.assertEqual(m.additional_properties['PropStr'], "AdditionalProperty") self.assertEqual(m.additional_properties['PropArray'], [1,2,3]) self.assertEqual(m.additional_properties['PropDict'], {"a": "b"}) def test_additional_properties_not_configured(self): class AdditionalTest(Model): _attribute_map = { "name": {"key":"Name", "type":"str"} } def __init__(self, name=None): self.name = name message = { "Name": "test", "PropInt": 2, "PropStr": "AdditionalProperty", "PropArray": [1,2,3], "PropDict": {"a": "b"} } d = Deserializer({'AdditionalTest': AdditionalTest}) m = d('AdditionalTest', message) self.assertEqual(m.name, "test") self.assertEqual(m.additional_properties['PropInt'], 2) self.assertEqual(m.additional_properties['PropStr'], "AdditionalProperty") self.assertEqual(m.additional_properties['PropArray'], [1,2,3]) self.assertEqual(m.additional_properties['PropDict'], {"a": "b"}) def test_additional_properties_flattening(self): class AdditionalTest(Model): _attribute_map = { "name": {"key":"Name", "type":"str"}, "content" :{"key":"Properties.Content", 
"type":"str"} } def __init__(self, name=None, content=None): super(AdditionalTest, self).__init__() self.name = name self.content = content message = { "Name": "test", "Properties": { "Content": "Content", "Unknown": "Unknown" } } d = Deserializer({'AdditionalTest': AdditionalTest}) m = d('AdditionalTest', message) self.assertEqual(m.name, "test") self.assertEqual(m.content, "Content") self.assertEqual(m.additional_properties, {}) def test_attr_enum(self): """ Test deserializing with Enum. """ test_obj = type("TestEnumObj", (Model,), {"_attribute_map":None}) test_obj._attribute_map = { "abc":{"key":"ABC", "type":"TestEnum"} } class TestEnum(Enum): val = "Value" deserializer = Deserializer({ 'TestEnumObj': test_obj, 'TestEnum': TestEnum }) obj = deserializer('TestEnumObj', { 'ABC': 'Value' }) self.assertEqual(obj.abc, TestEnum.val) obj = deserializer('TestEnumObj', { 'ABC': 'azerty' }) self.assertEqual(obj.abc, 'azerty') class TestEnum2(Enum): val2 = "Value" deserializer = Deserializer({ 'TestEnumObj': test_obj, 'TestEnum': TestEnum, 'TestEnum2': TestEnum2 }) obj = deserializer('TestEnumObj', { 'ABC': TestEnum2.val2 }) self.assertEqual(obj.abc, TestEnum.val) def test_long_as_type_object(self): """Test irrelevant on Python 3. But still doing it to test regresssion. https://github.com/Azure/msrest-for-python/pull/121 """ try: long_type = long except NameError: long_type = int class TestModel(Model): _attribute_map = {'data': {'key': 'data', 'type': 'object'}} m = TestModel.deserialize({'data': {'id': long_type(1)}}) assert m.data['id'] == long_type(1) def test_failsafe_deserialization(self): class Error(Model): _attribute_map = { "status": {"key": "status", "type": "int"}, "message": {"key": "message", "type": "str"}, } def __init__(self, **kwargs): super(Error, self).__init__(**kwargs) self.status = kwargs.get("status", None) self.message = kwargs.get("message", None) with pytest.raises(DeserializationError): self.d(Error, json.dumps(''), 'text/html') # should fail deserialized = self.d.failsafe_deserialize(Error, json.dumps(''), 'text/html') assert deserialized is None # should not fail error = {"status": 400, "message": "should deserialize"} deserialized = self.d.failsafe_deserialize(Error, json.dumps(error), 'application/json') assert deserialized.status == 400 assert deserialized.message == "should deserialize" class TestModelInstanceEquality(unittest.TestCase): def test_model_instance_equality(self): class Animal(Model): _attribute_map = { "name":{"key":"Name", "type":"str"}, } def __init__(self, name=None): self.name = name animal1 = Animal('a1') animal2 = Animal('a2') animal3 = Animal('a1') self.assertTrue(animal1!=animal2) self.assertTrue(animal1==animal3) if __name__ == '__main__': unittest.main() msrest-for-python-0.6.21/tests/test_universal_pipeline.py000066400000000000000000000163041400412460500236700ustar00rootroot00000000000000# -*- coding: utf-8 -*- #-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- try: from unittest import mock except ImportError: import mock import requests import pytest from msrest.exceptions import DeserializationError from msrest.universal_http import ( ClientRequest, ClientResponse, HTTPClientResponse, ) from msrest.universal_http.requests import RequestsClientResponse from msrest.pipeline import ( Response, Request ) from msrest.pipeline.universal import ( HTTPLogger, RawDeserializer, UserAgentPolicy ) def test_user_agent(): with mock.patch.dict('os.environ', {'AZURE_HTTP_USER_AGENT': "mytools"}): policy = UserAgentPolicy() assert policy.user_agent.endswith("mytools") request = ClientRequest('GET', 'http://127.0.0.1/') policy.on_request(Request(request)) assert request.headers["user-agent"].endswith("mytools") @mock.patch('msrest.http_logger._LOGGER') def test_no_log(mock_http_logger): universal_request = ClientRequest('GET', 'http://127.0.0.1/') request = Request(universal_request) http_logger = HTTPLogger() response = Response(request, ClientResponse(universal_request, None)) # By default, no log handler for HTTP http_logger.on_request(request) mock_http_logger.debug.assert_not_called() http_logger.on_response(request, response) mock_http_logger.debug.assert_not_called() mock_http_logger.reset_mock() # I can enable it per request http_logger.on_request(request, **{"enable_http_logger": True}) assert mock_http_logger.debug.call_count >= 1 http_logger.on_response(request, response, **{"enable_http_logger": True}) assert mock_http_logger.debug.call_count >= 1 mock_http_logger.reset_mock() # I can enable it per request (bool value should be honored) http_logger.on_request(request, **{"enable_http_logger": False}) mock_http_logger.debug.assert_not_called() http_logger.on_response(request, response, **{"enable_http_logger": False}) mock_http_logger.debug.assert_not_called() mock_http_logger.reset_mock() # I can enable it globally http_logger.enable_http_logger = True http_logger.on_request(request) assert mock_http_logger.debug.call_count >= 1 http_logger.on_response(request, response) assert mock_http_logger.debug.call_count >= 1 mock_http_logger.reset_mock() # I can enable it globally and override it locally http_logger.enable_http_logger = True http_logger.on_request(request, **{"enable_http_logger": False}) mock_http_logger.debug.assert_not_called() http_logger.on_response(request, response, **{"enable_http_logger": 
False}) mock_http_logger.debug.assert_not_called() mock_http_logger.reset_mock() def test_raw_deserializer(): raw_deserializer = RawDeserializer() def build_response(body, content_type=None): class MockResponse(HTTPClientResponse): def __init__(self, body, content_type): super(MockResponse, self).__init__(None, None) self._body = body if content_type: self.headers['content-type'] = content_type def body(self): return self._body return Response(None, MockResponse(body, content_type)) # I deserialize XML response = build_response(b"", content_type="application/xml") raw_deserializer.on_response(None, response, stream=False) result = response.context["deserialized_data"] assert result.tag == "groot" # The basic deserializer works with unicode XML result = raw_deserializer.deserialize_from_text(u'', content_type="application/xml") assert result.attrib["language"] == u"français" # Catch some weird situation where content_type is XML, but content is JSON response = build_response(b'{"ugly": true}', content_type="application/xml") raw_deserializer.on_response(None, response, stream=False) result = response.context["deserialized_data"] assert result["ugly"] is True # Be sure I catch the correct exception if it's neither XML nor JSON with pytest.raises(DeserializationError): response = build_response(b'gibberish', content_type="application/xml") raw_deserializer.on_response(None, response, stream=False) with pytest.raises(DeserializationError): response = build_response(b'{{gibberish}}', content_type="application/xml") raw_deserializer.on_response(None, response, stream=False) # Simple JSON response = build_response(b'{"success": true}', content_type="application/json") raw_deserializer.on_response(None, response, stream=False) result = response.context["deserialized_data"] assert result["success"] is True # Simple JSON with complex content_type response = build_response(b'{"success": true}', content_type="application/vnd.microsoft.appconfig.kv+json") raw_deserializer.on_response(None, response, stream=False) result = response.context["deserialized_data"] assert result["success"] is True # JSON with UTF-8 BOM response = build_response(b'\xef\xbb\xbf{"success": true}', content_type="application/json; charset=utf-8") raw_deserializer.on_response(None, response, stream=False) result = response.context["deserialized_data"] assert result["success"] is True # For compat, if no content-type, decode JSON response = build_response(b'"data"') raw_deserializer.on_response(None, response, stream=False) result = response.context["deserialized_data"] assert result == "data" # Try with a mock of requests req_response = requests.Response() req_response.headers["content-type"] = "application/json" req_response._content = b'{"success": true}' req_response._content_consumed = True response = Response(None, RequestsClientResponse(None, req_response)) raw_deserializer.on_response(None, response, stream=False) result = response.context["deserialized_data"] assert result["success"] is True msrest-for-python-0.6.21/tests/test_xml_serialization.py000066400000000000000000001525531400412460500235370ustar00rootroot00000000000000# -*- coding: utf-8 -*- #-------------------------------------------------------------------------- # # Copyright (c) Microsoft Corporation. All rights reserved. 
# # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # #-------------------------------------------------------------------------- import sys import xml.etree.ElementTree as ET import pytest from msrest.serialization import Serializer, Deserializer, Model, xml_key_extractor def assert_xml_equals(x1, x2): print("--------X1--------") ET.dump(x1) print("--------X2--------") ET.dump(x2) assert x1.tag == x2.tag assert (x1.text or "").strip() == (x2.text or "").strip() # assert x1.tail == x2.tail # Swagger does not change tail assert x1.attrib == x2.attrib assert len(x1) == len(x2) for c1, c2 in zip(x1, x2): assert_xml_equals(c1, c2) class TestXmlDeserialization: def test_basic(self): """Test an ultra basic XML.""" basic_xml = """ 12 37 """ class XmlModel(Model): _attribute_map = { 'longnode': {'key': 'longnode', 'type': 'long', 'xml':{'name': 'Long'}}, 'empty_long': {'key': 'empty_long', 'type': 'long', 'xml':{'name': 'EmptyLong'}}, 'age': {'key': 'age', 'type': 'int', 'xml':{'name': 'Age'}}, 'empty_age': {'key': 'empty_age', 'type': 'int', 'xml':{'name': 'EmptyAge'}}, 'empty_string': {'key': 'empty_string', 'type': 'str', 'xml':{'name': 'EmptyString'}}, 'not_set': {'key': 'not_set', 'type': 'str', 'xml':{'name': 'NotSet'}}, 'country': {'key': 'country', 'type': 'str', 'xml':{'name': 'country', 'attr': True}}, } _xml_map = { 'name': 'Data' } s = Deserializer({"XmlModel": XmlModel}) result = s(XmlModel, basic_xml, "application/xml") assert result.longnode == 12 assert result.empty_long is None assert result.age == 37 assert result.empty_age is None assert result.country == "france" assert result.empty_string == "" assert result.not_set is None def test_basic_unicode(self): """Test a XML with unicode.""" basic_xml = u""" """ class XmlModel(Model): _attribute_map = { 'language': {'key': 'language', 'type': 'str', 'xml':{'name': 'language', 'attr': True}}, } _xml_map = { 'name': 'Data' } s = Deserializer({"XmlModel": XmlModel}) result = s(XmlModel, basic_xml, "application/xml") assert result.language == u"français" def test_basic_text(self): """Test a XML with unicode.""" basic_xml = u""" I am text""" class XmlModel(Model): _attribute_map = { 'language': {'key': 'language', 'type': 'str', 'xml':{'name': 'language', 'attr': True}}, 'content': {'key': 'content', 'type': 'str', 'xml':{'text': True}}, } _xml_map = { 'name': 'Data' } s = Deserializer({"XmlModel": XmlModel}) result = s(XmlModel, basic_xml, "application/xml") assert result.language == "english" assert result.content == "I am 
text" def test_add_prop(self): """Test addProp as a dict. """ basic_xml = """ value1 value2 """ class XmlModel(Model): _attribute_map = { 'metadata': {'key': 'Metadata', 'type': '{str}', 'xml': {'name': 'Metadata'}}, } _xml_map = { 'name': 'Data' } s = Deserializer({"XmlModel": XmlModel}) result = s(XmlModel, basic_xml, "application/xml") assert len(result.metadata) == 2 assert result.metadata['Key1'] == "value1" assert result.metadata['Key2'] == "value2" def test_object(self): basic_xml = """ 37 """ s = Deserializer() result = s('object', basic_xml, "application/xml") # Should be a XML tree assert result.tag == "Data" assert result.get("country") == "france" for child in result: assert child.tag == "Age" assert child.text == "37" def test_object_no_text(self): basic_xml = """37""" s = Deserializer() result = s('object', basic_xml, "application/xml") # Should be a XML tree assert result.tag == "Data" assert result.get("country") == "france" for child in result: assert child.tag == "Age" assert child.text == "37" def test_basic_empty(self): """Test an basic XML with an empty node.""" basic_xml = """ """ class XmlModel(Model): _attribute_map = { 'age': {'key': 'age', 'type': 'str', 'xml':{'name': 'Age'}}, } _xml_map = { 'name': 'Data' } s = Deserializer({"XmlModel": XmlModel}) result = s(XmlModel, basic_xml, "application/xml") assert result.age == "" def test_basic_empty_list(self): """Test an basic XML with an empty node.""" basic_xml = """ """ class XmlModel(Model): _attribute_map = { 'age': {'key': 'age', 'type': 'str', 'xml':{'name': 'Age'}}, } _xml_map = { 'name': 'Data' } s = Deserializer({"XmlModel": XmlModel}) result = s('[XmlModel]', basic_xml, "application/xml") assert result == [] def test_list_wrapped_items_name_basic_types(self): """Test XML list and wrap, items is basic type and there is itemsName. """ basic_xml = """ granny fuji """ class AppleBarrel(Model): _attribute_map = { 'good_apples': {'key': 'GoodApples', 'type': '[str]', 'xml': {'name': 'GoodApples', 'wrapped': True, 'itemsName': 'Apple'}}, } _xml_map = { 'name': 'AppleBarrel' } s = Deserializer({"AppleBarrel": AppleBarrel}) result = s(AppleBarrel, basic_xml, "application/xml") assert result.good_apples == ["granny", "fuji"] def test_list_not_wrapped_items_name_basic_types(self): """Test XML list and no wrap, items is basic type and there is itemsName. """ basic_xml = """ granny fuji """ class AppleBarrel(Model): _attribute_map = { 'good_apples': {'key': 'GoodApples', 'type': '[str]', 'xml': {'name': 'GoodApples', 'itemsName': 'Apple'}}, } _xml_map = { 'name': 'AppleBarrel' } s = Deserializer({"AppleBarrel": AppleBarrel}) result = s(AppleBarrel, basic_xml, "application/xml") assert result.good_apples == ["granny", "fuji"] def test_list_wrapped_basic_types(self): """Test XML list and wrap, items is basic type and there is no itemsName. """ basic_xml = """ granny fuji """ class AppleBarrel(Model): _attribute_map = { 'good_apples': {'key': 'GoodApples', 'type': '[str]', 'xml': {'name': 'GoodApples', 'wrapped': True}}, } _xml_map = { 'name': 'AppleBarrel' } s = Deserializer({"AppleBarrel": AppleBarrel}) result = s(AppleBarrel, basic_xml, "application/xml") assert result.good_apples == ["granny", "fuji"] def test_list_not_wrapped_basic_types(self): """Test XML list and no wrap, items is basic type and there is no itemsName. 
""" basic_xml = """ granny fuji """ class AppleBarrel(Model): _attribute_map = { 'good_apples': {'key': 'GoodApples', 'type': '[str]', 'xml': {'name': 'GoodApples'}}, } _xml_map = { 'name': 'AppleBarrel' } s = Deserializer({"AppleBarrel": AppleBarrel}) result = s(AppleBarrel, basic_xml, "application/xml") assert result.good_apples == ["granny", "fuji"] def test_list_wrapped_items_name_complex_types(self): """Test XML list and wrap, items is ref and there is itemsName. """ basic_xml = """ """ class AppleBarrel(Model): _attribute_map = { 'good_apples': {'key': 'GoodApples', 'type': '[Apple]', 'xml': {'name': 'GoodApples', 'wrapped': True, 'itemsName': 'Apple'}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { 'name': 'Pomme' # Should be ignored, since "itemsName" is defined } s = Deserializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) result = s('AppleBarrel', basic_xml, "application/xml") assert [apple.name for apple in result.good_apples] == ["granny", "fuji"] def test_list_not_wrapped_items_name_complex_types(self): """Test XML list and wrap, items is ref and there is itemsName. """ basic_xml = """ """ class AppleBarrel(Model): _attribute_map = { # Pomme should be ignored, since it's invalid to define itemsName for a $ref type 'good_apples': {'key': 'GoodApples', 'type': '[Apple]', 'xml': {'name': 'GoodApples', 'itemsName': 'Pomme'}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { 'name': 'Apple' } s = Deserializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) result = s(AppleBarrel, basic_xml, "application/xml") assert [apple.name for apple in result.good_apples] == ["granny", "fuji"] def test_list_wrapped_complex_types(self): """Test XML list and wrap, items is ref and there is no itemsName. """ basic_xml = """ """ class AppleBarrel(Model): _attribute_map = { 'good_apples': {'key': 'GoodApples', 'type': '[Apple]', 'xml': {'name': 'GoodApples', 'wrapped': True}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { 'name': 'Apple' } s = Deserializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) result = s(AppleBarrel, basic_xml, "application/xml") assert [apple.name for apple in result.good_apples] == ["granny", "fuji"] def test_list_not_wrapped_complex_types(self): """Test XML list and wrap, items is ref and there is no itemsName. 
""" basic_xml = """ """ class AppleBarrel(Model): _attribute_map = { # Name is ignored if wrapped is False 'good_apples': {'key': 'GoodApples', 'type': '[Apple]', 'xml': {'name': 'GoodApples'}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { 'name': 'Apple' } s = Deserializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) result = s(AppleBarrel, basic_xml, "application/xml") assert [apple.name for apple in result.good_apples] == ["granny", "fuji"] def test_basic_additional_properties(self): """Test an ultra basic XML.""" basic_xml = """ 1 bob """ class XmlModel(Model): _attribute_map = { 'additional_properties': {'key': '', 'type': '{str}', 'xml': {'name': 'additional_properties'}}, 'encrypted': {'key': 'Encrypted', 'type': 'str', 'xml': {'name': 'Encrypted', 'attr': True}}, } _xml_map = { 'name': 'Metadata' } def __init__(self, **kwargs): super(XmlModel, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.encrypted = kwargs.get('encrypted', None) s = Deserializer({"XmlModel": XmlModel}) result = s(XmlModel, basic_xml, "application/xml") assert result.additional_properties == {'name': 'bob', 'number': '1'} assert result.encrypted is None def test_basic_namespace(self): """Test an ultra basic XML.""" basic_xml = """ 37 """ class XmlModel(Model): _attribute_map = { 'age': {'key': 'age', 'type': 'int', 'xml':{'name': 'Age', 'prefix':'fictional','ns':'http://characters.example.com'}}, } _xml_map = { 'name': 'Data' } s = Deserializer({"XmlModel": XmlModel}) result = s(XmlModel, basic_xml, "application/xml") assert result.age == 37 def test_complex_namespace(self): """Test recursive namespace.""" basic_xml = """ lmazuel testpolicy 12 """ class XmlRoot(Model): _attribute_map = { 'author': {'key': 'author', 'type': 'QueueDescriptionResponseAuthor'}, 'authorization_rules': {'key': 'AuthorizationRules', 'type': '[AuthorizationRule]', 'xml': {'ns': 'http://schemas.microsoft.com/netservices/2010/10/servicebus/connect', 'wrapped': True, 'itemsNs': 'http://schemas.microsoft.com/netservices/2010/10/servicebus/connect'}}, 'message_count_details': {'key': 'MessageCountDetails', 'type': 'MessageCountDetails'}, } _xml_map = { 'name': 'entry', 'ns': 'http://www.w3.org/2005/Atom' } class QueueDescriptionResponseAuthor(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml': {'ns': 'http://www.w3.org/2005/Atom'}}, } _xml_map = { 'ns': 'http://www.w3.org/2005/Atom' } class AuthorizationRule(Model): _attribute_map = { 'type': {'key': 'type', 'type': 'str', 'xml': {'attr': True, 'prefix': 'i', 'ns': 'http://www.w3.org/2001/XMLSchema-instance'}}, 'key_name': {'key': 'KeyName', 'type': 'str', 'xml': {'ns': 'http://schemas.microsoft.com/netservices/2010/10/servicebus/connect'}}, } _xml_map = { 'ns': 'http://schemas.microsoft.com/netservices/2010/10/servicebus/connect' } class MessageCountDetails(Model): _attribute_map = { 'active_message_count': {'key': 'ActiveMessageCount', 'type': 'int', 'xml': {'prefix': 'd2p1', 'ns': 'http://schemas.microsoft.com/netservices/2011/06/servicebus'}}, } _xml_map = { 'name': 'CountDetails', 'ns': 'http://schemas.microsoft.com/netservices/2010/10/servicebus/connect' } s = Deserializer({ "XmlRoot": XmlRoot, "QueueDescriptionResponseAuthor": QueueDescriptionResponseAuthor, "AuthorizationRule": AuthorizationRule, "MessageCountDetails": MessageCountDetails, }) result = s(XmlRoot, basic_xml, 
"application/xml") assert result.author.name == "lmazuel" assert result.authorization_rules[0].key_name == "testpolicy" assert result.authorization_rules[0].type == "SharedAccessAuthorizationRule" assert result.message_count_details.active_message_count == 12 def test_polymorphic_deserialization(self): basic_xml = """ 12 """ class XmlRoot(Model): _attribute_map = { 'filter': {'key': 'Filter', 'type': 'RuleFilter'}, } _xml_map = { 'name': 'entry' } class RuleFilter(Model): _attribute_map = { 'type': {'key': 'type', 'type': 'str', 'xml': {'attr': True, 'prefix': 'xsi', 'ns': 'http://www.w3.org/2001/XMLSchema-instance'}}, } _subtype_map = { 'type': {'CorrelationFilter': 'CorrelationFilter', 'SqlFilter': 'SqlFilter'} } _xml_map = { 'name': 'Filter' } class CorrelationFilter(RuleFilter): _attribute_map = { 'type': {'key': 'type', 'type': 'str', 'xml': {'attr': True, 'prefix': 'xsi', 'ns': 'http://www.w3.org/2001/XMLSchema-instance'}}, 'correlation_id': {'key': 'CorrelationId', 'type': 'int'}, } def __init__( self, correlation_id = None, **kwargs ): super(CorrelationFilter, self).__init__(**kwargs) self.type = 'CorrelationFilter' self.correlation_id = correlation_id class SqlFilter(RuleFilter): _attribute_map = { 'type': {'key': 'type', 'type': 'str', 'xml': {'attr': True, 'prefix': 'xsi', 'ns': 'http://www.w3.org/2001/XMLSchema-instance'}}, } def __init__( self, **kwargs ): pytest.fail("Don't instantiate me") s = Deserializer({ "XmlRoot": XmlRoot, "RuleFilter": RuleFilter, "SqlFilter": SqlFilter, "CorrelationFilter": CorrelationFilter, }) result = s(XmlRoot, basic_xml, "application/xml") assert isinstance(result.filter, CorrelationFilter) assert result.filter.correlation_id == 12 class TestXmlSerialization: def test_basic(self): """Test an ultra basic XML.""" basic_xml = ET.fromstring(""" 37 """) class XmlModel(Model): _attribute_map = { 'age': {'key': 'age', 'type': 'int', 'xml':{'name': 'Age'}}, 'country': {'key': 'country', 'type': 'str', 'xml':{'name': 'country', 'attr': True}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( age=37, country="france" ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body(mymodel, 'XmlModel') assert_xml_equals(rawxml, basic_xml) def test_basic_unicode(self): """Test a XML with unicode.""" basic_xml = ET.fromstring(u""" """.encode("utf-8")) class XmlModel(Model): _attribute_map = { 'language': {'key': 'language', 'type': 'str', 'xml':{'name': 'language', 'attr': True}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( language=u"français" ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body(mymodel, 'XmlModel') assert_xml_equals(rawxml, basic_xml) def test_nested_unicode(self): class XmlModel(Model): _attribute_map = { 'message_text': {'key': 'MessageText', 'type': 'str', 'xml': {'name': 'MessageText'}}, } _xml_map = { 'name': 'Message' } mymodel_no_unicode = XmlModel(message_text=u'message1') s = Serializer({"XmlModel": XmlModel}) body = s.body(mymodel_no_unicode, 'XmlModel') xml_content = ET.tostring(body, encoding="utf8") assert xml_content == b"\nmessage1" mymodel_with_unicode = XmlModel(message_text=u'message1㚈') s = Serializer({"XmlModel": XmlModel}) body = s.body(mymodel_with_unicode, 'XmlModel') xml_content = ET.tostring(body, encoding="utf8") assert xml_content == b"\nmessage1\xe3\x9a\x88" @pytest.mark.skipif(sys.version_info < (3,6), reason="Dict ordering not guaranted before 3.6, makes this complicated to test.") def test_add_prop(self): """Test addProp as a dict. 
""" basic_xml = ET.fromstring(""" value1 value2 """) class XmlModel(Model): _attribute_map = { 'metadata': {'key': 'Metadata', 'type': '{str}', 'xml': {'name': 'Metadata'}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( metadata={ 'Key1': 'value1', 'Key2': 'value2', } ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body(mymodel, 'XmlModel') assert_xml_equals(rawxml, basic_xml) def test_object(self): """Test serialize object as is. """ basic_xml = ET.fromstring(""" 37 """) s = Serializer() rawxml = s.body(basic_xml, 'object') # It should actually be the same object, should not even try to touch it assert rawxml is basic_xml @pytest.mark.skipif(sys.version_info < (3,6), reason="Unstable before python3.6 for some reasons") def test_type_basic(self): """Test some types.""" basic_xml = ET.fromstring(""" 37 true """) class XmlModel(Model): _attribute_map = { 'age': {'key': 'age', 'type': 'int', 'xml':{'name': 'Age'}}, 'enabled': {'key': 'enabled', 'type': 'bool', 'xml':{'name': 'Enabled'}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( age=37, enabled=True ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body(mymodel, 'XmlModel') assert_xml_equals(rawxml, basic_xml) def test_basic_text(self): """Test a XML with unicode.""" basic_xml = ET.fromstring(""" I am text""") class XmlModel(Model): _attribute_map = { 'language': {'key': 'language', 'type': 'str', 'xml':{'name': 'language', 'attr': True}}, 'content': {'key': 'content', 'type': 'str', 'xml':{'text': True}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( language="english", content="I am text" ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body(mymodel, 'XmlModel') assert_xml_equals(rawxml, basic_xml) def test_direct_array(self): """Test an ultra basic XML.""" basic_xml = ET.fromstring(""" """) class XmlModel(Model): _attribute_map = { 'country': {'key': 'country', 'type': 'str', 'xml':{'name': 'country', 'attr': True}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( country="france" ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body( [mymodel], '[XmlModel]', serialization_ctxt={'xml': {'name': 'bananas', 'wrapped': True}} ) assert_xml_equals(rawxml, basic_xml) def test_list_wrapped_basic_types(self): """Test XML list and wrap, items is basic type and there is no itemsName. """ basic_xml = ET.fromstring(""" granny fuji """) class AppleBarrel(Model): _attribute_map = { 'good_apples': {'key': 'GoodApples', 'type': '[str]', 'xml': {'name': 'GoodApples', 'wrapped': True}}, } _xml_map = { 'name': 'AppleBarrel' } mymodel = AppleBarrel( good_apples=['granny', 'fuji'] ) s = Serializer({"AppleBarrel": AppleBarrel}) rawxml = s.body(mymodel, 'AppleBarrel') assert_xml_equals(rawxml, basic_xml) def test_list_not_wrapped_basic_types(self): """Test XML list and no wrap, items is basic type and there is no itemsName. """ basic_xml = ET.fromstring(""" granny fuji """) class AppleBarrel(Model): _attribute_map = { 'good_apples': {'key': 'GoodApples', 'type': '[str]', 'xml': {'name': 'GoodApples'}}, } _xml_map = { 'name': 'AppleBarrel' } mymodel = AppleBarrel( good_apples=['granny', 'fuji'] ) s = Serializer({"AppleBarrel": AppleBarrel}) rawxml = s.body(mymodel, 'AppleBarrel') assert_xml_equals(rawxml, basic_xml) def test_list_wrapped_items_name_complex_types(self): """Test XML list and wrap, items is ref and there is itemsName. 
""" basic_xml = ET.fromstring(""" """) class AppleBarrel(Model): _attribute_map = { # Pomme should be ignored, since it's invalid to define itemsName for a $ref type 'good_apples': {'key': 'GoodApples', 'type': '[Apple]', 'xml': {'name': 'GoodApples', 'wrapped': True, 'itemsName': 'Pomme'}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { 'name': 'Apple' } mymodel = AppleBarrel( good_apples=[ Apple(name='granny'), Apple(name='fuji') ] ) s = Serializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) rawxml = s.body(mymodel, 'AppleBarrel') assert_xml_equals(rawxml, basic_xml) def test_list_not_wrapped_items_name_complex_types(self): """Test XML list and wrap, items is ref and there is itemsName. """ basic_xml = ET.fromstring(""" """) class AppleBarrel(Model): _attribute_map = { # Pomme should be ignored, since it's invalid to define itemsName for a $ref type 'good_apples': {'key': 'GoodApples', 'type': '[Apple]', 'xml': {'name': 'GoodApples', 'itemsName': 'Pomme'}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { 'name': 'Apple' } mymodel = AppleBarrel( good_apples=[ Apple(name='granny'), Apple(name='fuji') ] ) s = Serializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) rawxml = s.body(mymodel, 'AppleBarrel') assert_xml_equals(rawxml, basic_xml) def test_list_wrapped_complex_types(self): """Test XML list and wrap, items is ref and there is no itemsName. """ basic_xml = ET.fromstring(""" """) class AppleBarrel(Model): _attribute_map = { 'good_apples': {'key': 'GoodApples', 'type': '[Apple]', 'xml': {'name': 'GoodApples', 'wrapped': True}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { 'name': 'Apple' } mymodel = AppleBarrel( good_apples=[ Apple(name='granny'), Apple(name='fuji') ] ) s = Serializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) rawxml = s.body(mymodel, 'AppleBarrel') assert_xml_equals(rawxml, basic_xml) def test_list_not_wrapped_complex_types(self): """Test XML list and wrap, items is ref and there is no itemsName. 
""" basic_xml = ET.fromstring(""" """) class AppleBarrel(Model): _attribute_map = { # Name is ignored if "wrapped" is False 'good_apples': {'key': 'GoodApples', 'type': '[Apple]', 'xml': {'name': 'GoodApples'}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { 'name': 'Apple' } mymodel = AppleBarrel( good_apples=[ Apple(name='granny'), Apple(name='fuji') ] ) s = Serializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) rawxml = s.body(mymodel, 'AppleBarrel') assert_xml_equals(rawxml, basic_xml) @pytest.mark.skipif(sys.version_info < (3,6), reason="Unstable before python3.6 for some reasons") def test_two_complex_same_type(self): """Two different attribute are same type """ basic_xml = ET.fromstring(""" """) class AppleBarrel(Model): _attribute_map = { 'eu_apple': {'key': 'EuropeanApple', 'type': 'Apple', 'xml': {'name': 'EuropeanApple'}}, 'us_apple': {'key': 'USAApple', 'type': 'Apple', 'xml': {'name': 'USAApple'}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { } mymodel = AppleBarrel( eu_apple=Apple(name='granny'), us_apple=Apple(name='fuji'), ) s = Serializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) rawxml = s.body(mymodel, 'AppleBarrel') assert_xml_equals(rawxml, basic_xml) def test_basic_namespace(self): """Test an ultra basic XML.""" basic_xml = ET.fromstring(""" 37 """) class XmlModel(Model): _attribute_map = { 'age': {'key': 'age', 'type': 'int', 'xml':{'name': 'Age', 'prefix':'fictional','ns':'http://characters.example.com'}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( age=37, ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body(mymodel, 'XmlModel') assert_xml_equals(rawxml, basic_xml) def test_basic_is_xml(self): """Test an ultra basic XML.""" basic_xml = ET.fromstring(""" 37 """) class XmlModel(Model): _attribute_map = { 'age': {'key': 'age', 'type': 'int', 'xml':{'name': 'Age'}}, 'country': {'key': 'country', 'type': 'str', 'xml':{'name': 'country', 'attr': True}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( age=37, country="france", ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body(mymodel, 'XmlModel', is_xml=True) assert_xml_equals(rawxml, basic_xml) def test_basic_unicode_is_xml(self): """Test a XML with unicode.""" basic_xml = ET.fromstring(u""" """.encode("utf-8")) class XmlModel(Model): _attribute_map = { 'language': {'key': 'language', 'type': 'str', 'xml':{'name': 'language', 'attr': True}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( language=u"français" ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body(mymodel, 'XmlModel', is_xml=True) assert_xml_equals(rawxml, basic_xml) @pytest.mark.skipif(sys.version_info < (3,6), reason="Dict ordering not guaranted before 3.6, makes this complicated to test.") def test_add_prop_is_xml(self): """Test addProp as a dict. """ basic_xml = ET.fromstring(""" value1 value2 """) class XmlModel(Model): _attribute_map = { 'metadata': {'key': 'Metadata', 'type': '{str}', 'xml': {'name': 'Metadata'}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( metadata={ 'Key1': 'value1', 'Key2': 'value2', } ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body(mymodel, 'XmlModel', is_xml=True) assert_xml_equals(rawxml, basic_xml) def test_object_is_xml(self): """Test serialize object as is. 
""" basic_xml = ET.fromstring(""" 37 """) s = Serializer() rawxml = s.body(basic_xml, 'object', is_xml=True) # It should actually be the same object, should not even try to touch it assert rawxml is basic_xml @pytest.mark.skipif(sys.version_info < (3,6), reason="Unstable before python3.6 for some reasons") def test_type_basic_is_xml(self): """Test some types.""" basic_xml = ET.fromstring(""" 37 true """) class XmlModel(Model): _attribute_map = { 'age': {'key': 'age', 'type': 'int', 'xml':{'name': 'Age'}}, 'enabled': {'key': 'enabled', 'type': 'bool', 'xml':{'name': 'Enabled'}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( age=37, enabled=True ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body(mymodel, 'XmlModel', is_xml=True) assert_xml_equals(rawxml, basic_xml) def test_direct_array_is_xml(self): """Test an ultra basic XML.""" basic_xml = ET.fromstring(""" """) class XmlModel(Model): _attribute_map = { 'country': {'key': 'country', 'type': 'str', 'xml':{'name': 'country', 'attr': True}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( country="france" ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body( [mymodel], '[XmlModel]', serialization_ctxt={'xml': {'name': 'bananas', 'wrapped': True}}, is_xml=True ) assert_xml_equals(rawxml, basic_xml) def test_list_wrapped_basic_types_is_xml(self): """Test XML list and wrap, items is basic type and there is no itemsName. """ basic_xml = ET.fromstring(""" granny fuji """) class AppleBarrel(Model): _attribute_map = { 'good_apples': {'key': 'GoodApples', 'type': '[str]', 'xml': {'name': 'GoodApples', 'wrapped': True}}, } _xml_map = { 'name': 'AppleBarrel' } mymodel = AppleBarrel( good_apples=['granny', 'fuji'] ) s = Serializer({"AppleBarrel": AppleBarrel}) rawxml = s.body(mymodel, 'AppleBarrel', is_xml=True) assert_xml_equals(rawxml, basic_xml) def test_list_not_wrapped_basic_types_is_xml(self): """Test XML list and no wrap, items is basic type and there is no itemsName. """ basic_xml = ET.fromstring(""" granny fuji """) class AppleBarrel(Model): _attribute_map = { 'good_apples': {'key': 'GoodApples', 'type': '[str]', 'xml': {'name': 'GoodApples'}}, } _xml_map = { 'name': 'AppleBarrel' } mymodel = AppleBarrel( good_apples=['granny', 'fuji'] ) s = Serializer({"AppleBarrel": AppleBarrel}) rawxml = s.body(mymodel, 'AppleBarrel', is_xml=True) assert_xml_equals(rawxml, basic_xml) def test_list_wrapped_items_name_complex_types_is_xml(self): """Test XML list and wrap, items is ref and there is itemsName. """ basic_xml = ET.fromstring(""" """) class AppleBarrel(Model): _attribute_map = { # Pomme should be ignored, since it's invalid to define itemsName for a $ref type 'good_apples': {'key': 'GoodApples', 'type': '[Apple]', 'xml': {'name': 'GoodApples', 'wrapped': True, 'itemsName': 'Pomme'}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { 'name': 'Apple' } mymodel = AppleBarrel( good_apples=[ Apple(name='granny'), Apple(name='fuji') ] ) s = Serializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) rawxml = s.body(mymodel, 'AppleBarrel', is_xml=True) assert_xml_equals(rawxml, basic_xml) def test_list_not_wrapped_items_name_complex_types_is_xml(self): """Test XML list and wrap, items is ref and there is itemsName. 
""" basic_xml = ET.fromstring(""" """) class AppleBarrel(Model): _attribute_map = { # Pomme should be ignored, since it's invalid to define itemsName for a $ref type 'good_apples': {'key': 'GoodApples', 'type': '[Apple]', 'xml': {'name': 'GoodApples', 'itemsName': 'Pomme'}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { 'name': 'Apple' } mymodel = AppleBarrel( good_apples=[ Apple(name='granny'), Apple(name='fuji') ] ) s = Serializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) rawxml = s.body(mymodel, 'AppleBarrel', is_xml=True) assert_xml_equals(rawxml, basic_xml) def test_list_wrapped_complex_types_is_xml(self): """Test XML list and wrap, items is ref and there is no itemsName. """ basic_xml = ET.fromstring(""" """) class AppleBarrel(Model): _attribute_map = { 'good_apples': {'key': 'GoodApples', 'type': '[Apple]', 'xml': {'name': 'GoodApples', 'wrapped': True}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { 'name': 'Apple' } mymodel = AppleBarrel( good_apples=[ Apple(name='granny'), Apple(name='fuji') ] ) s = Serializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) rawxml = s.body(mymodel, 'AppleBarrel', is_xml=True) assert_xml_equals(rawxml, basic_xml) def test_list_not_wrapped_complex_types_is_xml(self): """Test XML list and wrap, items is ref and there is no itemsName. """ basic_xml = ET.fromstring(""" """) class AppleBarrel(Model): _attribute_map = { # Name is ignored if "wrapped" is False 'good_apples': {'key': 'GoodApples', 'type': '[Apple]', 'xml': {'name': 'GoodApples'}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { 'name': 'Apple' } mymodel = AppleBarrel( good_apples=[ Apple(name='granny'), Apple(name='fuji') ] ) s = Serializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) rawxml = s.body(mymodel, 'AppleBarrel', is_xml=True) assert_xml_equals(rawxml, basic_xml) @pytest.mark.skipif(sys.version_info < (3,6), reason="Unstable before python3.6 for some reasons") def test_two_complex_same_type_is_xml(self): """Two different attribute are same type """ basic_xml = ET.fromstring(""" """) class AppleBarrel(Model): _attribute_map = { 'eu_apple': {'key': 'EuropeanApple', 'type': 'Apple', 'xml': {'name': 'EuropeanApple'}}, 'us_apple': {'key': 'USAApple', 'type': 'Apple', 'xml': {'name': 'USAApple'}}, } _xml_map = { 'name': 'AppleBarrel' } class Apple(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml':{'name': 'name', 'attr': True}}, } _xml_map = { } mymodel = AppleBarrel( eu_apple=Apple(name='granny'), us_apple=Apple(name='fuji'), ) s = Serializer({"AppleBarrel": AppleBarrel, "Apple": Apple}) rawxml = s.body(mymodel, 'AppleBarrel', is_xml=True) assert_xml_equals(rawxml, basic_xml) def test_basic_namespace_is_xml(self): """Test an ultra basic XML.""" basic_xml = ET.fromstring(""" 37 """) class XmlModel(Model): _attribute_map = { 'age': {'key': 'age', 'type': 'int', 'xml':{'name': 'Age', 'prefix':'fictional','ns':'http://characters.example.com'}}, } _xml_map = { 'name': 'Data' } mymodel = XmlModel( age=37, ) s = Serializer({"XmlModel": XmlModel}) rawxml = s.body(mymodel, 'XmlModel', is_xml=True) assert_xml_equals(rawxml, basic_xml) @pytest.mark.skipif(sys.version_info < (3,6), reason="Unstable before 
python3.6 for some reasons") def test_complex_namespace(self): """Test recursive namespace.""" basic_xml = ET.fromstring(""" lmazuel testpolicy """) class XmlRoot(Model): _attribute_map = { 'author': {'key': 'author', 'type': 'QueueDescriptionResponseAuthor'}, 'authorization_rules': {'key': 'AuthorizationRules', 'type': '[AuthorizationRule]', 'xml': {'ns': 'http://schemas.microsoft.com/netservices/2010/10/servicebus/connect', 'wrapped': True, 'itemsNs': 'http://schemas.microsoft.com/netservices/2010/10/servicebus/connect'}}, } _xml_map = { 'name': 'entry', 'ns': 'http://www.w3.org/2005/Atom' } class QueueDescriptionResponseAuthor(Model): _attribute_map = { 'name': {'key': 'name', 'type': 'str', 'xml': {'ns': 'http://www.w3.org/2005/Atom'}}, } _xml_map = { 'ns': 'http://www.w3.org/2005/Atom' } class AuthorizationRule(Model): _attribute_map = { 'type': {'key': 'type', 'type': 'str', 'xml': {'attr': True, 'prefix': 'i', 'ns': 'http://www.w3.org/2001/XMLSchema-instance'}}, 'key_name': {'key': 'KeyName', 'type': 'str', 'xml': {'ns': 'http://schemas.microsoft.com/netservices/2010/10/servicebus/connect'}}, } _xml_map = { 'ns': 'http://schemas.microsoft.com/netservices/2010/10/servicebus/connect' } mymodel = XmlRoot( author = QueueDescriptionResponseAuthor(name = "lmazuel"), authorization_rules = [AuthorizationRule( type="SharedAccessAuthorizationRule", key_name="testpolicy" )] ) s = Serializer({ "XmlRoot": XmlRoot, "QueueDescriptionResponseAuthor": QueueDescriptionResponseAuthor, "AuthorizationRule": AuthorizationRule, }) rawxml = s.body(mymodel, 'XmlModel') assert_xml_equals(rawxml, basic_xml) msrest-for-python-0.6.21/tox.ini000066400000000000000000000007171400412460500165340ustar00rootroot00000000000000[tox] envlist=py27, py36 skipsdist=True [testenv] setenv = PYTHONPATH = {toxinidir}:{toxinidir}/msrest PythonLogLevel=30 deps= -rdev_requirements.txt commands_pre= autorest: bash ./autorest_setup.sh commands= pytest --cov=msrest tests/ autorest: pytest --cov=msrest --cov-append autorest.python/test/vanilla/ coverage report --fail-under=40 coverage xml --ignore-errors # At this point, don't fail for "async" keyword in 2.7/3.4