pax_global_header00006660000000000000000000000064147541434620014524gustar00rootroot0000000000000052 comment=3c0fe9f15a1f687afcd532d78a3e2cd37f87d8aa django-storages-1.14.5/000077500000000000000000000000001475414346200147035ustar00rootroot00000000000000django-storages-1.14.5/.git-blame-ignore-revs000066400000000000000000000001331475414346200210000ustar00rootroot00000000000000# .git-blame-ignore-revs # Format code with Black 44f832202f4b434ba9d15e8eb72f859d208dd26d django-storages-1.14.5/.gitattributes000066400000000000000000000000141475414346200175710ustar00rootroot00000000000000* text=auto django-storages-1.14.5/.github/000077500000000000000000000000001475414346200162435ustar00rootroot00000000000000django-storages-1.14.5/.github/FUNDING.yml000066400000000000000000000000371475414346200200600ustar00rootroot00000000000000tidelift: pypi/django-storages django-storages-1.14.5/.github/workflows/000077500000000000000000000000001475414346200203005ustar00rootroot00000000000000django-storages-1.14.5/.github/workflows/ci.yml000066400000000000000000000053571475414346200214300ustar00rootroot00000000000000name: CI on: push: branches: [ master ] pull_request: branches: - '**' jobs: run_unittest_tests: name: Unittest runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-20.04] python-version: - "3.7" - "3.8" - "3.9" - "3.10" - "3.11" - "3.12" django-version: - "3.2" - "4.1" - "4.2" - "5.0" - "5.1" - "main" exclude: - python-version: "3.7" django-version: "4.1" - python-version: "3.12" django-version: "4.1" - python-version: "3.7" django-version: "4.2" - python-version: "3.12" django-version: "4.2" - python-version: "3.7" django-version: "5.0" - python-version: "3.8" django-version: "5.0" - python-version: "3.9" django-version: "5.0" - python-version: "3.7" django-version: "5.1" - python-version: "3.8" django-version: "5.1" - python-version: "3.9" django-version: "5.1" - python-version: "3.7" django-version: "main" - python-version: "3.8" django-version: "main" - python-version: "3.9" django-version: "main" - python-version: "3.10" django-version: "main" - python-version: "3.11" django-version: "main" - python-version: "3.11" django-version: "3.2" - python-version: "3.12" django-version: "3.2" steps: - uses: actions/checkout@v4 - name: setup python uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install Dependencies run: | pip install --upgrade setuptools pip install tox - name: Run unittest Python ${{ matrix.python-version }} -- Django ${{ matrix.django-version }} env: TOXENV: py${{ matrix.python-version }}-django${{ matrix.django-version }} run: tox run_quality_tests: name: Quality tests runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-20.04] python-version: ["3.7"] steps: - uses: actions/checkout@v4 - name: setup python uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install Dependencies run: | pip install tox - name: Quality tests env: TOXENV: ruff run: | tox django-storages-1.14.5/.gitignore000066400000000000000000000002321475414346200166700ustar00rootroot00000000000000*.egg *.egg-info *.orig *.pyc *.swp .tox/ build/ __pycache__ .coverage .cache .idea/ .vscode/ .pytest_cache/ venv/ .venv/ dist/ docs/_build .DS_Store django-storages-1.14.5/.readthedocs.yaml000066400000000000000000000004531475414346200201340ustar00rootroot00000000000000# Read the Docs configuration file for Sphinx projects # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details version: 2 build: os: ubuntu-22.04 tools: python: 
"3.11" sphinx: configuration: docs/conf.py python: install: - requirements: docs/requirements.txt django-storages-1.14.5/CHANGELOG.rst000066400000000000000000001726051475414346200167370ustar00rootroot00000000000000django-storages CHANGELOG ========================= 1.14.5 (2025-02-15) ******************* General ------- - Revert ``exists()`` behavior to pre-1.14.4 semantics with additional hardening for Django versions < 4.2 to fix CVE-2024-39330. This change matches the eventual behavior Django itself shipped with. (`#1484`_, `#1486`_) - Add support for Django 5.1 (`#1444`_) Azure ----- - **Deprecated**: The setting ``AZURE_API_VERSION/api_version`` setting is deprecated in favor of the new ``AZURE_CLIENT_OPTIONS`` setting. A future version will remove support for this setting. - Add ``AZURE_CLIENT_OPTIONS`` settings to enable customization of all ``BlobServiceClient`` parameters such as ``api_version`` and all ``retry*`` options. (`#1432`_) Dropbox ------- - As part of the above hardening fix a bug was uncovered whereby a ``root_path`` setting would be applied multiple times during ``save()`` (`#1484`_) FTP --- - Fix incorrect ``exists()`` results due to an errant appended slash (`#1438`_) Google Cloud ------------ - Switch checksum to ``crc32c`` to fix downloading when running in FIPS mode (`#1473`_) - Fix double decompression when using ``gzip`` (`#1457`_) .. _#1484: https://github.com/jschneier/django-storages/pull/1484 .. _#1486: https://github.com/jschneier/django-storages/pull/1486 .. _#1444: https://github.com/jschneier/django-storages/pull/1444 .. _#1432: https://github.com/jschneier/django-storages/pull/1432 .. _#1473: https://github.com/jschneier/django-storages/pull/1473 .. _#1457: https://github.com/jschneier/django-storages/pull/1457 .. _#1438: https://github.com/jschneier/django-storages/pull/1438 1.14.4 (2024-07-09) ******************* S3 -- - Pull ``AWS_SESSION_TOKEN`` from the environment (`#1399`_) - Fix newline handling for text mode files (`#1381`_) - Do not sign URLs when ``querystring_auth=False`` e.g public buckets or static files (`#1402`_) - Cache CloudFront Signers (`#1417`_) Azure ----- - Fix ``collectstatic --clear`` (`#1403`_) - Add ``mode`` kwarg to ``.url()`` to support creation of signed URLs for upload (`#1414`_) - Fix fetching user delegation key when custom domain is enabled (`#1418`_) SFTP ---- - Add implementations of ``get_(modified|accessed)_time`` (`#1347`_) Dropbox ------- - Add support for Python 3.12 (`#1421`_) FTP --- - Conform to ``BaseStorage`` interface (`#1423`_) - Add ``FTP_ALLOW_OVERWRITE`` setting (`#1424`_) .. _#1399: https://github.com/jschneier/django-storages/pull/1399 .. _#1381: https://github.com/jschneier/django-storages/pull/1381 .. _#1402: https://github.com/jschneier/django-storages/pull/1402 .. _#1403: https://github.com/jschneier/django-storages/pull/1403 .. _#1414: https://github.com/jschneier/django-storages/pull/1414 .. _#1417: https://github.com/jschneier/django-storages/pull/1417 .. _#1418: https://github.com/jschneier/django-storages/pull/1418 .. _#1347: https://github.com/jschneier/django-storages/pull/1347 .. _#1421: https://github.com/jschneier/django-storages/pull/1421 .. _#1423: https://github.com/jschneier/django-storages/pull/1423 .. 
_#1424: https://github.com/jschneier/django-storages/pull/1424 1.14.3 (2024-05-04) ******************* General ------- - Add support for Django 5.0 and Python 3.12 (`#1331`_) S3 -- - **Deprecated**: The ``config`` class property has been deprecated in favor of the ``client_config`` setting, a future version will remove support for the property. - Fix disabling CloudFront signing with class variables (`#1334`_) - Fix ``AWS_S3_*`` environment variables lookup (`#1336`_) - Add ``client_config/AWS_S3_CLIENT_CONFIG`` to configure advanced ``botocore`` settings (`#1386`_) Google Cloud ------------ - Fix re-gzipping already gzipped files (`#1366`_) SFTP ---- - Add ``SFTP_BASE_URL`` setting (`#1368`_) - Fix saving files when ``SFTP_STORAGE_ROOT`` is set (`#1372`_) FTP --- - Add support for FTP TLS via ``ftps`` URLs (`#1320`_) - Add support for passwords with urlchars (`#1329`_) .. _#1331: https://github.com/jschneier/django-storages/pull/1331 .. _#1386: https://github.com/jschneier/django-storages/pull/1386 .. _#1372: https://github.com/jschneier/django-storages/pull/1372 .. _#1334: https://github.com/jschneier/django-storages/pull/1334 .. _#1336: https://github.com/jschneier/django-storages/pull/1336 .. _#1366: https://github.com/jschneier/django-storages/pull/1366 .. _#1368: https://github.com/jschneier/django-storages/pull/1368 .. _#1320: https://github.com/jschneier/django-storages/pull/1320 .. _#1329: https://github.com/jschneier/django-storages/pull/1329 1.14.2 (2023-10-08) ******************* S3 -- - Fix re-opening of ``S3File`` (`#1321`_) - Revert raising ``ImproperlyConfigured`` when no ``bucket_name`` is set (`#1322`_) .. _#1321: https://github.com/jschneier/django-storages/pull/1321 .. _#1322: https://github.com/jschneier/django-storages/pull/1322 1.14.1 (2023-09-29) ******************* Azure ----- - Do not require both ``AccountName`` and ``AccountKey`` in ``connection_string`` (`#1312`_) S3 -- - Work around boto3 closing the uploaded file (`#1303`_) - Fix crash when cleaning up during aborted connection of ``S3File.write`` (`#1304`_) - Raise ``FileNotFoundError`` when attempting to read the ``size`` of a non-existent file (`#1309`_) - Move auth & CloudFront signer validation to init (`#1302`_) - Raise ``ImproperlyConfigured`` if no ``bucket_name`` is set (`#1313`_) - Fix tracking of ``S3File.closed`` (`#1311`_) .. _#1303: https://github.com/jschneier/django-storages/pull/1303 .. _#1304: https://github.com/jschneier/django-storages/pull/1304 .. _#1309: https://github.com/jschneier/django-storages/pull/1309 .. _#1302: https://github.com/jschneier/django-storages/pull/1302 .. _#1313: https://github.com/jschneier/django-storages/pull/1313 .. _#1312: https://github.com/jschneier/django-storages/pull/1312 .. _#1311: https://github.com/jschneier/django-storages/pull/1311 1.14 (2023-09-04) ******************* General ------- - **Breaking**: Drop support for Django 4.0 (`#1235`_) - **Breaking**: The long deprecated & removed (from Django) ``(modified|created|accessed)_time`` methods have been removed from the various storages, please replace with the ``get_(modified|created|accessed)_time`` methods - Add support for saving ``pathlib.PurePath`` names (`#1278`_) - Add support for Django 4.2 (`#1236`_) Azure ----- - Set ``account_(name|key)`` from ``connection_string`` if not provided (`#1225`_) Dropbox ------- - **Deprecated:** The name ``DropboxStorage.location`` has been deprecated, please rename to ``DropboxStorage.root_path``, a future version will remove support for the old name. 
(`#1251`_) - Storage and related names with a captialized B have been changed to no longer have one e.g ``DropboxStorage`` has now replaced ``DropBoxStorage``. Aliases have been added so no change is necessary at this time. A future version might deprecate the old names. (`#1250`_) - ``DropboxStorage`` now conforms to the ``BaseStorage`` interface (`#1251`_) - Fix name mangling when saving with certain complex root paths (`#1279`_) FTP --- - Use setting ``BASE_URL`` if it is defined (`#1238`_) Google Cloud ------------ - **Breaking**: Support for the deprecated ``GS_CACHE_CONTROL`` has been removed. Please set the ``cache_control`` parameter of ``GS_OBJECT_PARAMETERS`` instead. (`#1220`_) Libcloud -------- - Reading a file that does not exist will now raise ``FileNotFoundError`` (`#1191`_) SFTP ---- - Add closing context manager for standalone usage to ensure connections are cleaned up (`#1253`_) S3 -- - **Deprecated:** ``AWS_S3_USE_THREADS`` has been deprecated in favor of ``AWS_S3_TRANSFER_CONFIG`` (`#1280`_) - **Important:** The namespace of this backend has changed from ``S3Boto3`` to ``S3``. There are no current plans to deprecate and remove the old namespace but please update if you can. All paths, imports, and classes that previously referred to ``s3boto`` are now ``s3``. E.g ``S3Boto3Storage`` has been changed to ``S3Storage`` and ``S3Boto3StorageFile`` has been changed to ``S3File``. (`#1289`_). Additionally the install extra is now ``s3`` (`#1284`_) - Add setting ``transfer_config/AWS_S3_TRANSFER_CONFIG`` to customize any of the ``TransferConfig`` properties (`#1280`_) - Enable passing ``security_token`` to constructor (`#1246`_) - Do not overwrite a returned ``ContentType`` from ``get_object_parameters`` (`#1281`_) - Add support for setting ``cloudfront_key_id`` and ``cloudfront_key`` via Django 4.2's ``OPTIONS`` (`#1274`_) - Fix ``S3File.closed`` (`#1249`_) - Fix opening new files in write mode with ``S3File`` (`#1282`_) - Fix ``S3File`` not respecting mode on ``readlines`` (`#1000`_) - Fix saving files with string content (`#911`_) - Fix retrieving files with SSE-C enabled (`#1286`_) .. _#1280: https://github.com/jschneier/django-storages/pull/1280 .. _#1289: https://github.com/jschneier/django-storages/pull/1289 .. _#1284: https://github.com/jschneier/django-storages/pull/1284 .. _#1274: https://github.com/jschneier/django-storages/pull/1274 .. _#1281: https://github.com/jschneier/django-storages/pull/1281 .. _#1282: https://github.com/jschneier/django-storages/pull/1282 .. _#1279: https://github.com/jschneier/django-storages/pull/1279 .. _#1278: https://github.com/jschneier/django-storages/pull/1278 .. _#1235: https://github.com/jschneier/django-storages/pull/1235 .. _#1236: https://github.com/jschneier/django-storages/pull/1236 .. _#1225: https://github.com/jschneier/django-storages/pull/1225 .. _#1251: https://github.com/jschneier/django-storages/pull/1251 .. _#1250: https://github.com/jschneier/django-storages/pull/1250 .. _#1238: https://github.com/jschneier/django-storages/pull/1238 .. _#1220: https://github.com/jschneier/django-storages/pull/1220 .. _#1191: https://github.com/jschneier/django-storages/pull/1191 .. _#1253: https://github.com/jschneier/django-storages/pull/1253 .. _#1246: https://github.com/jschneier/django-storages/pull/1246 .. _#1249: https://github.com/jschneier/django-storages/pull/1249 .. _#1000: https://github.com/jschneier/django-storages/pull/1000 .. _#911: https://github.com/jschneier/django-storages/pull/911 .. 
_#1286: https://github.com/jschneier/django-storages/pull/1286 1.13.2 (2022-12-23) ******************* General ------- - Add support for Python 3.11 (`#1196`_) - Add support for saving ``pathlib.Path`` names (`#1200`_) S3 -- - Catch 404 errors when calling ``delete()`` (`#1201`_) Azure ----- - Use ``AZURE_CUSTOM_DOMAIN`` for retrieving blob URLs and storage URL for other operations (`#1176`_) Google Cloud ------------ - Use ``DEFAULT_RETRY`` for all upload & delete operations (`#1156`_) - Fix gzipping of content (`#1203`_) - Pass through kwargs to signed URL generator (`#1193`_) SFTP ---- - Improve write & memory performance when saving files (`#1194`_) .. _#1196: https://github.com/jschneier/django-storages/pull/1196 .. _#1200: https://github.com/jschneier/django-storages/pull/1200 .. _#1201: https://github.com/jschneier/django-storages/pull/1201 .. _#1176: https://github.com/jschneier/django-storages/pull/1176 .. _#1156: https://github.com/jschneier/django-storages/pull/1156 .. _#1203: https://github.com/jschneier/django-storages/pull/1203 .. _#1193: https://github.com/jschneier/django-storages/pull/1193 .. _#1194: https://github.com/jschneier/django-storages/pull/1194 1.13.1 (2022-08-06) ******************* Dropbox ------- - Strip off the root path when saving files to fix saving with upgraded versions of Django (`#1168`_) - Update ``DropBoxStorage`` constructor parameter order to be backwards compatible (`#1167`_) .. _#1167: https://github.com/jschneier/django-storages/pull/1167 .. _#1168: https://github.com/jschneier/django-storages/pull/1168 1.13 (2022-08-05) ***************** General ------- - Add support for Django 4.0 and 4.1 (`#1093`_) - Drop support for Django 2.2, 3.0 and 3.1 (`#1093`_) - Drop support for Python 3.5 and 3.6 (`#1093`_) S3 -- - **Breaking**: Update and document the undocumented ``AWS_S3_URL_PROTOCOL`` from ``http:`` to ``https:`` and remove the undocumented ``AWS_S3_SECURE_URLS`` setting. You should only need to update your settings if you had updated either of these previously undocumented settings. The default behavior of constructing an ``https:`` URL with a custom domain is unchanged (`#1164`_) - Add ``AWS_S3_USE_THREADS`` to disable ``threading`` for compatibility with ``gevent`` (`#1112`_) Dropbox ------- - Add support for refresh tokens (`#1159`_) - Ignore ``ApiError`` exception in ``url()`` (`#1158`_) Azure ----- - Restore support for ``AZURE_ENDPOINT_SUFFIX`` (`#1118`_) - Replace deprecated ``download_to_stream`` with ``readinto`` (`#1113`_) - Add ``AZURE_API_VERSION`` setting (`#1132`_) - Fix ``get_modified_time()`` (`#1134`_) Google Cloud ------------ - Add support for gzipping files via ``GS_IS_GZIPPED`` and ``GZIP_CONTENT_TYPES`` (`#980`_) - Use ``GS_BLOB_CHUNK_SIZE`` with files that already exist (`#1154`_) .. _#980: https://github.com/jschneier/django-storages/pull/980 .. _#1118: https://github.com/jschneier/django-storages/pull/1118 .. _#1113: https://github.com/jschneier/django-storages/pull/1113 .. _#1112: https://github.com/jschneier/django-storages/pull/1112 .. _#1132: https://github.com/jschneier/django-storages/pull/1132 .. _#1134: https://github.com/jschneier/django-storages/pull/1134 .. _#1159: https://github.com/jschneier/django-storages/pull/1159 .. _#1158: https://github.com/jschneier/django-storages/pull/1158 .. _#1164: https://github.com/jschneier/django-storages/pull/1164 .. _#1093: https://github.com/jschneier/django-storages/pull/1093 .. 
_#1154: https://github.com/jschneier/django-storages/pull/1154 1.12.3 (2021-10-29) ******************* General ------- - Add support for Python 3.10 (`#1078`_) S3 -- - Re-raise non-404 errors in ``.exists()`` (`#1084`_, `#1085`_) Azure ----- - Fix using ``AZURE_CUSTOM_DOMAIN`` with an account key credential (`#1082`_, `#1083`_) SFTP ---- - Catch ``FileNotFoundError`` instead of ``OSerror`` in ``.exists()`` to prevent swallowing ``socket.timeout`` exceptions (`#1064`_, `#1087`_) .. _#1078: https://github.com/jschneier/django-storages/pull/1078 .. _#1084: https://github.com/jschneier/django-storages/issues/1084 .. _#1085: https://github.com/jschneier/django-storages/pull/1085 .. _#1082: https://github.com/jschneier/django-storages/issues/1082 .. _#1083: https://github.com/jschneier/django-storages/pull/1083 .. _#1064: https://github.com/jschneier/django-storages/issues/1064 .. _#1087: https://github.com/jschneier/django-storages/pull/1087 1.12.2 (2021-10-16) ******************* Azure ----- - Add ``parameters`` kwarg to ``AzureStorage.url`` to configure blob properties in the SAS token (`#1071`_) - Fix regression where ``AZURE_CUSTOM_DOMAIN`` was interpreted as a replacement of ``blob.core.windows.net`` rather than as a full domain (`#1073`_, `#1076`_) .. _#1071: https://github.com/jschneier/django-storages/pull/1071 .. _#1073: https://github.com/jschneier/django-storages/issues/1073 .. _#1076: https://github.com/jschneier/django-storages/pull/1076 1.12.1 (2021-10-11) ******************* S3 -- - Change gzip compression to use a streaming implementation (`#1061`_) - Fix saving files with ``S3ManifestStaticStorage`` (`#1068`_, `#1069`_) .. _#1061: https://github.com/jschneier/django-storages/pull/1061 .. _#1068: https://github.com/jschneier/django-storages/issues/1068 .. _#1069: https://github.com/jschneier/django-storages/pull/1069 1.12 (2021-10-06) ***************** General ------- - Add support for Django 3.2 (`#1046`_, `#1042`_, `#1005`_) - Replace Travis CI with GitHub actions (`#1051`_) S3 -- - Convert signing keys to bytes if necessary (`#1003`_) - Avoid a ListParts API call during multipart upload (`#1041`_) - Custom domains now use passed URL params (`#1054`_) - Allow the use of AWS profiles and clarify the options for passing credentials (`fbe9538`_) - Re-allow override of various access key names (`#1026`_) - Properly exclude empty folders during ``listdir`` (`66f4f8e`_) - Support saving file objects that are not ``seekable`` (`#860`_, `#1057`_) - Return ``True`` for ``.exists()`` if a non-404 error is encountered (`#938`_) Azure ----- - **Breaking**: This backend has been rewritten to use the newer versions of ``azure-storage-blob``, which now has a minimum required version of 12.0. The settings ``AZURE_EMULATED_MODE``, ``AZURE_ENDPOINT_SUFFIX``, and ``AZURE_CUSTOM_CONNECTION_STRING`` are now ignored. (`#784`_, `#805`_) - Add support for user delegation keys (`#1063`_) Google Cloud ------------ - **Breaking**: The minimum required version of ``google-cloud-storage`` is now 1.27.0 (`#994`_) - **Breaking**: Switch URL signing version from v2 to v4 (`#994`_) - **Deprecated**: Support for ``GS_CACHE_CONTROL`` will be removed in 1.13. Please set the ``cache_control`` parameter of ``GS_OBJECT_PARAMETERS`` instead. (`#970`_) - Add ``GS_OBJECT_PARAMETERS`` and overridable ``GoogleCloudStorage.get_object_parameters`` to customize blob parameters for all blobs and per-blob respectively. 
(`#970`_) - Catch the ``NotFound`` exception raised when deleting a non-existent blob, this matches Django and other backends (`#998`_, `#999`_) - Fix signing URLs with custom endpoints (`#994`_) Dropbox ------- - Validate ``write_mode`` param (`#1020`_) .. _fbe9538: https://github.com/jschneier/django-storages/commit/fbe9538b8574cfb0d95b04c9c477650dbfe8547b .. _66f4f8e: https://github.com/jschneier/django-storages/commit/66f4f8ec68daaac767c013d6b1a30cf26a7ac1ca .. _#1003: https://github.com/jschneier/django-storages/pull/1003 .. _#1054: https://github.com/jschneier/django-storages/pull/1054 .. _#1026: https://github.com/jschneier/django-storages/pull/1026 .. _#1041: https://github.com/jschneier/django-storages/pull/1041 .. _#970: https://github.com/jschneier/django-storages/pull/970 .. _#998: https://github.com/jschneier/django-storages/issues/998 .. _#784: https://github.com/jschneier/django-storages/issues/784 .. _#805: https://github.com/jschneier/django-storages/pull/805 .. _#999: https://github.com/jschneier/django-storages/pull/999 .. _#1051: https://github.com/jschneier/django-storages/pull/1051 .. _#1042: https://github.com/jschneier/django-storages/pull/1042 .. _#1046: https://github.com/jschneier/django-storages/issues/1046 .. _#1005: https://github.com/jschneier/django-storages/pull/1005 .. _#1020: https://github.com/jschneier/django-storages/pull/1020 .. _#860: https://github.com/jschneier/django-storages/issues/860 .. _#1057: https://github.com/jschneier/django-storages/pull/1057 .. _#938: https://github.com/jschneier/django-storages/pull/938 .. _#994: https://github.com/jschneier/django-storages/pull/994 .. _#1063: https://github.com/jschneier/django-storages/pull/1063 1.11.1 (2020-12-23) ******************* S3 -- - Revert fix for ``ValueError: I/O operation on closed file`` when calling ``collectstatic`` and introduce ``S3StaticStorage`` and ``S3ManifestStaticStorage`` for use as ``STATICFILES_STORAGE`` targets (`#968`_) .. _#968: https://github.com/jschneier/django-storages/pull/968 1.11 (2020-12-16) ***************** General ------- - Test against Python 3.9 (`#964`_) S3 -- - Fix ``ValueError: I/O operation on closed file`` when calling ``collectstatic`` (`#382`_, `#955`_) - Calculate ``S3Boto3StorageFile.buffer_size`` (via setting ``AWS_S3_FILE_BUFFER_SIZE``) at run-time rather than import-time. (`#930`_) - Fix writing ``bytearray`` content (`#958`_, `#965`_) Google Cloud ------------ - Add setting ``GS_QUERYSTRING_AUTH`` to avoid signing URLs. This is useful for buckets with a policy of Uniform public read (`#952`_) Azure ----- - Add ``AZURE_OBJECT_PARAMETERS`` and overridable ``AzureStorage.get_object_parameters`` to customize ``ContentSettings`` parameters for all keys and per-key respectively. (`#898`_) .. _#382: https://github.com/jschneier/django-storages/issues/382 .. _#955: https://github.com/jschneier/django-storages/pull/955 .. _#930: https://github.com/jschneier/django-storages/pull/930 .. _#952: https://github.com/jschneier/django-storages/pull/952 .. _#898: https://github.com/jschneier/django-storages/pull/898 .. _#964: https://github.com/jschneier/django-storages/pull/964 .. _#958: https://github.com/jschneier/django-storages/issues/958 .. _#965: https://github.com/jschneier/django-storages/pull/965 1.10.1 (2020-09-13) ******************* S3 -- - Restore ``AWS_DEFAULT_ACL`` handling. This setting is ignored if ``ACL`` is set in ``AWS_S3_OBJECT_PARAMETERS`` (`#934`_) SFTP ---- - Fix using ``SFTP_STORAGE_HOST`` (`#926`_) .. 
_#926: https://github.com/jschneier/django-storages/pull/926 .. _#934: https://github.com/jschneier/django-storages/pull/934 1.10 (2020-08-30) ***************** General ------- - **Breaking**: Removed support for end-of-life Python 2.7 and 3.4 (`#709`_) - **Breaking**: Removed support for end-of-life Django 1.11 (`#891`_) - Add support for Django 3.1 (`#916`_) - Introduce a new ``BaseStorage`` class with a ``get_default_settings`` method and use it in ``S3Boto3Storage``, ``AzureStorage``, ``GoogleCloudStorage``, and ``SFTPStorage``. These backends now calculate their settings when instantiated, not imported. (`#524`_, `#852`_) S3 -- - **Breaking**: Automatic bucket creation has been removed. Doing so encourages using overly broad credentials. As a result, support for the corresponding ``AWS_BUCKET_ACL`` and ``AWS_AUTO_CREATE_BUCKET`` settings have been removed. (`#636`_) - **Breaking**: Support for the undocumented setting ``AWS_PRELOAD_METADATA`` has been removed (`#636`_) - **Breaking**: The constructor kwarg ``acl`` is no longer accepted. Instead, use the ``ACL`` key in setting ``AWS_S3_OBJECT_PARAMETERS`` (`#636`_) - **Breaking**: The constructor kwarg ``bucket`` is no longer accepted. Instead, use ``bucket_name`` or the ``AWS_STORAGE_BUCKET_NAME`` setting (`#636`_) - **Breaking**: Support for setting ``AWS_REDUCED_REDUNDANCY`` has been removed. Replace with ``StorageClass=REDUCED_REDUNDANCY`` in ``AWS_S3_OBJECT_PARAMETERS`` (`#636`_) - **Breaking**: Support for setting ``AWS_S3_ENCRYPTION`` has been removed. Replace with ``ServerSideEncryption=AES256`` in ``AWS_S3_OBJECT_PARAMETERS`` (`#636`_) - **Breaking**: Support for setting ``AWS_DEFAULT_ACL`` has been removed. Replace with ``ACL`` in ``AWS_S3_OBJECT_PARAMETERS`` (`#636`_) - Add ``http_method`` parameter to ``.url`` method (`#854`_) - Add support for signing Cloudfront URLs to the ``.url`` method. You must set ``AWS_CLOUDFRONT_KEY``, ``AWS_CLOUDFRONT_KEY_ID`` and install either `cryptography`_ or `rsa`_ (`#456`_, `#587`_). See the docs for more info. URLs will only be signed if ``AWS_QUERYSTRING_AUTH`` is set to ``True`` (`#885`_) Google Cloud ------------ - **Breaking**: Automatic bucket creation has been removed. Doing so encourages using overly broad credentials. As a result, support for the corresponding ``GS_AUTO_CREATE_BUCKET`` and ``GS_AUTO_CREATE_ACL`` settings have been removed. (`#894`_) Dropbox ------- - Add ``DROPBOX_WRITE_MODE`` setting to control e.g. overwriting behavior. Check the docs for more info (`#873`_, `#138`_) SFTP ---- - Remove exception swallowing during ssh connection (`#835`_, `#838`_) FTP --- - Add ``FTP_STORAGE_ENCODING`` setting to set the filesystem encoding (`#803`_) - Support multiple nested paths for files (`#886`_) .. _cryptography: https://cryptography.io .. _rsa: https://stuvel.eu/rsa .. _#885: https://github.com/jschneier/django-storages/pull/885 .. _#894: https://github.com/jschneier/django-storages/pull/894 .. _#636: https://github.com/jschneier/django-storages/pull/636 .. _#709: https://github.com/jschneier/django-storages/pull/709 .. _#891: https://github.com/jschneier/django-storages/pull/891 .. _#916: https://github.com/jschneier/django-storages/pull/916 .. _#852: https://github.com/jschneier/django-storages/pull/852 .. _#873: https://github.com/jschneier/django-storages/pull/873 .. _#854: https://github.com/jschneier/django-storages/pull/854 .. _#138: https://github.com/jschneier/django-storages/issues/138 .. _#524: https://github.com/jschneier/django-storages/pull/524 .. 
_#835: https://github.com/jschneier/django-storages/issues/835 .. _#838: https://github.com/jschneier/django-storages/pull/838 .. _#803: https://github.com/jschneier/django-storages/pull/803 .. _#456: https://github.com/jschneier/django-storages/issues/456 .. _#587: https://github.com/jschneier/django-storages/pull/587 .. _#886: https://github.com/jschneier/django-storages/pull/886 1.9.1 (2020-02-03) ****************** S3 -- - Fix reading files with ``S3Boto3StorageFile`` (`#831`_, `#833`_) .. _#831: https://github.com/jschneier/django-storages/issues/831 .. _#833: https://github.com/jschneier/django-storages/pull/833 1.9 (2020-02-02) **************** General ------- - **Breaking**: The long deprecated S3 backend based on ``boto`` has been removed. (`#825`_) - Test against and support Python 3.8 (`#810`_) S3 -- - **Deprecated**: Automatic bucket creation will be removed in version 1.10 (`#826`_) - **Deprecated**: The undocumented ``AWS_PRELOAD_METADATA`` and associated functionality will be removed in version 1.10 (`#829`_) - **Deprecated**: Support for ``AWS_REDUCED_REDUNDANCY`` will be removed in version 1.10 Replace with ``StorageClass=REDUCED_REDUNDANCY`` in ``AWS_S3_OBJECT_PARAMETERS`` (`#829`_) - **Deprecated**: Support for ``AWS_S3_ENCRYPTION`` will be removed in version 1.10 (`#829`_) Replace with ``ServerSideEncryption=AES256`` in ``AWS_S3_OBJECT_PARAMETERS`` - A custom ``ContentEncoding`` is no longer overwritten automatically (note that specifying one will disable automatic ``gzip``) (`#391`_, `#828`_). - Add ``S3Boto3Storage.get_object_parameters``, an overridable method for customizing upload parameters on a per-object basis (`#819`_, `#828`_) - Opening and closing a file in `w` mode without writing anything will now create an empty file in S3, this mimics the builtin ``open`` and Django's own ``FileSystemStorage`` (`#435`_, `#816`_) - Fix reading a file in text mode (`#404`_, `#827`_) Google Cloud ------------ - **Deprecated**: Automatic bucket creation will be removed in version 1.10 (`#826`_) Dropbox ------- - Fix crash on ``DropBoxStorage.listdir`` (`#762`_) - Settings can now additionally be specified at the class level to ease subclassing (`#745`_) Libcloud -------- - Add support for Backblaze B2 to ``LibCloudStorage.url`` (`#807`_) FTP --- - Fix creating multiple intermediary directories on Windows (`#823`_, `#824`_) .. _#825: https://github.com/jschneier/django-storages/pull/825 .. _#826: https://github.com/jschneier/django-storages/pull/826 .. _#829: https://github.com/jschneier/django-storages/pull/829 .. _#391: https://github.com/jschneier/django-storages/issues/391 .. _#828: https://github.com/jschneier/django-storages/pull/828 .. _#819: https://github.com/jschneier/django-storages/issues/819 .. _#810: https://github.com/jschneier/django-storages/pull/810 .. _#435: https://github.com/jschneier/django-storages/issues/435 .. _#816: https://github.com/jschneier/django-storages/pull/816 .. _#404: https://github.com/jschneier/django-storages/issues/404 .. _#827: https://github.com/jschneier/django-storages/pull/827 .. _#762: https://github.com/jschneier/django-storages/pull/762 .. _#745: https://github.com/jschneier/django-storages/pull/745 .. _#807: https://github.com/jschneier/django-storages/pull/807 .. _#823: https://github.com/jschneier/django-storages/issues/823 .. 
_#824: https://github.com/jschneier/django-storages/pull/824 1.8 (2019-11-20) **************** General ------- - Add support for Django 3.0 (`#759`_) - Update license identifier to unambiguous ``BSD-3-Clause`` S3 -- - Include error message raised when missing library is imported (`#776`_, `#793`_) Google ------ - **Breaking** The minimum supported version of ``google-cloud-storage`` is now ``1.15.0`` which enables... - Add setting ``GS_CUSTOM_ENDPOINT`` to allow usage of custom domains (`#775`_, `#648`_) Azure ----- - Fix extra installation by pinning version to < 12 (`#785`_) - Add support for setting ``AZURE_CACHE_CONTROL`` header (`#780`_, `#674`_) .. _#759: https://github.com/jschneier/django-storages/pull/759 .. _#776: https://github.com/jschneier/django-storages/issues/776 .. _#793: https://github.com/jschneier/django-storages/pull/793 .. _#775: https://github.com/jschneier/django-storages/issues/775 .. _#648: https://github.com/jschneier/django-storages/pull/648 .. _#785: https://github.com/jschneier/django-storages/pull/785 .. _#780: https://github.com/jschneier/django-storages/pull/780 .. _#674: https://github.com/jschneier/django-storages/issues/674 1.7.2 (2019-09-10) ****************** S3 -- - Avoid misleading ``AWS_DEFAULT_ACL`` warning for insecure ``default_acl`` when overridden as a class variable (`#591`_) - Propagate file deletion to cache when ``preload_metadata`` is ``True``, (not the default) (`#743`_, `#749`_) - Fix exception raised on closed file (common if using ``ManifestFilesMixin`` or ``collectstatic``. (`#382`_, `#754`_) Azure ----- - Pare down the required packages in ``extra_requires`` when installing the ``azure`` extra to only ``azure-storage-blob`` (`#680`_, `#684`_) - Fix compatibility with ``generate_blob_shared_access_signature`` updated signature (`#705`_, `#723`_) - Fetching a file now uses the configured timeout rather than hardcoding one (`#727`_) - Add support for configuring all blobservice options: ``AZURE_ENDPOINT_SUFFIX``, ``AZURE_CUSTOM_DOMAIN``, ``AZURE_CONNECTION_STRING``, ``AZURE_TOKEN_CREDENTIAL``. See the docs for more info. Huge thanks once again to @nitely. (`#750`_) - Fix filename handling to not strip special characters (`#609`_, `#752`_) Google Cloud ------------ - Set the file acl in the same call that uploads it (`#698`_) - Reduce the number of queries and required permissions when ``GS_AUTO_CREATE_BUCKET`` is ``False`` (the default) (`#412`_, `#718`_) - Set the ``predefined_acl`` when creating a ``GoogleCloudFile`` using ``.write`` (`#640`_, `#756`_) - Add ``GS_BLOB_CHUNK_SIZE`` setting to enable efficient uploading of large files (`#757`_) Dropbox ------- - Complete migration to v2 api with file fetching and metadata fixes (`#724`_) - Add ``DROPBOX_TIMEOUT`` to configure client timeout defaulting to 100 seconds to match the underlying sdk. (`#419`_, `#747`_) SFTP ---- - Fix reopening a file (`#746`_) .. _#591: https://github.com/jschneier/django-storages/pull/591 .. _#680: https://github.com/jschneier/django-storages/issues/680 .. _#684: https://github.com/jschneier/django-storages/pull/684 .. _#698: https://github.com/jschneier/django-storages/pull/698 .. _#705: https://github.com/jschneier/django-storages/issues/705 .. _#723: https://github.com/jschneier/django-storages/pull/723 .. _#727: https://github.com/jschneier/django-storages/pull/727 .. _#746: https://github.com/jschneier/django-storages/pull/746 .. _#724: https://github.com/jschneier/django-storages/pull/724 .. 
_#412: https://github.com/jschneier/django-storages/pull/412 .. _#718: https://github.com/jschneier/django-storages/pull/718 .. _#743: https://github.com/jschneier/django-storages/issues/743 .. _#749: https://github.com/jschneier/django-storages/pull/749 .. _#750: https://github.com/jschneier/django-storages/pull/750 .. _#609: https://github.com/jschneier/django-storages/issues/609 .. _#752: https://github.com/jschneier/django-storages/pull/752 .. _#382: https://github.com/jschneier/django-storages/issues/382 .. _#754: https://github.com/jschneier/django-storages/pull/754 .. _#419: https://github.com/jschneier/django-storages/issues/419 .. _#747: https://github.com/jschneier/django-storages/pull/747 .. _#640: https://github.com/jschneier/django-storages/issues/640 .. _#756: https://github.com/jschneier/django-storages/pull/756 .. _#757: https://github.com/jschneier/django-storages/pull/757 1.7.1 (2018-09-06) ****************** - Fix off-by-1 error in ``get_available_name`` whenever ``file_overwrite`` or ``overwrite_files`` is ``True`` (`#588`_, `#589`_) - Change ``S3Boto3Storage.listdir()`` to use ``list_objects`` instead of ``list_objects_v2`` to restore compatibility with services implementing the S3 protocol that do not yet support the new method (`#586`_, `#590`_) .. _#588: https://github.com/jschneier/django-storages/issues/588 .. _#589: https://github.com/jschneier/django-storages/pull/589 .. _#586: https://github.com/jschneier/django-storages/issues/586 .. _#590: https://github.com/jschneier/django-storages/pull/590 1.7 (2018-09-03) **************** **Security** - The ``S3BotoStorage`` and ``S3Boto3Storage`` backends have an insecure default ACL of ``public-read``. It is recommended that all current users audit their bucket permissions. Support has been added for setting ``AWS_DEFAULT_ACL = None`` and ``AWS_BUCKET_ACL = None`` which causes all created files to inherit the bucket's ACL (and created buckets to inherit the Amazon account's default ACL). This will become the default in version 1.10 (for ``S3Boto3Storage`` only since ``S3BotoStorage`` will be removed in version 1.9, see below). Additionally, a warning is now raised if ``AWS_DEFAULT_ACL`` or ``AWS_BUCKET_ACL`` is not explicitly set. (`#381`_, `#535`_, `#579`_) **Breaking** - The ``AzureStorage`` backend and documentation has been completely rewritten. It now depends on ``azure`` and ``azure-storage-blob`` and is *vastly* improved. Big thanks to @nitely and all other contributors along the way (`#565`_) - The ``.url()`` method of ``GoogleCloudStorage`` has been completely reworked. Many use cases should require no changes and will experience a massive speedup. The ``.url()`` method no longer hits the network for public urls and generates signed urls (with a default of 1-day expiration, configurable via ``GS_EXPIRATION``) for non-public buckets. Check out the docs for more information. (`#570`_) - Various backends will now raise ``ImproperlyConfigured`` at runtime if their location (``GS_LOCATION``, ``AWS_LOCATION``) begins with a leading ``/`` rather than silently stripping it. Verify yours does not. (`#520`_) - The long deprecated ``GSBotoStorage`` backend is removed. (`#518`_) **Deprecation** - The insecure default of ``public-read`` for ``AWS_DEFAULT_ACL`` and ``AWS_BUCKET_ACL`` in ``S3Boto3Storage`` will change to inherit the bucket's setting in version 1.10 (`#579`_) - The legacy ``S3BotoBackend`` is deprecated and will be removed in version 1.9. 
It is strongly recommended to move to the ``S3Boto3Storage`` backend for performance, stability and bugfix reasons. See the `boto migration docs`_ for step-by-step guidelines. (`#578`_, `#584`_) - The long aliased arguments to ``S3Boto3Storage`` of ``acl`` and ``bucket`` are deprecated in favor of ``bucket_name`` and ``default_acl`` (`#516`_) - The minimum required version of ``boto3`` will be increasing to ``1.4.4`` in the next major version of ``django-storages``. (`#583`_) **Features** - Add support for a file to inherit its bucket's ACL by setting ``AWS_DEFAULT_ACL = None`` (`#535`_) - Add ``GS_CACHE_CONTROL`` setting for ``GoogleCloudStorage`` backend (`#411`_, `#505`_) - Add documentation around using django-storages with Digital Ocean Spaces (`#521`_) - Add support for Django 2.1 and Python 3.7 (`#530`_) - Make ``S3Boto3Storage`` pickleable (`#551`_) - Add automatic reconnection to ``SFTPStorage`` (`#563`_, `#564`_) - Unconditionally set the security token in the boto backends (`b13efd`_) - Improve efficiency of ``.listdir`` on ``S3Boto3Storage`` (`#352`_) - Add ``AWS_S3_VERIFY`` to support custom certificates and disabling certificate verification to ``S3Boto3Storage`` (`#486`_, `#580`_) - Add ``AWS_S3_PROXIES`` setting to ``S3Boto3Storage`` (`#583`_) - Add a snazzy new logo. Big thanks to @reallinfo **Bugfixes** - Reset file read offset before passing to ``GoogleCloudStorage`` and ``AzureStorage`` (`#481`_, `#581`_, `#582`_) - Fix various issues with multipart uploads in the S3 backends (`#169`_, `#160`_, `#364`_, `#449`_, `#504`_, `#506`_, `#546`_) - Fix ``S3Boto3Storage`` to stream down large files (also disallow `r+w` mode) (`#383`_, `#548`_) - Fix ``SFTPStorageFile`` to align with the core ``File`` abstraction (`#487`_, `#568`_) - Catch ``IOError`` in ``SFTPStorage.delete`` (`#568`_) - ``AzureStorage``, ``GoogleCloudStorage``, ``S3Boto3Storage`` and ``S3BotoStorage`` now respect ``max_length`` when ``file_overwrite = True`` (`#513`_, `#554`_) - The S3 backends now consistently use ``compresslevel=9`` (the Python stdlib default) for gzipped content (`#572`_, `#576`_) - Improve error message of ``S3Boto3Storage`` during an unexpected exception when automatically creating a bucket (`#574`_, `#577`_) .. _#381: https://github.com/jschneier/django-storages/issues/381 .. _#535: https://github.com/jschneier/django-storages/pull/535 .. _#579: https://github.com/jschneier/django-storages/pull/579 .. _#565: https://github.com/jschneier/django-storages/pull/565 .. _#520: https://github.com/jschneier/django-storages/pull/520 .. _#518: https://github.com/jschneier/django-storages/pull/518 .. _#516: https://github.com/jschneier/django-storages/pull/516 .. _#481: https://github.com/jschneier/django-storages/pull/481 .. _#581: https://github.com/jschneier/django-storages/pull/581 .. _#582: https://github.com/jschneier/django-storages/pull/582 .. _#411: https://github.com/jschneier/django-storages/issues/411 .. _#505: https://github.com/jschneier/django-storages/pull/505 .. _#521: https://github.com/jschneier/django-storages/pull/521 .. _#169: https://github.com/jschneier/django-storages/pull/169 .. _#160: https://github.com/jschneier/django-storages/issues/160 .. _#364: https://github.com/jschneier/django-storages/pull/364 .. _#449: https://github.com/jschneier/django-storages/issues/449 .. _#504: https://github.com/jschneier/django-storages/pull/504 .. _#530: https://github.com/jschneier/django-storages/pull/530 .. _#506: https://github.com/jschneier/django-storages/pull/506 .. 
_#546: https://github.com/jschneier/django-storages/pull/546 .. _#383: https://github.com/jschneier/django-storages/issues/383 .. _#548: https://github.com/jschneier/django-storages/pull/548 .. _b13efd: https://github.com/jschneier/django-storages/commit/b13efd92b3bf3e9967b8e7819224bfcf9abb977e .. _#551: https://github.com/jschneier/django-storages/pull/551 .. _#563: https://github.com/jschneier/django-storages/issues/563 .. _#564: https://github.com/jschneier/django-storages/pull/564 .. _#487: https://github.com/jschneier/django-storages/issues/487 .. _#568: https://github.com/jschneier/django-storages/pull/568 .. _#513: https://github.com/jschneier/django-storages/issues/513 .. _#554: https://github.com/jschneier/django-storages/pull/554 .. _#570: https://github.com/jschneier/django-storages/pull/570 .. _#572: https://github.com/jschneier/django-storages/issues/572 .. _#576: https://github.com/jschneier/django-storages/pull/576 .. _#352: https://github.com/jschneier/django-storages/pull/352 .. _#574: https://github.com/jschneier/django-storages/issues/574 .. _#577: https://github.com/jschneier/django-storages/pull/577 .. _#486: https://github.com/jschneier/django-storages/pull/486 .. _#580: https://github.com/jschneier/django-storages/pull/580 .. _#583: https://github.com/jschneier/django-storages/pull/583 .. _boto migration docs: https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#migrating-boto-to-boto3 .. _#578: https://github.com/jschneier/django-storages/pull/578 .. _#584: https://github.com/jschneier/django-storages/pull/584 1.6.6 (2018-03-26) ****************** * You can now specify the backend you are using to install the necessary dependencies using ``extra_requires``. For example ``pip install django-storages[boto3]`` (`#417`_) * Add additional content-type detection fallbacks (`#406`_, `#407`_) * Add ``GS_LOCATION`` setting to specify subdirectory for ``GoogleCloudStorage`` (`#355`_) * Add support for uploading large files to ``DropBoxStorage``, fix saving files (`#379`_, `#378`_, `#301`_) * Drop support for Django 1.8 and Django 1.10 (and hence Python 3.3) (`#438`_) * Implement ``get_created_time`` for ``GoogleCloudStorage`` (`#464`_) .. _#417: https://github.com/jschneier/django-storages/pull/417 .. _#407: https://github.com/jschneier/django-storages/pull/407 .. _#406: https://github.com/jschneier/django-storages/issues/406 .. _#355: https://github.com/jschneier/django-storages/pull/355 .. _#379: https://github.com/jschneier/django-storages/pull/379 .. _#378: https://github.com/jschneier/django-storages/issues/378 .. _#301: https://github.com/jschneier/django-storages/issues/301 .. _#438: https://github.com/jschneier/django-storages/issues/438 .. _#464: https://github.com/jschneier/django-storages/pull/464 1.6.5 (2017-08-01) ****************** * Fix Django 1.11 regression with gzipped content being saved twice resulting in empty files (`#367`_, `#371`_, `#373`_) * Fix the ``mtime`` when gzipping content on ``S3Boto3Storage`` (`#374`_) .. _#367: https://github.com/jschneier/django-storages/issues/367 .. _#371: https://github.com/jschneier/django-storages/pull/371 .. _#373: https://github.com/jschneier/django-storages/pull/373 .. _#374: https://github.com/jschneier/django-storages/pull/374 1.6.4 (2017-07-27) ****************** * Files uploaded with ``GoogleCloudStorage`` will now set their appropriate mimetype (`#320`_) * Fix ``DropBoxStorage.url`` to work. 
(`#357`_) * Fix ``S3Boto3Storage`` when ``AWS_PRELOAD_METADATA = True`` (`#366`_) * Fix ``S3Boto3Storage`` uploading file-like objects without names (`#195`_, `#368`_) * ``S3Boto3Storage`` is now threadsafe - a separate session is created on a per-thread basis (`#268`_, `#358`_) .. _#320: https://github.com/jschneier/django-storages/pull/320 .. _#357: https://github.com/jschneier/django-storages/pull/357 .. _#366: https://github.com/jschneier/django-storages/pull/366 .. _#195: https://github.com/jschneier/django-storages/pull/195 .. _#368: https://github.com/jschneier/django-storages/pull/368 .. _#268: https://github.com/jschneier/django-storages/issues/268 .. _#358: https://github.com/jschneier/django-storages/pull/358 1.6.3 (2017-06-23) ****************** * Revert default ``AWS_S3_SIGNATURE_VERSION`` to V2 to restore backwards compatibility in ``S3Boto3``. It's recommended that all new projects set this to be ``'s3v4'``. (`#344`_) .. _#344: https://github.com/jschneier/django-storages/pull/344 1.6.2 (2017-06-22) ****************** * Fix regression in ``safe_join()`` to handle a trailing slash in an intermediate path. (`#341`_) * Fix regression in ``gs.GSBotoStorage`` getting an unexpected kwarg. (`#342`_) .. _#341: https://github.com/jschneier/django-storages/pull/341 .. _#342: https://github.com/jschneier/django-storages/pull/342 1.6.1 (2017-06-22) ****************** * Drop support for Django 1.9 (`e89db45`_) * Fix regression in ``safe_join()`` to allow joining a base path with an empty string. (`#336`_) .. _e89db45: https://github.com/jschneier/django-storages/commit/e89db451d7e617638b5991e31df4c8de196546a6 .. _#336: https://github.com/jschneier/django-storages/pull/336 1.6 (2017-06-21) ****************** * **Breaking:** Remove backends deprecated in v1.5.1 (`#280`_) * **Breaking:** ``DropBoxStorage`` has been upgrade to support v2 of the API, v1 will be shut off at the end of the month - upgrading is recommended (`#273`_) * **Breaking:** The ``SFTPStorage`` backend now checks for the existence of the fallback ``~/.ssh/known_hosts`` before attempting to load it. If you had previously been passing in a path to a non-existent file it will no longer attempt to load the fallback. (`#118`_, `#325`_) * **Breaking:** The default version value for ``AWS_S3_SIGNATURE_VERSION`` is now ``'s3v4'``. No changes should be required (`#335`_) * **Deprecation:** The undocumented ``gs.GSBotoStorage`` backend. See the new ``gcloud.GoogleCloudStorage`` or ``apache_libcloud.LibCloudStorage`` backends instead. (`#236`_) * Add a new backend, ``gcloud.GoogleCloudStorage`` based on the ``google-cloud`` bindings. (`#236`_) * Pass in the location constraint when auto creating a bucket in ``S3Boto3Storage`` (`#257`_, `#258`_) * Add support for reading ``AWS_SESSION_TOKEN`` and ``AWS_SECURITY_TOKEN`` from the environment to ``S3Boto3Storage`` and ``S3BotoStorage``. (`#283`_) * Fix Boto3 non-ascii filenames on Python 2.7 (`#216`_, `#217`_) * Fix ``collectstatic`` timezone handling in and add ``get_modified_time`` to ``S3BotoStorage`` (`#290`_) * Add support for Django 1.11 (`#295`_) * Add ``project`` keyword support to GCS in ``LibCloudStorage`` backend (`#269`_) * Files that have a guessable encoding (e.g. gzip or compress) will be uploaded with that Content-Encoding in the ``s3boto3`` backend (`#263`_, `#264`_) * The Dropbox backend now properly translates backslashes in Windows paths into forward slashes (`e52a127`_) * The S3 backends now permit colons in the keys (`#248`_, `#322`_) .. 
_#217: https://github.com/jschneier/django-storages/pull/217 .. _#273: https://github.com/jschneier/django-storages/pull/273 .. _#216: https://github.com/jschneier/django-storages/issues/216 .. _#283: https://github.com/jschneier/django-storages/pull/283 .. _#280: https://github.com/jschneier/django-storages/pull/280 .. _#257: https://github.com/jschneier/django-storages/issues/257 .. _#258: https://github.com/jschneier/django-storages/pull/258 .. _#290: https://github.com/jschneier/django-storages/pull/290 .. _#295: https://github.com/jschneier/django-storages/pull/295 .. _#269: https://github.com/jschneier/django-storages/pull/269 .. _#263: https://github.com/jschneier/django-storages/issues/263 .. _#264: https://github.com/jschneier/django-storages/pull/264 .. _e52a127: https://github.com/jschneier/django-storages/commit/e52a127523fdd5be50bb670ccad566c5d527f3d1 .. _#236: https://github.com/jschneier/django-storages/pull/236 .. _#118: https://github.com/jschneier/django-storages/issues/118 .. _#325: https://github.com/jschneier/django-storages/pull/325 .. _#248: https://github.com/jschneier/django-storages/issues/248 .. _#322: https://github.com/jschneier/django-storages/pull/322 .. _#335: https://github.com/jschneier/django-storages/pull/335 1.5.2 (2017-01-13) ****************** * Actually use ``SFTP_STORAGE_HOST`` in ``SFTPStorage`` backend (`#204`_) * Fix ``S3Boto3Storage`` to avoid race conditions in a multi-threaded WSGI environment (`#238`_) * Fix trying to localize a naive datetime when ``settings.USE_TZ`` is ``False`` in ``S3Boto3Storage.modified_time``. (`#235`_, `#234`_) * Fix automatic bucket creation in ``S3Boto3Storage`` when ``AWS_AUTO_CREATE_BUCKET`` is ``True`` (`#196`_) * Improve the documentation for the S3 backends .. _#204: https://github.com/jschneier/django-storages/pull/204 .. _#238: https://github.com/jschneier/django-storages/pull/238 .. _#234: https://github.com/jschneier/django-storages/issues/234 .. _#235: https://github.com/jschneier/django-storages/pull/235 .. _#196: https://github.com/jschneier/django-storages/pull/196 1.5.1 (2016-09-13) ****************** * **Breaking:** Drop support for Django 1.7 (`#185`_) * **Deprecation:** hashpath, image, overwrite, mogile, symlinkorcopy, database, mogile, couchdb. See (`#202`_) to discuss maintenance going forward * Use a fixed ``mtime`` argument for ``GzipFile`` in ``S3BotoStorage`` and ``S3Boto3Storage`` to ensure a stable output for gzipped files * Use ``.putfileobj`` instead of ``.put`` in ``S3Boto3Storage`` to use the transfer manager, allowing files greater than 5GB to be put on S3 (`#194`_ , `#201`_) * Update ``S3Boto3Storage`` for Django 1.10 (`#181`_) (``get_modified_time`` and ``get_accessed_time``) * Fix bad kwarg name in ``S3Boto3Storage`` when `AWS_PRELOAD_METADATA` is `True` (`#189`_, `#190`_) .. _#202: https://github.com/jschneier/django-storages/issues/202 .. _#201: https://github.com/jschneier/django-storages/pull/201 .. _#194: https://github.com/jschneier/django-storages/issues/194 .. _#190: https://github.com/jschneier/django-storages/pull/190 .. _#189: https://github.com/jschneier/django-storages/issues/189 .. _#185: https://github.com/jschneier/django-storages/pull/185 .. 
_#181: https://github.com/jschneier/django-storages/pull/181 1.5.0 (2016-08-02) ****************** * Add new backend ``S3Boto3Storage`` (`#179`_) * Add a `strict` option to `utils.setting` (`#176`_) * Tests, documentation, fixing ``.close`` for ``SFTPStorage`` (`#177`_) * Tests, documentation, add `.readlines` for ``FTPStorage`` (`#175`_) * Tests and documentation for ``DropBoxStorage`` (`#174`_) * Fix ``MANIFEST.in`` to not ship ``.pyc`` files. (`#145`_) * Enable CI testing of Python 3.5 and fix test failure from api change (`#171`_) .. _#145: https://github.com/jschneier/django-storages/pull/145 .. _#171: https://github.com/jschneier/django-storages/pull/171 .. _#174: https://github.com/jschneier/django-storages/pull/174 .. _#175: https://github.com/jschneier/django-storages/pull/175 .. _#177: https://github.com/jschneier/django-storages/pull/177 .. _#176: https://github.com/jschneier/django-storages/pull/176 .. _#179: https://github.com/jschneier/django-storages/pull/179 1.4.1 (2016-04-07) ****************** * Files that have a guessable encoding (e.g. gzip or compress) will be uploaded with that Content-Encoding in the ``s3boto`` backend. Compressable types such as ``application/javascript`` will still be gzipped. PR `#122`_ * Fix ``DropBoxStorage.exists`` check and add ``DropBoxStorage.url`` (`#127`_) * Add ``GS_HOST`` setting (with a default of ``GSConnection.DefaultHost``) to fix ``GSBotoStorage``. (`#124`_, `#125`_) .. _#122: https://github.com/jschneier/django-storages/pull/122 .. _#127: https://github.com/jschneier/django-storages/pull/127 .. _#124: https://github.com/jschneier/django-storages/issues/124 .. _#125: https://github.com/jschneier/django-storages/pull/125 1.4 (2016-02-07) **************** * This package is now released on PyPI as `django-storages`. Please update your requirements files to `django-storages==1.4`. 1.3.2 (2016-01-26) ****************** * Fix memory leak from not closing underlying temp file in ``s3boto`` backend (`#106`_) * Allow easily specifying a custom expiry time when generating a url for ``S3BotoStorage`` (`#96`_) * Check for bucket existence when the empty path ('') is passed to ``storage.exists`` in ``S3BotoStorage`` - this prevents a crash when running ``collectstatic -c`` on Django 1.9.1 (`#112`_) fixed in `#116`_ .. _#106: https://github.com/jschneier/django-storages/pull/106 .. _#96: https://github.com/jschneier/django-storages/pull/96 .. _#112: https://github.com/jschneier/django-storages/issues/112 .. _#116: https://github.com/jschneier/django-storages/pull/116 1.3.1 (2016-01-12) ****************** * A few Azure Storage fixes [pass the content-type to Azure, handle chunked content, fix ``url``] (`#45`__) * Add support for a Dropbox (``dropbox``) storage backend * Various fixes to the ``apache_libcloud`` backend [return the number of bytes asked for by ``.read``, make ``.name`` non-private, don't initialize to an empty ``BytesIO`` object] (`#55`_) * Fix multi-part uploads in ``s3boto`` backend not respecting ``AWS_S3_ENCRYPTION`` (`#94`_) * Automatically gzip svg files (`#100`_) .. __: https://github.com/jschneier/django-storages/pull/45 .. _#76: https://github.com/jschneier/django-storages/pull/76 .. _#55: https://github.com/jschneier/django-storages/pull/55 .. _#94: https://github.com/jschneier/django-storages/pull/94 .. 
_#100: https://github.com/jschneier/django-storages/pull/100 1.3 (2015-08-14) **************** * **Breaking:** Drop Support for Django 1.5 and Python 2.6 * **Breaking:** Remove previously deprecated mongodb backend * **Breaking:** Remove previously deprecated ``parse_ts_extended`` from s3boto storage * Add support for Django 1.8+ (`#36`__) * Add ``AWS_S3_PROXY_HOST`` and ``AWS_S3_PROXY_PORT`` settings for s3boto backend (`#41`_) * Fix Python3K compat issue in apache_libcloud (`#52`_) * Fix Google Storage backend not respecting ``GS_IS_GZIPPED`` setting (`#51`__, `#60`_) * Rename FTP ``_name`` attribute to ``name`` which is what the Django ``File`` api is expecting (`#70`_) * Put ``StorageMixin`` first in inheritance to maintain backwards compat with older versions of Django (`#63`_) .. __: https://github.com/jschneier/django-storages/pull/36 .. _#41: https://github.com/jschneier/django-storages/pull/41 .. _#52: https://github.com/jschneier/django-storages/issues/52 .. __: https://github.com/jschneier/django-storages/pull/51 .. _#60: https://github.com/jschneier/django-storages/pull/60 .. _#70: https://github.com/jschneier/django-storages/pull/70 .. _#63: https://github.com/jschneier/django-storages/pull/63 1.2.3 (2015-03-14) ****************** * Variety of FTP backend fixes (fix ``exists``, add ``modified_time``, remove call to non-existent function) (`#26`_) * Apparently the year changed to 2015 .. _#26: https://github.com/jschneier/django-storages/pull/26 1.2.2 (2015-01-28) ****************** * Remove always show all warnings filter (`#21`_) * Release package as a wheel * Avoid resource warning during install (`#20`__) * Made ``S3BotoStorage`` deconstructible (previously only ``S3BotoStorageFile`` was deconstructible) (`#19`_) .. _#21: https://github.com/jschneier/django-storages/pull/21 .. __: https://github.com/jschneier/django-storages/issues/20 .. _#19: https://github.com/jschneier/django-storages/pull/19 1.2.1 (2014-12-31) ****************** * **Deprecation:** Issue warning about ``parse_ts_extended`` * **Deprecation:** mongodb backend - django-mongodb-engine now ships its own storage backend * Fix ``storage.modified_time`` crashing on new files when ``AWS_PRELOAD_METADATA=True`` (`#11`_, `#12`__, `#14`_) .. _#11: https://github.com/jschneier/django-storages/pull/11 __ https://github.com/jschneier/django-storages/issues/12 .. _#14: https://github.com/jschneier/django-storages/pull/14 1.2 (2014-12-14) **************** * **Breaking:** Remove legacy S3 storage (`#1`_) * **Breaking:** Remove mosso files backend (`#2`_) * Add text/javascript mimetype to S3BotoStorage gzip allowed defaults * Add support for Django 1.7 migrations in S3BotoStorage and ApacheLibCloudStorage (`#5`_, `#8`_) * Python3K (3.3+) now available for S3Boto backend (`#4`_) .. _#8: https://github.com/jschneier/django-storages/pull/8 .. _#5: https://github.com/jschneier/django-storages/pull/5 .. _#4: https://github.com/jschneier/django-storages/pull/4 .. _#1: https://github.com/jschneier/django-storages/issues/1 .. _#2: https://github.com/jschneier/django-storages/issues/2 **NOTE**: Version 1.1.9 is the first release of django-storages after the fork. It represents the current (2014-12-08) state of the original django-storages in master with no additional changes. This is the first release of the code base since March 2013. 
1.1.9 (2014-12-08) ****************** * Fix syntax for Python3 with pull-request `#91`_ * Support pushing content type from File object to GridFS with pull-request `#90`_ * Support passing a region to the libcloud driver with pull-request `#86`_ * Handle trailing slash paths fixes `#188`_ fixed by pull-request `#85`_ * Use a SpooledTemporaryFile to conserve memory in S3BotoFile pull-request `#69`_ * Guess content-type for S3BotoStorageFile the same way that _save() in S3BotoStorage does * Pass headers and response_headers through from url to generate_url in S3BotoStorage pull-request `#65`_ * Added AWS_S3_HOST, AWS_S3_PORT and AWS_S3_USE_SSL settings to specify host, port and is_secure in pull-request `#66`_ .. _#91: https://bitbucket.org/david/django-storages/pull-request/91/ .. _#90: https://bitbucket.org/david/django-storages/pull-request/90/ .. _#86: https://bitbucket.org/david/django-storages/pull-request/86/ .. _#188: https://bitbucket.org/david/django-storages/issue/188/s3boto-_clean_name-is-broken-and-leads-to .. _#85: https://bitbucket.org/david/django-storages/pull-request/85/ .. _#69: https://bitbucket.org/david/django-storages/pull-request/69/ .. _#66: https://bitbucket.org/david/django-storages/pull-request/66/ .. _#65: https://bitbucket.org/david/django-storages/pull-request/65/ **Everything Below Here Was Previously Released on PyPI under django-storages** 1.1.8 (2013-03-31) ****************** * Fixes `#156`_ regarding date parsing, ValueError when running collectstatic * Proper handling of boto dev version parsing * Made SFTP URLs accessible, now uses settings.MEDIA_URL instead of sftp:// .. _#156: https://bitbucket.org/david/django-storages/issue/156/s3boto-backend-valueerror-time-data-thu-07 1.1.7 (2013-03-20) ****************** * Listing of huge buckets on S3 is now prevented by using the prefix argument to boto's list() method * Initial support for Windows Azure Storage * Switched to useing boto's parse_ts date parser getting last modified info when using S3boto backend * Fixed key handling in S3boto and Google Storage backends * Account for lack of multipart upload in Google Storage backend * Fixed seek() issue when using AWS_IS_GZIPPED by darkness51 with pull-request `#50`_ * Improvements to S3BotoStorage and GSBotoStorage .. _#50: https://bitbucket.org/david/django-storages/pull-request/50/ 1.1.6 (2013-01-06) ****************** * Merged many changes from Jannis Leidel (mostly regarding gzipping) * Fixed tests by Ian Lewis * Added support for Google Cloud Storage backend by Jannis Leidel * Updated license file by Dan Loewenherz, fixes `#133`_ with pull-request `#44`_ * Set Content-Type header for use in upload_part_from_file by Gerardo Curiel * Pass the rewind parameter to Boto's set_contents_from_file method by Jannis Leidel with pull-request `#45`_ * Fix for FTPStorageFile close() method by Mathieu Comandon with pull-request `#43`_ * Minor refactoring by Oktay Sancak with pull-request `#48`_ * Ungzip on download based on Content-Encoding by Gavin Wahl with pull-request `#46`_ * Add support for S3 server-side encryption by Tobias McNulty with pull-request `#17`_ * Add an optional setting to the boto storage to produce protocol-relative URLs, fixes `#105`_ .. _#133: https://bitbucket.org/david/django-storages/issue/133/license-file-refers-to-incorrect-project .. _#44: https://bitbucket.org/david/django-storages/pull-request/44/ .. _#45: https://bitbucket.org/david/django-storages/pull-request/45/ .. _#43: https://bitbucket.org/david/django-storages/pull-request/43/ .. 
_#48: https://bitbucket.org/david/django-storages/pull-request/48/ .. _#46: https://bitbucket.org/david/django-storages/pull-request/46/ .. _#17: https://bitbucket.org/david/django-storages/pull-request/17/ .. _#105: https://bitbucket.org/david/django-storages/issue/105/add-option-to-produce-protocol-relative 1.1.5 (2012-07-18) ****************** * Merged pull request `#36`_ from freakboy3742 Keith-Magee, improvements to Apache Libcloud backend and docs * Merged pull request `#35`_ from atodorov, allows more granular S3 access settings * Add support for SSL in Rackspace Cloudfiles backend * Fixed the listdir() method in s3boto backend, fixes `#57`_ * Added base url tests for safe_join in s3boto backend * Merged pull request `#20`_ from alanjds, fixed SuspiciousOperation warning if AWS_LOCATION ends with '/' * Added FILE_BUFFER_SIZE setting to s3boto backend * Merged pull request `#30`_ from pendletongp, resolves `#108`_, `#109`_ and `#110`_ * Updated the modified_time() method so that it doesn't require dateutil. fixes `#111`_ * Merged pull request `#16`_ from chamal, adds Apache Libcloud backend * When preloading the S3 metadata make sure we reset the files key during saving to prevent stale metadata * Merged pull request `#24`_ from tobias.mcnulty, fixes bug where s3boto backend returns modified_time in wrong time zone * Fixed HashPathStorage.location to no longer use settings.MEDIA_ROOT * Remove download_url from setup file so PyPI dist is used .. _#36: https://bitbucket.org/david/django-storages/pull-request/36/ .. _#35: https://bitbucket.org/david/django-storages/pull-request/35/ .. _#57: https://bitbucket.org/david/django-storages/issue/57 .. _#20: https://bitbucket.org/david/django-storages/pull-request/20/ .. _#30: https://bitbucket.org/david/django-storages/pull-request/30/ .. _#108: https://bitbucket.org/david/django-storages/issue/108 .. _#109: https://bitbucket.org/david/django-storages/issue/109 .. _#110: https://bitbucket.org/david/django-storages/issue/110 .. _#111: https://bitbucket.org/david/django-storages/issue/111 .. _#16: https://bitbucket.org/david/django-storages/pull-request/16/ .. _#24: https://bitbucket.org/david/django-storages/pull-request/24/ 1.1.4 (2012-01-06) ****************** * Added PendingDeprecationWarning for mosso backend * Merged pull request `#13`_ from marcoala, adds ``SFTP_KNOWN_HOST_FILE`` setting to SFTP storage backend * Merged pull request `#12`_ from ryankask, fixes HashPathStorage tests that delete remote media * Merged pull request `#10`_ from key, adds support for django-mongodb-engine 0.4.0 or later, fixes GridFS file deletion bug * Fixed S3BotoStorage performance problem calling modified_time() * Added deprecation warning for s3 backend, refs `#40`_ * Fixed CLOUDFILES_CONNECTION_KWARGS import error, fixes `#78`_ * Switched to sphinx documentation, set official docs up on https://django-storages.readthedocs.io/ * HashPathStorage uses self.exists now, fixes `#83`_ .. _#13: https://bitbucket.org/david/django-storages/pull-request/13/a-version-of-sftp-storage-that-allows-you .. _#12: https://bitbucket.org/david/django-storages/pull-request/12/hashpathstorage-tests-deleted-my-projects .. _#10: https://bitbucket.org/david/django-storages/pull-request/10/support-django-mongodb-engine-040 .. _#40: https://bitbucket.org/david/django-storages/issue/40/deprecate-s3py-backend .. _#78: https://bitbucket.org/david/django-storages/issue/78/import-error .. 
_#83: https://bitbucket.org/david/django-storages/issue/6/symlinkorcopystorage-new-custom-storage 1.1.3 (2011-08-15) ****************** * Created this lovely change log * Fixed `#89`_: broken StringIO import in CloudFiles backend * Merged `pull request #5`_: HashPathStorage path bug .. _#89: https://bitbucket.org/david/django-storages/issue/89/112-broke-the-mosso-backend .. _pull request #5: https://bitbucket.org/david/django-storages/pull-request/5/fixed-path-bug-and-added-testcase-for django-storages-1.14.5/LICENSE000066400000000000000000000030161475414346200157100ustar00rootroot00000000000000BSD 3-Clause License Copyright (c) 2008 - 2023, Josh Schneier, David Larlet, et al. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. django-storages-1.14.5/MANIFEST.in000066400000000000000000000001751475414346200164440ustar00rootroot00000000000000include CHANGELOG.rst LICENSE README.rst recursive-include tests *.py recursive-include docs Makefile conf.py make.bat *.rst django-storages-1.14.5/README.rst000066400000000000000000000055001475414346200163720ustar00rootroot00000000000000.. image:: https://raw.githubusercontent.com/jschneier/django-storages/master/docs/logos/horizontal.png :alt: Django-Storages :width: 100% .. image:: https://img.shields.io/pypi/v/django-storages.svg :target: https://pypi.org/project/django-storages/ :alt: PyPI Version .. image:: https://github.com/jschneier/django-storages/actions/workflows/ci.yml/badge.svg :target: https://github.com/jschneier/django-storages/actions/workflows/ci.yml :alt: Build Status Installation ============ Installing from PyPI is as easy as doing: .. code-block:: bash pip install django-storages If you'd prefer to install from source (maybe there is a bugfix in master that hasn't been released yet) then the magic incantation you are looking for is: .. code-block:: bash pip install -e 'git+https://github.com/jschneier/django-storages.git#egg=django-storages' For detailed instructions on how to configure the backend of your choice please consult the documentation. About ===== django-storages is a project to provide a variety of storage backends in a single library. 
This library is usually compatible with the currently supported versions of Django. Check the Trove classifiers in setup.py to be sure. django-storages is backed in part by `Tidelift`_. Check them out for all of your enterprise open source software commercial support needs. .. _Tidelift: https://tidelift.com/subscription/pkg/pypi-django-storages?utm_source=pypi-django-storages&utm_medium=referral&utm_campaign=enterprise&utm_term=repo Security ======== To report a security vulnerability, please use the `Tidelift security contact`_. Tidelift will coordinate the fix and disclosure. Please **do not** post a public issue on the tracker. .. _Tidelift security contact: https://tidelift.com/security Found a Bug? ============ Issues are tracked via GitHub issues at the `project issue page `_. Documentation ============= Documentation for django-storages is located at https://django-storages.readthedocs.io/. Contributing ============ #. `Check for open issues `_ at the project issue page or open a new issue to start a discussion about a feature or bug. #. Fork the `django-storages repository on GitHub `_ to start making changes. #. Add a test case to show that the bug is fixed or the feature is implemented correctly. #. Bug me until I can merge your pull request. Please don't update the library version in CHANGELOG.rst or ``storages/__init__.py``, the maintainer will do that on release. History ======= This repo began as a fork of the original library under the package name of django-storages-redux and became the official successor (releasing under django-storages on PyPI) in February of 2016. django-storages-1.14.5/docs/000077500000000000000000000000001475414346200156335ustar00rootroot00000000000000django-storages-1.14.5/docs/Makefile000066400000000000000000000110221475414346200172670ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 
singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/django-storages.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/django-storages.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/django-storages" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/django-storages" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." make -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." django-storages-1.14.5/docs/backends/000077500000000000000000000000001475414346200174055ustar00rootroot00000000000000django-storages-1.14.5/docs/backends/amazon-S3.rst000066400000000000000000000266741475414346200217260ustar00rootroot00000000000000Amazon S3 ========= This backend implements the Django File Storage API for Amazon Web Services's (AWS) Simple Storage Service (S3). Installation ------------ The backend is based on the boto3 library which must be installed; the minimum required version is 1.4.4 although we always recommend the most recent. Either add it to your requirements or use the optional ``s3`` extra e.g:: pip install django-storages[s3] Configuration & Settings ------------------------ Django 4.2 changed the way file storage objects are configured. 
In particular, it made it easier to independently configure storage backends and add additional ones. To configure multiple storage objects pre Django 4.2 required subclassing the backend because the settings were global, now you pass them under the key ``OPTIONS``. For example, to save media files to S3 on Django >= 4.2 you'd define:: STORAGES = { "default": { "BACKEND": "storages.backends.s3.S3Storage", "OPTIONS": { ...your_options_here }, }, } On Django < 4.2 you'd instead define:: DEFAULT_FILE_STORAGE = "storages.backends.s3.S3Storage" To put static files on S3 via ``collectstatic`` on Django >= 4.2 you'd include the ``staticfiles`` key (at the same level as ``default``) in the ``STORAGES`` dictionary while on Django < 4.2 you'd instead define:: STATICFILES_STORAGE = "storages.backends.s3.S3Storage" The settings documented in the following sections include both the key for ``OPTIONS`` (and subclassing) as well as the global value. Given the significant improvements provided by the new API, migration is strongly encouraged. Authentication Settings ~~~~~~~~~~~~~~~~~~~~~~~ There are several different methods for specifying the AWS credentials used to create the S3 client. In the order that ``S3Storage`` searches for them: #. ``session_profile`` or ``AWS_S3_SESSION_PROFILE`` #. ``access_key`` or ``AWS_S3_ACCESS_KEY_ID`` or ``AWS_ACCESS_KEY_ID`` #. ``secret_key`` or ``AWS_S3_SECRET_ACCESS_KEY`` or ``AWS_SECRET_ACCESS_KEY`` #. ``security_token`` or ``AWS_SESSION_TOKEN`` or ``AWS_SECURITY_TOKEN`` #. The environment variables AWS_S3_ACCESS_KEY_ID and AWS_S3_SECRET_ACCESS_KEY #. The environment variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY #. The environment variables AWS_SESSION_TOKEN and AWS_SECURITY_TOKEN #. Use Boto3's default session Settings ~~~~~~~~ ``bucket_name`` or ``AWS_STORAGE_BUCKET_NAME`` **Required** The name of the S3 bucket that will host the files. ``object_parameters`` or ``AWS_S3_OBJECT_PARAMETERS`` Default: ``{}`` Use this to set parameters on all objects. To set these on a per-object basis, subclass the backend and override ``S3Storage.get_object_parameters``. To view a full list of possible parameters (there are many) see the `Boto3 docs for uploading files`_; an incomplete list includes: ``CacheControl``, ``SSEKMSKeyId``, ``StorageClass``, ``Tagging`` and ``Metadata``. ``default_acl`` or ``AWS_DEFAULT_ACL`` Default: ``None`` - the file will be ``private`` per Amazon's default Use this to set an ACL on your file such as ``public-read``. If not set the file will be ``private`` per Amazon's default. If the ``ACL`` parameter is set in ``object_parameters``, then this setting is ignored. Options such as ``public-read`` and ``private`` come from the `list of canned ACLs`_. ``querystring_auth`` or ``AWS_QUERYSTRING_AUTH`` Default: ``True`` Setting ``AWS_QUERYSTRING_AUTH`` to ``False`` to remove query parameter authentication from generated URLs. This can be useful if your S3 buckets are public. ``max_memory_size`` or ``AWS_S3_MAX_MEMORY_SIZE`` Default: ``0`` i.e do not roll over The maximum amount of memory (in bytes) a file can take up before being rolled over into a temporary file on disk. ``querystring_expire`` or ``AWS_QUERYSTRING_EXPIRE`` Default: ``3600`` The number of seconds that a generated URL is valid for. ``url_protocol`` or ``AWS_S3_URL_PROTOCOL`` Default: ``https:`` The protocol to use when constructing a custom domain, ``custom_domain`` must be ``True`` for this to have any effect. .. 
note:: Must end in a ``:`` ``file_overwrite`` or ``AWS_S3_FILE_OVERWRITE`` Default: ``True`` By default files with the same name will overwrite each other. Set this to ``False`` to have extra characters appended. ``location`` or ``AWS_LOCATION`` Default: ``''`` A path prefix that will be prepended to all uploads. ``gzip`` or ``AWS_IS_GZIPPED`` Default: ``False`` Whether or not to enable gzipping of content types specified by ``gzip_content_types``. ``gzip_content_types`` or ``GZIP_CONTENT_TYPES`` Default: ``(text/css,text/javascript,application/javascript,application/x-javascript,image/svg+xml)`` The list of content types to be gzipped when ``gzip`` is ``True``. ``region_name`` or ``AWS_S3_REGION_NAME`` Default: ``None`` Name of the AWS S3 region to use (eg. eu-west-1) ``use_ssl`` or ``AWS_S3_USE_SSL`` Default: ``True`` Whether or not to use SSL when connecting to S3, this is passed to the boto3 session resource constructor. ``verify`` or ``AWS_S3_VERIFY`` Default: ``None`` Whether or not to verify the connection to S3. Can be set to False to not verify certificates or a path to a CA cert bundle. ``endpoint_url`` or ``AWS_S3_ENDPOINT_URL`` Default: ``None`` Custom S3 URL to use when connecting to S3, including scheme. Overrides ``region_name`` and ``use_ssl``. To avoid ``AuthorizationQueryParametersError`` errors, ``region_name`` should also be set. ``addressing_style`` or ``AWS_S3_ADDRESSING_STYLE`` Default: ``None`` Possible values ``virtual`` and ``path``. ``proxies`` or ``AWS_S3_PROXIES`` Default: ``None`` Dictionary of proxy servers to use by protocol or endpoint, e.g.:: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. ``transfer_config`` or ``AWS_S3_TRANSFER_CONFIG`` Default: ``None`` Set this to customize the transfer config options such as disabling threads for ``gevent`` compatibility; See the `Boto3 docs for TransferConfig`_ for more info. ``custom_domain`` or ``AWS_S3_CUSTOM_DOMAIN`` Default: ``None`` Set this to specify a custom domain for constructed URLs. .. note:: You'll have to configure CloudFront to use the bucket as an origin for this to work. If your CloudFront config restricts viewer access you will also need to provide ``cloudfront_key`` / ``AWS_CLOUDFRONT_KEY`` and ``cloudfront_key_id`` / ``AWS_CLOUDFRONT_KEY_ID``; See those settings and :ref:`cloudfront-signed-url-header` for more info. If you have more than one storage with different viewer access permissions, you can provide ``cloudfront_signer=None`` to disable signing on one or more storages. .. warning:: Django’s STATIC_URL must end in a slash and this must not. It is best to set this variable independently of STATIC_URL. ``cloudfront_key`` or ``AWS_CLOUDFRONT_KEY`` Default: ``None`` A private PEM encoded key to use in a ``boto3`` ``CloudFrontSigner``; See :ref:`cloudfront-signed-url-header` for more info. ``cloudfront_key_id`` or ``AWS_CLOUDFRONT_KEY_ID`` Default: ``None`` The AWS key ID for the private key provided with ``cloudfront_key`` / ``AWS_CLOUDFRONT_KEY``; See :ref:`cloudfront-signed-url-header` for more info. ``cloudfront_signer`` Default: omitted By default the ``cloudfront_signer`` is generated based on the CloudFront key and ID provided. If both are provided URLs will be signed and will work for distributions with restricted viewer access, but if neither are provided then URLs will not be signed and will work for distributions with unrestricted viewer access. 
If you require a custom CloudFront signer you may pass a ``boto3`` ``CloudFrontSigner`` instance that can sign URLs, and to disable signing you may pass ``None``. ``signature_version`` or ``AWS_S3_SIGNATURE_VERSION`` Default: ``None`` The default signature version is ``s3v4``. Set this to ``s3`` to use the legacy signing scheme (aka ``v2``). Note that only certain regions support that version. You can check to see if your region is one of them in the `S3 region list`_. .. warning:: The signature versions are not backwards compatible so be careful about url endpoints if making this change for legacy projects. ``client_config`` or ``AWS_S3_CLIENT_CONFIG`` Default: ``None`` An instance of ``botocore.config.Config`` to do advanced configuration of the client such as ``max_pool_connections``. See all options in the `Botocore docs`_. .. note:: Setting this overrides the settings for ``addressing_style``, ``signature_version`` and ``proxies``. Include them as arguments to your ``botocore.config.Config`` class if you need them. .. _AWS Signature Version 4: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html .. _S3 region list: https://docs.aws.amazon.com/general/latest/gr/s3.html#s3_region .. _list of canned ACLs: https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl .. _Boto3 docs for uploading files: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.put_object .. _Boto3 docs for TransferConfig: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/customizations/s3.html#boto3.s3.transfer.TransferConfig .. _ManifestStaticFilesStorage: https://docs.djangoproject.com/en/3.1/ref/contrib/staticfiles/#manifeststaticfilesstorage .. _Botocore docs: https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html#botocore.config.Config .. _cloudfront-signed-url-header: CloudFront Signed URLs ---------------------- If you want to generate signed Cloudfront URLs, you can do so by following these steps: #. Generate a CloudFront Key Pair as specified in the `AWS docs`_. #. Add ``cloudfront_key`` and ``cloudfront_key_id`` as above with the generated settings #. Install one of `cryptography`_ or `rsa`_ #. Set both ``cloudfront_key_id/AWS_CLOUDFRONT_KEY_ID`` and ``cloudfront_key/AWS_CLOUDFRONT_KEY`` django-storages will now generate `signed cloudfront urls`_. .. _AWS docs: https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-trusted-signers.html#private-content-creating-cloudfront-key-pairs-procedure .. _signed cloudfront urls: https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-signed-urls.html .. _cryptography: https://pypi.org/project/cryptography/ .. _rsa: https://pypi.org/project/rsa/ IAM Policy ---------- The IAM policy definition needed for the most common use case is: .. code-block:: json { "Version": "2012-10-17", "Statement": [ { "Sid": "VisualEditor0", "Effect": "Allow", "Action": [ "s3:PutObject", "s3:GetObjectAcl", "s3:GetObject", "s3:ListBucket", "s3:DeleteObject", "s3:PutObjectAcl" ], "Principal": { "AWS": "arn:aws:iam::example-AWS-account-ID:user/example-user-name" }, "Resource": [ "arn:aws:s3:::example-bucket-name/*", "arn:aws:s3:::example-bucket-name" ] } ] } For more information about Principal, please refer to `AWS JSON Policy Elements`_ .. 
_AWS JSON Policy Elements: https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html django-storages-1.14.5/docs/backends/apache_libcloud.rst000066400000000000000000000143171475414346200232430ustar00rootroot00000000000000Apache Libcloud =============== `Apache Libcloud`_ is an API wrapper around a range of cloud storage providers. It aims to provide a consistent API for dealing with cloud storage (and, more broadly, the many other services provided by cloud providers, such as device provisioning, load balancer configuration, and DNS configuration). Use pip to install apache-libcloud from PyPI:: pip install apache-libcloud As of v0.10.1, Libcloud supports the following cloud storage providers: * `Amazon S3`_ * `Google Cloud Storage`_ * `Nimbus.io`_ * `Ninefold Cloud Storage`_ * `Rackspace CloudFiles`_ Libcloud can also be configured with relatively little effort to support any provider using EMC Atmos storage, or the OpenStack API. .. _Apache Libcloud: http://libcloud.apache.org/ .. _Amazon S3: http://aws.amazon.com/s3/ .. _Google Cloud Storage: http://cloud.google.com/products/cloud-storage.html .. _Rackspace CloudFiles: http://www.rackspace.com/cloud/cloud_hosting_products/files/ .. _Ninefold Cloud Storage: http://ninefold.com/cloud-storage/ .. _Nimbus.io: http://nimbus.io Settings -------- ``LIBCLOUD_PROVIDERS`` ~~~~~~~~~~~~~~~~~~~~~~ This setting is required to configure connections to cloud storage providers. Each entry corresponds to a single 'bucket' of storage. You can have multiple buckets for a single service provider (e.g., multiple S3 buckets), and you can define buckets at multiple providers. For example, the following configuration defines 3 providers: two buckets (``bucket-1`` and ``bucket-2``) on a US-based Amazon S3 store, and a third bucket (``bucket-3``) on Google:: LIBCLOUD_PROVIDERS = { 'amazon_1': { 'type': 'libcloud.storage.types.Provider.S3_US_STANDARD_HOST', 'user': '', 'key': '', 'bucket': 'bucket-1', }, 'amazon_2': { 'type': 'libcloud.storage.types.Provider.S3_US_STANDARD_HOST', 'user': '', 'key': '', 'bucket': 'bucket-2', }, 'google': { 'type': 'libcloud.storage.types.Provider.GOOGLE_STORAGE', 'user': '', 'key': '', 'bucket': 'bucket-3', }, } The values for the ``type``, ``user`` and ``key`` arguments will vary depending on your storage provider: **Amazon S3**: **type**: ``libcloud.storage.types.Provider.S3_US_STANDARD_HOST``, **user**: Your AWS access key ID **key**: Your AWS secret access key If you want to use an availability zone other than the US default, you can use one of ``S3_US_WEST_HOST``, ``S3_US_WEST_OREGON_HOST``, ``S3_EU_WEST_HOST``, ``S3_AP_SOUTHEAST_HOST``, or ``S3_AP_NORTHEAST_HOST`` instead of ``S3_US_STANDARD_HOST``. **Google Cloud Storage**: **type**: ``libcloud.storage.types.Provider.GOOGLE_STORAGE``, **user**: Your Google APIv1 username (20 characters) **key**: Your Google APIv1 key **Nimbus.io**: **type**: ``libcloud.storage.types.Provider.NIMBUS``, **user**: Your Nimbus.io user ID **key**: Your Nimbus.io access key **Ninefold Cloud Storage**: **type**: ``libcloud.storage.types.Provider.NINEFOLD``, **user**: Your Atmos Access Token **key**: Your Atmos Shared Secret **Rackspace CloudFiles**: **type**: ``libcloud.storage.types.Provider.CLOUDFILES_US`` or ``libcloud.storage.types.Provider.CLOUDFILES_UK``, **user**: Your Rackspace user ID **key**: Your Rackspace access key You can specify any bucket name you want; however, the bucket must exist before you can start using it.
If you need to create the bucket, you can use the storage API. For example, to create ``bucket-1`` from our previous example:: >>> from storages.backends.apache_libcloud import LibCloudStorage >>> store = LibCloudStorage('amazon_1') >>> store.driver.create_container('bucket-1') ``DEFAULT_LIBCLOUD_PROVIDER`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Once you have defined your Libcloud providers, you have the option of setting one provider as the default provider of Libcloud storage. This is done by setting ``DEFAULT_LIBCLOUD_PROVIDER`` to the key in ``LIBCLOUD_PROVIDERS`` that you want to use as the default provider. For example, if you want the ``amazon_1`` provider to be the default provider, use:: DEFAULT_LIBCLOUD_PROVIDER = 'amazon_1' If ``DEFAULT_LIBCLOUD_PROVIDER`` isn't set, the Libcloud backend will assume that the default storage provider is named ``default``. Therefore, you can avoid setting ``DEFAULT_LIBCLOUD_PROVIDER`` by simply naming one of your Libcloud providers ``default``:: LIBCLOUD_PROVIDERS = { 'default': { 'type': ... }, } ``DEFAULT_FILE_STORAGE``, ``STORAGES`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If you want your Libcloud storage to be the default Django file store, you can set:: # django < 4.2 DEFAULT_FILE_STORAGE = 'storages.backends.apache_libcloud.LibCloudStorage' # django >= 4.2 STORAGES = {"default": {"BACKEND": "storages.backends.apache_libcloud.LibCloudStorage"}} Your default Libcloud provider will be used as the file store. Certificate authorities ----------------------- Libcloud uses HTTPS connections, and in order to validate that these HTTPS connections are correctly signed, root CA certificates must be present. On some platforms (most notably, OS X and Windows), the required certificates may not be available by default. To test whether the required certificates are available, try creating a storage instance:: >>> from storages.backends.apache_libcloud import LibCloudStorage >>> store = LibCloudStorage('amazon_1') Traceback (most recent call last): ... ImproperlyConfigured: Unable to create libcloud driver type libcloud.storage.types.Provider.S3_US_STANDARD_HOST: No CA Certificates were found in CA_CERTS_PATH. If you get this error, you need to install a certificate authority. `Download a certificate authority file`_, and then put the following two lines into your settings.py:: import libcloud.security libcloud.security.CA_CERTS_PATH.append("/path/to/your/cacerts.pem") .. _Download a certificate authority file: http://curl.haxx.se/ca/cacert.pem django-storages-1.14.5/docs/backends/azure.rst000066400000000000000000000246521475414346200212760ustar00rootroot00000000000000Azure Storage ============= A custom storage system for Django using Microsoft Azure Storage as the backend. Installation ------------ Install the Azure SDK:: pip install django-storages[azure] Configuration & Settings ------------------------ Django 4.2 changed the way file storage objects are configured. In particular, it made it easier to independently configure storage backends and add additional ones. To configure multiple storage objects pre Django 4.2 required subclassing the backend because the settings were global, now you pass them under the key ``OPTIONS``.
For example, to save media files to Azure on Django >= 4.2 you'd define:: STORAGES = { "default": { "BACKEND": "storages.backends.azure_storage.AzureStorage", "OPTIONS": { ...your_options_here }, }, } On Django < 4.2 you'd instead define:: DEFAULT_FILE_STORAGE = "storages.backends.azure_storage.AzureStorage" To put static files on Azure via ``collectstatic`` on Django >= 4.2 you'd include the ``staticfiles`` key (at the same level as ``default``) in the ``STORAGES`` dictionary while on Django < 4.2 you'd instead define:: STATICFILES_STORAGE = "storages.backends.azure_storage.AzureStorage" The settings documented in the following sections include both the key for ``OPTIONS`` (and subclassing) as well as the global value. Given the significant improvements provided by the new API, migration is strongly encouraged. Authentication Settings ~~~~~~~~~~~~~~~~~~~~~~~ Several different methods of authentication are provided. In order of precedence they are: #. ``connection_string`` or ``AZURE_CONNECTION_STRING`` (see `Connection string docs `_) #. (``account_key`` or ``AZURE_ACCOUNT_KEY``) and (``account_name`` or ``AZURE_ACCOUNT_NAME``) #. ``token_credential`` or ``AZURE_TOKEN_CREDENTIAL`` with ``account_name`` or ``AZURE_ACCOUNT_NAME`` #. ``sas_token`` or ``AZURE_SAS_TOKEN`` Using Managed Identity ++++++++++++++++++++++ `Azure Managed Identity `_ is an authentication method that allows you to authenticate to Azure services without storing credentials in your code. Managed Identity is the recommended mechanism for password-less authentication to Azure Storage Accounts from other Azure services like App Services, Functions, Container Apps, and VMs. To use Managed Identity you will need to configure a System Assigned Managed Identity or a User Assigned Managed Identity for your app service. Then you can use the `DefaultAzureCredential `_ class from the Azure SDK to authenticate. This class will automatically try all the available authentication methods in the order of precedence. ``DefaultAzureCredential`` will also use environment variables for local development, or VS Code Azure Login if available. This `guide `_ contains more information on assigning roles to Storage Accounts. Before using Managed Identity, you will need to install the Azure Identity package:: pip install azure-identity After creating the containers in the Azure Storage Account, you can configure Managed Identity in Django settings. Import ``DefaultAzureCredential`` from ``azure.identity`` to use it for the ``token_credential`` property:: from azure.identity import DefaultAzureCredential ... STORAGES = { "default": { "BACKEND": "storages.backends.azure_storage.AzureStorage", "OPTIONS": { "token_credential": DefaultAzureCredential(), "account_name": "mystorageaccountname", "azure_container": "media", }, }, "staticfiles": { "BACKEND": "storages.backends.azure_storage.AzureStorage", "OPTIONS": { "token_credential": DefaultAzureCredential(), "account_name": "mystorageaccountname", "azure_container": "static", }, }, } For `User assigned Managed Identity `_, pass the client ID parameter to the DefaultAzureCredential call. Settings ~~~~~~~~ ``azure_container`` or ``AZURE_CONTAINER`` **Required** This is where the files uploaded through Django will be uploaded. The container must be already created, since the storage system will not attempt to create it. ``azure_ssl`` or ``AZURE_SSL`` Default: ``True`` Set a secure connection (HTTPS), otherwise it makes an insecure connection (HTTP). 
``upload_max_conn`` or ``AZURE_UPLOAD_MAX_CONN`` Default: ``2`` Number of connections to make when uploading a single file. ``timeout`` or ``AZURE_CONNECTION_TIMEOUT_SECS`` Default: ``20`` Global connection timeout in seconds. ``max_memory_size`` or ``AZURE_BLOB_MAX_MEMORY_SIZE`` Default: ``2*1024*1024`` i.e ``2MB`` Maximum memory used by a downloaded file before dumping it to disk in bytes. ``expiration_secs`` or ``AZURE_URL_EXPIRATION_SECS`` Default: ``None`` Seconds before a URL expires, set to ``None`` to never expire it. Be aware the container must have public read permissions in order to access a URL without expiration date. ``overwrite_files`` or ``AZURE_OVERWRITE_FILES`` Default: ``False`` Whether or not to overwrite a file previously uploaded with the same name. If not, random character are appended. ``location`` or ``AZURE_LOCATION`` Default: ``''`` Default location for the uploaded files. This is a path that gets prepended to every file name. ``endpoint_suffix`` or ``AZURE_ENDPOINT_SUFFIX`` Default: ``core.windows.net`` Use ``core.chinacloudapi.cn`` for azure.cn accounts. ``custom_domain`` or ``AZURE_CUSTOM_DOMAIN`` Default: ``None`` The custom domain to use for generating URLs for files. For example, ``www.mydomain.com`` or ``mycdn.azureedge.net``. ``AZURE_TOKEN_CREDENTIAL`` A token credential used to authenticate HTTPS requests. The token value should be updated before its expiration. ``cache_control`` or ``AZURE_CACHE_CONTROL`` Default: ``None`` A variable to set the Cache-Control HTTP response header. E.g.:: cache_control: "public,max-age=31536000,immutable" ``object_parameters`` or ``AZURE_OBJECT_PARAMETERS`` Default: ``{}`` Use this to set content settings on all objects. To set these on a per-object basis, subclass the backend and override ``AzureStorage.get_object_parameters``. This is a Python ``dict`` and the possible parameters are: ``content_type``, ``content_encoding``, ``content_language``, ``content_disposition``, ``cache_control``, and ``content_md5``. ``client_options`` or ``AZURE_CLIENT_OPTIONS`` Default: ``{}`` A dict of kwarg options to send to the ``BlobServiceClient``. A partial list of options can be found `in the client docs `__. Additionally, this setting can be used to configure the client retry settings. To see how follow the `Python retry docs `__. ``api_version`` or ``AZURE_API_VERSION`` Default: ``None`` **Note: This option is deprecated. Use client_options/AZURE_CLIENT_OPTIONS instead.** The Azure Storage API version to use. Default value is the most recent service version that is compatible with the current SDK. Setting to an older version may result in reduced feature compatibility. Using with Azurite (previously Azure Storage Emulator) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Azurite is a local emulator for Azure Storage accounts that emulates the API for Azure Blob storage and enables local testing and development without an Azure account, free of charge. To use the Azure Storage Emulator, you download and install it from the `Azurite page `_. 
Copy the default `connection string `_ and set it in your settings:: STORAGES = { "default": { "BACKEND": "storages.backends.azure_storage.AzureStorage", "OPTIONS": { "connection_string": "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "azure_container": "media", }, }, "staticfiles": { "BACKEND": "storages.backends.azure_storage.AzureStorage", "OPTIONS": { "connection_string": "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", "azure_container": "static", }, }, } Django Storages will not create containers if they don't exist, so you will need to create any storage containers using the Azurite CLI or the Azure Storage Explorer. Additional Notes ---------------- Filename Restrictions ~~~~~~~~~~~~~~~~~~~~~ Azure file names have some extra restrictions. They can't: - end with a dot (``.``) or slash (``/``) - contain more than 256 slashes (``/``) - be longer than 1024 characters Private vs Public URLs ~~~~~~~~~~~~~~~~~~~~~~ The difference between public and private URLs is that private includes the SAS token. With private URLs you can override certain properties stored for the blob by specifying query parameters as part of the shared access signature. These properties include the cache-control, content-type, content-encoding, content-language, and content-disposition. See https://docs.microsoft.com/rest/api/storageservices/set-blob-properties#remarks You can specify these parameters by:: az_storage = AzureStorage() az_url = az_storage.url(blob_name, parameters={'content_type': 'text/html;'}) django-storages-1.14.5/docs/backends/dropbox.rst000066400000000000000000000105251475414346200216170ustar00rootroot00000000000000Dropbox ======= A Django files storage using Dropbox as a backend via the official `Dropbox SDK for Python`_. Currently only v2 of the API is supported. Installation ------------ Before you start configuration, you will need to install the SDK which can be done for you automatically by doing:: pip install django-storages[dropbox] Configuration & Settings ------------------------ Django 4.2 changed the way file storage objects are configured. In particular, it made it easier to independently configure storage backends and add additional ones. To configure multiple storage objects pre Django 4.2 required subclassing the backend because the settings were global, now you pass them under the key ``OPTIONS``. For example, to save media files to Dropbox on Django >= 4.2 you'd define:: STORAGES = { "default": { "BACKEND": "storages.backends.dropbox.DropboxStorage", "OPTIONS": { ...your_options_here }, }, } On Django < 4.2 you'd instead define:: DEFAULT_FILE_STORAGE = "storages.backends.dropbox.DropboxStorage" To put static files on Dropbox via ``collectstatic`` on Django >= 4.2 you'd include the ``staticfiles`` key (at the same level as ``default``) in the ``STORAGES`` dictionary while on Django < 4.2 you'd instead define:: STATICFILES_STORAGE = "storages.backends.dropbox.DropboxStorage" The settings documented in the following sections include both the key for ``OPTIONS`` (and subclassing) as well as the global value. Given the significant improvements provided by the new API, migration is strongly encouraged. 
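As a concrete illustration, a minimal sketch of a refresh-token based configuration on Django >= 4.2 might look like the following; every value shown is a placeholder that you would replace with the credentials obtained as described in the Authentication section below::

    STORAGES = {
        "default": {
            "BACKEND": "storages.backends.dropbox.DropboxStorage",
            "OPTIONS": {
                # Placeholders -- substitute the values from your Dropbox app console
                "app_key": "<your-app-key>",
                "app_secret": "<your-app-secret>",
                "oauth2_refresh_token": "<your-refresh-token>",
                # Hypothetical prefix under which uploads are stored; must begin with a /
                "root_path": "/django-media",
            },
        },
    }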
Authentication -------------- Two methods of authentication are supported: #. Using an access token #. Using a refresh token with an app key and secret Dropbox has recently introduced short-lived access tokens only, and does not seem to allow new apps to generate access tokens that do not expire. Short-lived access tokens can be identified by their prefix (short-lived access tokens start with ``'sl.'``). You can manually obtain the refresh token by following the instructions below using ``APP_KEY`` and ``APP_SECRET``. The relevant settings, all of which can be obtained by following the instructions in the `tutorial`_, are: #. ``oauth2_access_token`` or ``DROPBOX_OAUTH2_TOKEN`` #. ``oauth2_refresh_token`` or ``DROPBOX_OAUTH2_REFRESH_TOKEN`` #. ``app_secret`` or ``DROPBOX_APP_SECRET`` #. ``app_key`` or ``DROPBOX_APP_KEY`` The refresh token can be obtained using the `commandline-oauth.py`_ example from the `Dropbox SDK for Python`_. Get AUTHORIZATION_CODE ~~~~~~~~~~~~~~~~~~~~~~ Using your ``APP_KEY`` follow the link: https://www.dropbox.com/oauth2/authorize?client_id=APP_KEY&token_access_type=offline&response_type=code It will give you ``AUTHORIZATION_CODE``. Obtain the refresh token ~~~~~~~~~~~~~~~~~~~~~~~~ Using your ``APP_KEY``, ``APP_SECRET`` and ``AUTHORIZATION_CODE``, obtain the refresh token. .. code-block:: shell curl -u APP_KEY:APP_SECRET \ -d "code=AUTHORIZATION_CODE&grant_type=authorization_code" \ -H "Content-Type: application/x-www-form-urlencoded" \ -X POST "https://api.dropboxapi.com/oauth2/token" The response would be: .. code-block:: json { "access_token": "sl.************************", "token_type": "bearer", "expires_in": 14400, "refresh_token": "************************", <-- your REFRESH_TOKEN "scope": , "uid": "************************", "account_id": "dbid:************************" } Settings -------- ``root_path`` or ``DROPBOX_ROOT_PATH`` Default: ``'/'`` Path which will prefix all uploaded files. Must begin with a ``/``. ``timeout`` or ``DROPBOX_TIMEOUT`` Default: ``100`` Timeout in seconds for requests to the API. If ``None``, the client will wait forever. The default value matches the SDK at the time of this writing. ``write_mode`` or ``DROPBOX_WRITE_MODE`` Default: ``'add'`` Sets the Dropbox WriteMode strategy. Read more in the `official docs`_. .. _`tutorial`: https://www.dropbox.com/developers/documentation/python#tutorial .. _`Dropbox SDK for Python`: https://www.dropbox.com/developers/documentation/python#tutorial .. _`official docs`: https://dropbox-sdk-python.readthedocs.io/en/latest/api/files.html#dropbox.files.WriteMode .. _`commandline-oauth.py`: https://github.com/dropbox/dropbox-sdk-python/blob/master/example/oauth/commandline-oauth.py django-storages-1.14.5/docs/backends/ftp.rst000066400000000000000000000043061475414346200207330ustar00rootroot00000000000000FTP === .. warning:: This FTP storage is not prepared to work with large files, because it uses memory for temporary data storage. It also does not close the FTP connection automatically (it opens the connection lazily and tries to reestablish it when disconnected). This implementation was written primarily to upload files via the admin to a remote FTP location and read them back on the site over HTTP. It has been tested mostly in this configuration, so reading and writing through the ``FTPStorageFile`` class may break. Configuration & Settings ------------------------ Django 4.2 changed the way file storage objects are configured. In particular, it made it easier to independently configure storage backends and add additional ones.
To configure multiple storage objects pre Django 4.2 required subclassing the backend because the settings were global, now you pass them under the key ``OPTIONS``. For example, to use FTP to save media files on Django >= 4.2 you'd define:: STORAGES = { "default": { "BACKEND": "storages.backends.ftp.FTPStorage", "OPTIONS": { ...your_options_here }, }, } On Django < 4.2 you'd instead define:: DEFAULT_FILE_STORAGE = "storages.backends.ftp.FTPStorage" To use FTP to store static files via ``collectstatic`` on Django >= 4.2 you'd include the ``staticfiles`` key (at the same level as ``default``) in the ``STORAGES`` dictionary while on Django < 4.2 you'd instead define:: STATICFILES_STORAGE = "storages.backends.ftp.FTPStorage" The settings documented in the following sections include both the key for ``OPTIONS`` (and subclassing) as well as the global value. Given the significant improvements provided by the new API, migration is strongly encouraged. Settings ~~~~~~~~ ``location`` or ``FTP_STORAGE_LOCATION`` **Required** Format as a url like ``"{scheme}://{user}:{passwd}@{host}:{port}/"``. Supports both FTP and FTPS connections via scheme. ``allow_overwrite`` or ``FTP_ALLOW_OVERWRITE`` default: ``False`` Set to ``True`` to overwrite files instead of appending additional characters. ``encoding`` or ``FTP_STORAGE_ENCODING`` default: ``latin-1`` File encoding. ``base_url`` or ``BASE_URL`` default: ``settings.MEDIA_URL`` Serving base of files. django-storages-1.14.5/docs/backends/gcloud.rst000066400000000000000000000201451475414346200214160ustar00rootroot00000000000000Google Cloud Storage ==================== This backend provides Django File API for `Google Cloud Storage `_ using the Python library provided by Google. Installation ------------ Use pip to install from PyPI:: pip install django-storages[google] Configuration & Settings ------------------------ Django 4.2 changed the way file storage objects are configured. In particular, it made it easier to independently configure storage backends and add additional ones. To configure multiple storage objects pre Django 4.2 required subclassing the backend because the settings were global, now you pass them under the key ``OPTIONS``. For example, to save media files to GCS on Django >= 4.2 you'd define:: STORAGES = { "default": { "BACKEND": "storages.backends.gcloud.GoogleCloudStorage", "OPTIONS": { ...your_options_here }, }, } On Django < 4.2 you'd instead define:: DEFAULT_FILE_STORAGE = "storages.backends.gcloud.GoogleCloudStorage" To put static files on GCS via ``collectstatic`` on Django >= 4.2 you'd include the ``staticfiles`` key (at the same level as ``default``) in the ``STORAGES`` dictionary while on Django < 4.2 you'd instead define:: STATICFILES_STORAGE = "storages.backends.gcloud.GoogleCloudStorage" The settings documented in the following sections include both the key for ``OPTIONS`` (and subclassing) as well as the global value. Given the significant improvements provided by the new API, migration is strongly encouraged. Authentication Settings ~~~~~~~~~~~~~~~~~~~~~~~ By default, this library will try to use the credentials associated with the current Google Cloud infrastructure/environment for authentication. In most cases, the default service accounts are not sufficient to read/write and sign files in GCS, so you will need to create a dedicated service account: #. Create a service account. (`Google Getting Started Guide `__) #. Make sure your service account has access to the bucket and appropriate permissions. 
(`Using IAM Permissions `__) #. Ensure this service account is associated to the type of compute being used (Google Compute Engine (GCE), Google Kubernetes Engine (GKE), Google Cloud Run (GCR), etc) For development use cases, or other instances outside Google infrastructure: #. Create the key and download ``your-project-XXXXX.json`` file. #. Ensure the key is mounted/available to your running Django app. #. Set an environment variable of GOOGLE_APPLICATION_CREDENTIALS to the path of the json file. Alternatively, you can use the setting ``credentials`` or ``GS_CREDENTIALS`` as described below. Settings ~~~~~~~~ ``bucket_name`` or ``GS_BUCKET_NAME`` **Required** The name of the GCS bucket that will host the files. ``project_id`` or ``GS_PROJECT_ID`` default: ``None`` Your Google Cloud project ID. If unset, falls back to the default inferred from the environment. ``gzip`` or ``GS_IS_GZIPPED`` default: ``False`` Whether or not to enable gzipping of content types specified by ``gzip_content_types``. ``gzip_content_types`` or ``GZIP_CONTENT_TYPES`` default: ``(text/css,text/javascript,application/javascript,application/x-javascript,image/svg+xml)`` The list of content types to be gzipped when ``gzip`` is ``True``. .. _gs-creds: ``credentials`` or ``GS_CREDENTIALS`` default: ``None`` The OAuth 2 credentials to use for the connection. If unset, falls back to the default inferred from the environment (i.e. ``GOOGLE_APPLICATION_CREDENTIALS``):: from google.oauth2 import service_account GS_CREDENTIALS = service_account.Credentials.from_service_account_file( "path/to/credentials.json" ) .. _gs-default-acl: ``default_acl`` or ``GS_DEFAULT_ACL`` default: ``None`` ACL used when creating a new blob, from the `list of predefined ACLs `_. (A "JSON API" ACL is preferred but an "XML API/gsutil" ACL will be translated.) For most cases, the blob will need to be set to the ``publicRead`` ACL in order for the file to be viewed. If ``default_acl`` is not set, the blob will have the default permissions set by the bucket. ``publicRead`` files will return a public, non-expiring url. All other files return a signed (expiring) url. .. note:: GS_DEFAULT_ACL must be set to 'publicRead' to return a public url. Even if you set the bucket to public or set the file permissions directly in GCS to public. .. note:: When using this setting, make sure you have ``fine-grained`` access control enabled on your bucket, as opposed to ``Uniform`` access control, or else, file uploads will return with HTTP 400. If you already have a bucket with ``Uniform`` access control set to public read, please keep ``GS_DEFAULT_ACL`` to ``None`` and set ``GS_QUERYSTRING_AUTH`` to ``False``. ``querystring_auth`` or ``GS_QUERYSTRING_AUTH`` default: ``True`` If set to ``False`` it forces the url not to be signed. This setting is useful if you need to have a bucket configured with ``Uniform`` access control configured with public read. In that case you should force the flag ``GS_QUERYSTRING_AUTH = False`` and ``GS_DEFAULT_ACL = None`` ``file_overwrite`` or ``GS_FILE_OVERWRITE`` default: ``True`` By default files with the same name will overwrite each other. Set this to ``False`` to have extra characters appended. ``max_memory_size`` or ``GS_MAX_MEMORY_SIZE`` default: ``0`` i.e do not rollover The maximum amount of memory a returned file can take up (in bytes) before being rolled over into a temporary file on disk. Default is 0: Do not roll over. 
``blob_chunk_size`` or ``GS_BLOB_CHUNK_SIZE`` default: ``None`` The size of blob chunks that are sent via resumable upload. If this is not set then the generated request must fit in memory. Recommended if you are going to be uploading large files. .. note:: This must be a multiple of 256K (1024 * 256) ``object_parameters`` or ``GS_OBJECT_PARAMETERS`` default: `{}` Dictionary of key-value pairs mapping from blob property name to value. Use this to set parameters on all objects. To set these on a per-object basis, subclass the backend and override ``GoogleCloudStorage.get_object_parameters``. The valid property names are :: acl cache_control content_disposition content_encoding content_language content_type metadata storage_class If not set, the ``content_type`` property will be guessed. If set, ``acl`` overrides :ref:`GS_DEFAULT_ACL `. .. warning:: Do not set ``name``. This is set automatically based on the filename. ``custom_endpoint`` or ``GS_CUSTOM_ENDPOINT`` default: ``None`` Sets a `custom endpoint `_, that will be used instead of ``https://storage.googleapis.com`` when generating URLs for files. ``location`` or ``GS_LOCATION`` default: ``''`` Subdirectory in which files will be stored. ``expiration`` or ``GS_EXPIRATION`` default: ``timedelta(seconds=86400)``) The time that a generated URL is valid before expiration. The default is 1 day. Public files will return a url that does not expire. Files will be signed by the credentials provided to django-storages (See :ref:`GS Credentials `). Note: Default Google Compute Engine (GCE) Service accounts are `unable to sign urls `_. The ``expiration`` value is handled by the underlying `Google library `_. It supports `timedelta`, `datetime`, or `integer` seconds since epoch time. Note: The maximum value for this option is 7 days (604800 seconds) in version `v4` (See this `Github issue `_) django-storages-1.14.5/docs/backends/s3_compatible/000077500000000000000000000000001475414346200221315ustar00rootroot00000000000000django-storages-1.14.5/docs/backends/s3_compatible/backblaze-B2.rst000066400000000000000000000017421475414346200250460ustar00rootroot00000000000000Backblaze B2 ============ Backblaze B2 implements an `S3 Compatible API `_. To use it as a django-storages backend: #. Sign up for a `Backblaze B2 account `_, if you have not already done so. #. Create a public or private bucket. Note that object-level ACLs are not supported by B2 - all objects inherit their bucket's ACLs. #. Create an `application key `_. Best practice is to limit access to the bucket you just created. #. Follow the instructions in the :doc:`Amazon S3 docs <../amazon-S3>` with the following exceptions: * Set ``region_name`` to your Backblaze B2 region, for example, ``us-west-004`` * Set ``endpoint_url`` to ``https://s3.${AWS_S3_REGION_NAME}.backblazeb2.com`` * Set the values of ``access_key`` and ``secret_key`` to the application key id and application key you created in step 2. django-storages-1.14.5/docs/backends/s3_compatible/cloudflare-r2.rst000066400000000000000000000015311475414346200253240ustar00rootroot00000000000000Cloudflare R2 ============= Cloudflare R2 implements an `S3 Compatible API `_. To use it as a django-storages backend: #. Create an R2 bucket using Cloudflare's web panel or API #. Follow `Cloudflare's docs`_ to create authentication tokens, locking down permissions as required #. 
django-storages-1.14.5/docs/backends/s3_compatible/cloudflare-r2.rst000066400000000000000000000015311475414346200253240ustar00rootroot00000000000000Cloudflare R2 ============= Cloudflare R2 implements an `S3 Compatible API `_. To use it as a django-storages backend: #. Create an R2 bucket using Cloudflare's web panel or API #. Follow `Cloudflare's docs`_ to create authentication tokens, locking down permissions as required #. Follow the instructions in the :doc:`Amazon S3 docs <../amazon-S3>` with the following exceptions: * Set ``bucket_name`` to your previously created bucket * Set ``endpoint_url`` to ``https://<ACCOUNT_ID>.r2.cloudflarestorage.com`` * Set the values of ``access_key`` and ``secret_key`` to their respective Cloudflare keys .. note:: If you need a jurisdiction-specific endpoint or other advanced features, consult the Cloudflare docs. .. _Cloudflare's docs: https://developers.cloudflare.com/r2/api/s3/tokens/ django-storages-1.14.5/docs/backends/s3_compatible/digital-ocean-spaces.rst000066400000000000000000000007271475414346200266450ustar00rootroot00000000000000Digital Ocean ============= Digital Ocean Spaces implements the S3 protocol. To use it follow the instructions in the :doc:`Amazon S3 docs <../amazon-S3>` with the important caveats that you must: - Set ``region_name`` to your Digital Ocean region (such as ``nyc3`` or ``sfo2``) - Set ``endpoint_url`` to the value of ``https://${region_name}.digitaloceanspaces.com`` - Set the values of ``access_key`` and ``secret_key`` to the corresponding values from Digital Ocean django-storages-1.14.5/docs/backends/s3_compatible/index.rst000066400000000000000000000005631475414346200237760ustar00rootroot00000000000000S3 Compatible ============= Many service providers choose to implement the S3 protocol for their storage API. Below is a collection of documentation for how to configure the :doc:`Amazon S3 <../amazon-S3>` backend for some of the most popular. .. toctree:: :maxdepth: 1 :glob: backblaze-B2 cloudflare-r2 digital-ocean-spaces oracle-cloud scaleway django-storages-1.14.5/docs/backends/s3_compatible/oracle-cloud.rst000066400000000000000000000025571475414346200252440ustar00rootroot00000000000000Oracle Cloud ============= Oracle Cloud provides an S3-compatible object storage service. To use it: #. Create a `Customer Secret Key`_ #. Create a bucket Then follow the instructions in the :doc:`Amazon S3 docs <../amazon-S3>` replacing: - ``secret_key`` with the value previously generated - ``access_key`` with the value in the **Access Key** column - ``bucket_name`` with the bucket name - ``region_name`` with the current region - ``endpoint_url`` with ``https://{ORACLE_NAMESPACE}.compat.objectstorage.{ORACLE_REGION}.oraclecloud.com`` .. note:: The ``ORACLE_NAMESPACE`` value can be found on the bucket details page.
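A configuration sketch using those replacements might look like this, again assuming the ``S3Storage`` backend path and ``OPTIONS`` keys from the Amazon S3 page (every value below is a placeholder; substitute your own namespace, region, bucket, and keys)::

    ORACLE_NAMESPACE = "example-namespace"  # from the bucket details page (placeholder)
    ORACLE_REGION = "eu-frankfurt-1"  # your region (placeholder)

    STORAGES = {
        "default": {
            "BACKEND": "storages.backends.s3.S3Storage",
            "OPTIONS": {
                "bucket_name": "example-bucket",  # placeholder
                "region_name": ORACLE_REGION,
                "endpoint_url": (
                    f"https://{ORACLE_NAMESPACE}.compat.objectstorage."
                    f"{ORACLE_REGION}.oraclecloud.com"
                ),
                "access_key": "<access key>",  # from the Customer Secret Key (placeholder)
                "secret_key": "<secret key>",  # generated secret value (placeholder)
            },
        },
    }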
References ---------- - `Customer Secret Key`_ - `Amazon S3 Compatibility API docs`_ - `Amazon S3 Compatibility API endpoints`_ - `Oracle object storage namespaces docs`_ .. _Oracle object storage namespaces docs: https://docs.oracle.com/en-us/iaas/Content/Object/Tasks/understandingnamespaces.htm#Understanding_Object_Storage_Namespaces .. _Amazon S3 Compatibility API docs: https://docs.oracle.com/en-us/iaas/Content/Object/Tasks/s3compatibleapi.htm# .. _Amazon S3 Compatibility API endpoints: https://docs.oracle.com/en-us/iaas/api/#/en/s3objectstorage/20160918/ .. _Customer Secret Key: https://docs.oracle.com/en-us/iaas/Content/Identity/Tasks/managingcredentials.htm#To4 django-storages-1.14.5/docs/backends/s3_compatible/scaleway.rst000066400000000000000000000017431475414346200245000ustar00rootroot00000000000000Scaleway ======== `Scaleway Object Storage `_ implements the S3 protocol. To use it follow the instructions in the :doc:`Amazon S3 docs <../amazon-S3>` with the important caveats that you must: - Set ``AWS_BUCKET_NAME`` to the bucket you want to write to (such as ``my-chosen-bucket``) - Set ``AWS_S3_REGION_NAME`` to your Scaleway region (such as ``nl-ams`` or ``fr-par``) - Set ``AWS_S3_ENDPOINT_URL`` to the value of ``https://s3.${AWS_S3_REGION_NAME}.scw.cloud`` - Set ``AWS_ACCESS_KEY_ID`` to the value of your Access Key ID (e.g. ``SCW3XXXXXXXXXXXXXXXX``) - Set ``AWS_SECRET_ACCESS_KEY`` to the value of your Secret Key (e.g. ``abcdef10-ab12-cd34-ef56-acbdef123456``) With the settings above in place, saving a file with a name such as "my_chosen_file.txt" would make it available at the following addresses: ``https://s3.nl-ams.scw.cloud/my-chosen-bucket/my_chosen_file.txt`` ``https://my-chosen-bucket.s3.nl-ams.scw.cloud/my_chosen_file.txt`` django-storages-1.14.5/docs/backends/sftp.rst000066400000000000000000000103721475414346200211160ustar00rootroot00000000000000SFTP ==== Installation ------------ Install via:: pip install django-storages[sftp] Configuration & Settings ------------------------ Django 4.2 changed the way file storage objects are configured. In particular, it made it easier to independently configure storage backends and add additional ones. Configuring multiple storage objects before Django 4.2 required subclassing the backend because the settings were global; now you pass them under the ``OPTIONS`` key. For example, to save media files to SFTP on Django >= 4.2 you'd define:: STORAGES = { "default": { "BACKEND": "storages.backends.sftpstorage.SFTPStorage", "OPTIONS": { ...your_options_here }, }, } On Django < 4.2 you'd instead define:: DEFAULT_FILE_STORAGE = "storages.backends.sftpstorage.SFTPStorage" To put static files on SFTP via ``collectstatic`` on Django >= 4.2 you'd include the ``staticfiles`` key (at the same level as ``default``) in the ``STORAGES`` dictionary while on Django < 4.2 you'd instead define:: STATICFILES_STORAGE = "storages.backends.sftpstorage.SFTPStorage" The settings documented in the following sections include both the key for ``OPTIONS`` (and subclassing) as well as the global value. Given the significant improvements provided by the new API, migration is strongly encouraged. Settings ~~~~~~~~ ``host`` or ``SFTP_STORAGE_HOST`` **Required** The hostname where you want the files to be saved. ``root_path`` or ``SFTP_STORAGE_ROOT`` Default: ``''`` The root directory on the remote host into which files should be placed. Should work the same way that ``STATIC_ROOT`` works for local files. Must include a trailing slash. ``params`` or ``SFTP_STORAGE_PARAMS`` Default: ``{}`` A dictionary containing connection parameters to be passed as keyword arguments to ``paramiko.SSHClient().connect()`` (do not include hostname here). See `paramiko SSHClient.connect() documentation`_ for details. ``interactive`` or ``SFTP_STORAGE_INTERACTIVE`` Default: ``False`` A boolean indicating whether to prompt for a password if the connection cannot be made using keys, and there is not already a password in ``params``. You can set this to ``True`` to enable interactive login when running ``manage.py collectstatic``, for example. .. warning:: DO NOT set ``interactive`` to ``True`` if you are using this storage for files being uploaded to your site by users, because you'll have no way to enter the password when they submit the form. ``file_mode`` or ``SFTP_STORAGE_FILE_MODE`` Default: ``None`` A bitmask for setting permissions on newly-created files. See `Python os.chmod documentation`_ for acceptable values. ``dir_mode`` or ``SFTP_STORAGE_DIR_MODE`` Default: ``None`` A bitmask for setting permissions on newly-created directories. See `Python os.chmod documentation`_ for acceptable values. .. note:: Hint: if you start the mode number with a 0 you can express it in octal just like you would when doing "chmod 775 myfile" from bash. ``uid`` or ``SFTP_STORAGE_UID`` Default: ``None`` UID of the account that should be set as the owner of the files on the remote host. You may have to be root to set this. ``gid`` or ``SFTP_STORAGE_GID`` Default: ``None`` GID of the group that should be set on the files on the remote host. You have to be a member of the group to set this. ``known_host_file`` or ``SFTP_KNOWN_HOST_FILE`` Default: ``None`` Absolute path of the known hosts file. If it isn't set, ``"~/.ssh/known_hosts"`` will be used. ``base_url`` or ``SFTP_BASE_URL`` Default: Django ``MEDIA_URL`` setting The URL to serve files from. .. _`paramiko SSHClient.connect() documentation`: http://docs.paramiko.org/en/latest/api/client.html#paramiko.client.SSHClient.connect .. _`Python os.chmod documentation`: http://docs.python.org/library/os.html#os.chmod
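Building on the ``STORAGES`` example earlier in this section, a more concrete sketch of the ``OPTIONS`` dictionary might look like this (the host, paths, username, and key file are placeholders; ``params`` is passed straight through to ``paramiko.SSHClient().connect()``)::

    STORAGES = {
        "default": {
            "BACKEND": "storages.backends.sftpstorage.SFTPStorage",
            "OPTIONS": {
                "host": "media.example.com",  # placeholder
                "root_path": "/var/www/media/",  # note the trailing slash
                "params": {
                    # Keyword arguments for paramiko.SSHClient().connect()
                    "username": "django",  # placeholder
                    "key_filename": "/home/django/.ssh/id_ed25519",  # placeholder
                },
                "base_url": "https://media.example.com/",
            },
        },
    }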
Standalone Use -------------- If you intend to construct a storage instance not through Django but directly, use the storage instance as a context manager to make sure the underlying SSH connection is closed after use and no longer consumes resources. .. code-block:: python from storages.backends.sftpstorage import SFTPStorage with SFTPStorage(...) as sftp: sftp.listdir("") django-storages-1.14.5/docs/conf.py000066400000000000000000000205211475414346200171320ustar00rootroot00000000000000# django-storages documentation build configuration file, created by # sphinx-quickstart on Sun Aug 28 13:44:45 2011. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import os import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) import storages # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix of source filenames. source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = "index" # General information about the project. project = "django-storages" copyright = "2011-2023, Josh Schneier, David Larlet, et. al." # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = storages.__version__ # The full version, including alpha/beta/rc tags.
release = storages.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = "furo" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = "django-storagesdoc" # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). # latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). # latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ( "index", "django-storages.tex", "django-storages Documentation", "Josh Schneier, David Larlet, et. al.", "manual", ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Additional stuff for the LaTeX preamble. # latex_preamble = '' # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ( "index", "django-storages", "django-storages Documentation", ["Josh Schneier, David Larlet, et. al."], 1, ) ] # -- Options for Epub output --------------------------------------------------- # Bibliographic Dublin Core info. epub_title = "django-storages" epub_author = "Josh Schneier, David Larlet, et. al." epub_publisher = "Josh Schneier, David Larlet, et. al." epub_copyright = "2011-2023, Josh Schneier, David Larlet, et. al." # The language of the text. It defaults to the language option # or en if the language is not set. # epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. # epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. # epub_identifier = '' # A unique identification for the text. # epub_uid = '' # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. # epub_pre_files = [] # HTML files that should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. # epub_post_files = [] # A list of files that should not be packed into the epub file. # epub_exclude_files = [] # The depth of the table of contents in toc.ncx. # epub_tocdepth = 3 # Allow duplicate toc entries. # epub_tocdup = True django-storages-1.14.5/docs/index.rst000066400000000000000000000021161475414346200174740ustar00rootroot00000000000000django-storages =============== django-storages is a collection of custom storage backends for Django. .. 
toctree:: :maxdepth: 2 :titlesonly: backends/amazon-S3 backends/apache_libcloud backends/azure backends/dropbox backends/ftp backends/gcloud backends/sftp backends/s3_compatible/index Installation ************ Use pip to install from PyPI:: pip install django-storages Each storage backend has its own unique settings you will need to add to your settings.py file. Read the documentation for your storage engine(s) of choice to determine what you need to add. Contributing ************ To contribute to django-storages `create a fork`_ on GitHub. Clone your fork, make some changes, and submit a pull request. .. _create a fork: https://github.com/jschneier/django-storages Issues ****** Use the GitHub `issue tracker`_ for django-storages to submit bugs, issues, and feature requests. .. _issue tracker: https://github.com/jschneier/django-storages/issues Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` django-storages-1.14.5/docs/logos/000077500000000000000000000000001475414346200167565ustar00rootroot00000000000000django-storages-1.14.5/docs/logos/horizontal.png000066400000000000000000000672041475414346200216660ustar00rootroot00000000000000[binary image data omitted for the docs/logos files: horizontal.png, horizontal2.png, logo.svg, vertical.png]
NK&m*:IVnj"2K!쭃by}u08f+): FF 8`&O6UzpPF![.ƝN@duo{5(I  _Y_-r6#6K?Q=!S4a̮f V#TKvl Ԡ{اRXEWse<|-OxVNW mcuL V]wRN ee=B-fU+=oBׂD|z|~}%I&]+aY vc{V'EX]ޕ#*b.I㻱 M6+R3|H03 yz[;4 }cd0nGH9qA9pi[U' 5a'p'E2y.4}䃠ctLe`4'8tcQ Too "\z߅- *rM~ԶL+tk0A" m5b,u-L@y&yT7u:?c%w'|4 :@q0>2NLq+0\TmDg Rֶ̓.lubA4@x7ڷMeXŋl%V=}+yoʻz٘OGOԡa_!".>wFEy-nMNy0:堆dL+蠍,~1˖Hj~7Z|X* "['Ok~0q70wcF݆kC^ÁL ]۵_@{ӬȒsИY )x ҫi*ɾ#fj ^'W: _[i 3azU:(_GkBj[5>j *   A:Ao|A@Ybraup BNH:@@01Ξ?t)=Z:*ٓfIe[@i7!'啲OvϿ*.uMF_^`2tIbN8-yv!CI(7JN|-< :MT[|i/ 9=.\I=?i{ˆ.ίZ3=ApվOUuauYget튛${k6cA80`;;+V6\Omf=ll軓w^ `2tЦ!7Y Wauw%;uDC gV*9{ s* r1 B;Ҧrd:㰇2`^fqb6"?2tP2ǥ!1'|kuRb|eB ;0$b:Īɞ`iM68)>\@"BΓy̞Q& >N,]:_̉DM2!3=ePsvxl0G؇KKV&f/aLm){e!'.#i ]d2JX5Oxap%T*`׌1:1n#'\ !Xdt K1&@{X 3s:/ FA0PziL|1OXj*~|&YdbBj% iKk>GF8=u7X 9?d)ٳ &2tWُk\Q`8 CӉv 3:h+9Y1 d){* @gC,ќɳg@T%X1py8jQ7~ 쩫b nCÞZe:E]/!eWXd2!7 C%  ˰koydW"H%eeP1 Zu${N$+7fA`J؍@nws':ˢBE g8tITvrV9 K1ꀘ b:ɝ|]_d.i&⦴( .. "L v>mg^>i&RZtlj^B++2+YD`u 0'XIENDB`django-storages-1.14.5/docs/make.bat000066400000000000000000000106611475414346200172440ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. changes to make an overview over all changed/added/deprecated items echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. 
echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\django-storages.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\django-storages.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) :end django-storages-1.14.5/docs/requirements.txt000066400000000000000000000000341475414346200211140ustar00rootroot00000000000000furo==2024.8.6 Sphinx==7.2.6django-storages-1.14.5/pyproject.toml000066400000000000000000000076431475414346200176310ustar00rootroot00000000000000# NOTE: You have to use single-quoted strings in TOML for regular expressions. # It's the equivalent of r-strings in Python. Multiline strings are treated as # verbose regular expressions by Black. Use [ ] to denote a significant space # character. 
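# For illustration: a pattern like '\.pyi?$' can be written as-is inside single
# quotes, whereas the equivalent double-quoted TOML string needs its backslash
# escaped ("\\.pyi?$").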
[build-system] build-backend = "setuptools.build_meta" requires = [ "setuptools>=61.2", ] [project] name = "django-storages" description = "Support for many storage backends in Django" license = {text = "BSD-3-Clause"} authors = [{name = "Josh Schneier", email = "josh.schneier@gmail.com"}] requires-python = ">=3.7" classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Framework :: Django", "Framework :: Django :: 3.2", "Framework :: Django :: 4.1", "Framework :: Django :: 4.2", "Framework :: Django :: 5.0", "Framework :: Django :: 5.1", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", ] dynamic = [ "readme", "version", ] dependencies = [ "Django>=3.2", ] [project.optional-dependencies] azure = [ "azure-core>=1.13", "azure-storage-blob>=12", ] boto3 = [ "boto3>=1.4.4", ] dropbox = [ "dropbox>=7.2.1", ] google = [ "google-cloud-storage>=1.32", ] libcloud = [ "apache-libcloud", ] s3 = [ "boto3>=1.4.4", ] sftp = [ "paramiko>=1.15", ] [project.urls] Homepage = "https://github.com/jschneier/django-storages" [tool.setuptools] zip-safe = false packages = [ "storages", "storages.backends", ] include-package-data = false [tool.setuptools.dynamic] readme = {file = ["README.rst"]} version = {attr = "storages.__version__"} [tool.ruff] lint.select = [ "AIR", # Airflow "ASYNC", # flake8-async "B", # flake8-bugbear "C4", # flake8-comprehensions "C90", # McCabe cyclomatic complexity "DJ", # flake8-django "E", # pycodestyle "EXE", # flake8-executable "F", # Pyflakes "FLY", # flynt "G", # flake8-logging-format "I", # isort "ICN", # flake8-import-conventions "INP", # flake8-no-pep420 "INT", # flake8-gettext "ISC", # flake8-implicit-str-concat "NPY", # NumPy-specific rules "PD", # pandas-vet "PERF", # Perflint "PGH", # pygrep-hooks "PIE", # flake8-pie "PL", # Pylint "PYI", # flake8-pyi "RUF", # Ruff-specific rules "SLOT", # flake8-slots "T10", # flake8-debugger "T20", # flake8-print "TCH", # flake8-type-checking "TID", # flake8-tidy-imports "W", # pycodestyle "YTT", # flake8-2020 # "A", # flake8-builtins # "ANN", # flake8-annotations # "ARG", # flake8-unused-arguments # "BLE", # flake8-blind-except # "COM", # flake8-commas # "D", # pydocstyle # "DTZ", # flake8-datetimez # "EM", # flake8-errmsg # "ERA", # eradicate # "FA", # flake8-future-annotations # "FBT", # flake8-boolean-trap # "FIX", # flake8-fixme # "N", # pep8-naming # "PT", # flake8-pytest-style # "PTH", # flake8-use-pathlib # "Q", # flake8-quotes # "RET", # flake8-return # "RSE", # flake8-raise # "S", # flake8-bandit # "SIM", # flake8-simplify # "SLF", # flake8-self # "TD", # flake8-todos # "TRY", # tryceratops # "UP", # pyupgrade ] lint.ignore = [ "B028", "B904", "PGH004", ] target-version = "py37" [tool.ruff.lint.isort] force-single-line = true known-first-party = ["storages"] [tool.ruff.lint.per-file-ignores] "docs/conf.py" = ["E402", "INP001"] "storages/backends/ftp.py" = ["PERF203"] "tests/test_s3.py" = ["B018"] [tool.ruff.lint.pylint] allow-magic-value-types = ["int", "str"] django-storages-1.14.5/setup.py000066400000000000000000000000461475414346200164150ustar00rootroot00000000000000from setuptools import setup 
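# Package metadata is declared in pyproject.toml; the bare setup() call below
# acts only as a compatibility shim for tooling that still invokes setup.py.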
setup() django-storages-1.14.5/storages/000077500000000000000000000000001475414346200165325ustar00rootroot00000000000000django-storages-1.14.5/storages/__init__.py000066400000000000000000000000271475414346200206420ustar00rootroot00000000000000__version__ = "1.14.5" django-storages-1.14.5/storages/backends/000077500000000000000000000000001475414346200203045ustar00rootroot00000000000000django-storages-1.14.5/storages/backends/__init__.py000066400000000000000000000000001475414346200224030ustar00rootroot00000000000000django-storages-1.14.5/storages/backends/apache_libcloud.py000066400000000000000000000160661475414346200237650ustar00rootroot00000000000000# Django storage using libcloud providers # Aymeric Barantal (mric at chamal.fr) 2011 # import io from urllib.parse import urljoin from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.files.base import File from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible from storages.utils import clean_name try: from libcloud.storage.providers import get_driver from libcloud.storage.types import ObjectDoesNotExistError from libcloud.storage.types import Provider except ImportError: raise ImproperlyConfigured("Could not load libcloud") @deconstructible class LibCloudStorage(Storage): """Django storage derived class using apache libcloud to operate on supported providers""" def __init__(self, provider_name=None, option=None): if provider_name is None: provider_name = getattr(settings, "DEFAULT_LIBCLOUD_PROVIDER", "default") self.provider = settings.LIBCLOUD_PROVIDERS.get(provider_name) if not self.provider: raise ImproperlyConfigured( "LIBCLOUD_PROVIDERS %s not defined or invalid" % provider_name ) extra_kwargs = {} if "region" in self.provider: extra_kwargs["region"] = self.provider["region"] # Used by the GoogleStorageDriver if "project" in self.provider: extra_kwargs["project"] = self.provider["project"] try: provider_type = self.provider["type"] if isinstance(provider_type, str): module_path, tag = provider_type.rsplit(".", 1) if module_path != "libcloud.storage.types.Provider": raise ValueError("Invalid module path") provider_type = getattr(Provider, tag) Driver = get_driver(provider_type) self.driver = Driver( self.provider["user"], self.provider["key"], **extra_kwargs ) except Exception as e: raise ImproperlyConfigured( "Unable to create libcloud driver type %s: %s" % (self.provider.get("type"), e) ) self.bucket = self.provider["bucket"] # Limit to one container def _get_bucket(self): """Helper to get bucket object (libcloud container)""" return self.driver.get_container(self.bucket) def _get_object(self, name): """Get object by its name. ObjectDoesNotExistError will be raised if object not found""" return self.driver.get_object(self.bucket, clean_name(name)) def delete(self, name): """Delete object on remote""" try: obj = self._get_object(name) return self.driver.delete_object(obj) except ObjectDoesNotExistError: pass def exists(self, name): try: _ = self._get_object(name) except ObjectDoesNotExistError: return False return True def listdir(self, path="/"): """Lists the contents of the specified path, returning a 2-tuple of lists; the first item being directories, the second item being files. 
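        A minimal illustrative call (hypothetical container layout):

            dirs, files = storage.listdir("media/uploads")
            # dirs  -> ["2024"]
            # files -> ["report.pdf"]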
""" container = self._get_bucket() objects = self.driver.list_container_objects(container) path = clean_name(path) if not path.endswith("/"): path = "%s/" % path files = [] dirs = [] # TOFIX: better algorithm to filter correctly # (and not depend on google-storage empty folder naming) for o in objects: if path == "/": if o.name.count("/") == 0: files.append(o.name) elif o.name.count("/") == 1: dir_name = o.name[: o.name.index("/")] if dir_name not in dirs: dirs.append(dir_name) elif o.name.startswith(path): if o.name.count("/") <= path.count("/"): # TOFIX : special case for google storage with empty dir if o.name.endswith("_$folder$"): name = o.name[:-9] name = name[len(path) :] dirs.append(name) else: name = o.name[len(path) :] files.append(name) return (dirs, files) def size(self, name): obj = self._get_object(name) return obj.size if obj else -1 def url(self, name): provider_type = self.provider["type"].lower() obj = self._get_object(name) if not obj: return None try: url = self.driver.get_object_cdn_url(obj) except NotImplementedError as e: object_path = "{}/{}".format(self.bucket, obj.name) if "s3" in provider_type: base_url = "https://%s" % self.driver.connection.host url = urljoin(base_url, object_path) elif "google" in provider_type: url = urljoin("https://storage.googleapis.com", object_path) elif "azure" in provider_type: base_url = "https://%s.blob.core.windows.net" % self.provider["user"] url = urljoin(base_url, object_path) elif "backblaze" in provider_type: url = urljoin("api.backblaze.com/b2api/v1/", object_path) else: raise e return url def _open(self, name, mode="rb"): remote_file = LibCloudFile(name, self, mode=mode) return remote_file def _read(self, name): try: obj = self._get_object(name) except ObjectDoesNotExistError as e: raise FileNotFoundError(str(e)) # TOFIX : we should be able to read chunk by chunk return next(self.driver.download_object_as_stream(obj, obj.size)) def _save(self, name, file): self.driver.upload_object_via_stream(iter(file), self._get_bucket(), name) return name class LibCloudFile(File): """File inherited class for libcloud storage objects read and write""" def __init__(self, name, storage, mode): self.name = name self._storage = storage self._mode = mode self._is_dirty = False self._file = None def _get_file(self): if self._file is None: data = self._storage._read(self.name) self._file = io.BytesIO(data) return self._file def _set_file(self, value): self._file = value file = property(_get_file, _set_file) @property def size(self): if not hasattr(self, "_size"): self._size = self._storage.size(self.name) return self._size def read(self, num_bytes=None): return self.file.read(num_bytes) def write(self, content): if "w" not in self._mode: raise AttributeError("File was opened for read-only access.") self.file = io.BytesIO(content) self._is_dirty = True def close(self): if self._is_dirty: self._storage._save(self.name, self.file) self.file.close() django-storages-1.14.5/storages/backends/azure_storage.py000066400000000000000000000333771475414346200235450ustar00rootroot00000000000000import mimetypes import warnings from datetime import datetime from datetime import timedelta from tempfile import SpooledTemporaryFile from urllib.parse import urlparse from urllib.parse import urlunparse from azure.core.exceptions import ResourceNotFoundError from azure.core.utils import parse_connection_string from azure.storage.blob import BlobClient from azure.storage.blob import BlobSasPermissions from azure.storage.blob import BlobServiceClient from 
azure.storage.blob import ContentSettings from azure.storage.blob import generate_blob_sas from django.core.exceptions import SuspiciousOperation from django.core.files.base import File from django.utils import timezone from django.utils.deconstruct import deconstructible from storages.base import BaseStorage from storages.utils import clean_name from storages.utils import get_available_overwrite_name from storages.utils import safe_join from storages.utils import setting from storages.utils import to_bytes @deconstructible class AzureStorageFile(File): def __init__(self, name, mode, storage): self.name = name self._mode = mode self._storage = storage self._is_dirty = False self._file = None self._path = storage._get_valid_path(name) def _get_file(self): if self._file is not None: return self._file file = SpooledTemporaryFile( max_size=self._storage.max_memory_size, suffix=".AzureStorageFile", dir=setting("FILE_UPLOAD_TEMP_DIR", None), ) if "r" in self._mode or "a" in self._mode: download_stream = self._storage.client.download_blob( self._path, timeout=self._storage.timeout ) download_stream.readinto(file) if "r" in self._mode: file.seek(0) self._file = file return self._file def _set_file(self, value): self._file = value file = property(_get_file, _set_file) def read(self, *args, **kwargs): if "r" not in self._mode and "a" not in self._mode: raise AttributeError("File was not opened in read mode.") return super().read(*args, **kwargs) def write(self, content): if "w" not in self._mode and "+" not in self._mode and "a" not in self._mode: raise AttributeError("File was not opened in write mode.") self._is_dirty = True return super().write(to_bytes(content)) def close(self): if self._file is None: return if self._is_dirty: self._file.seek(0) self._storage._save(self.name, self._file) self._is_dirty = False self._file.close() self._file = None def _content_type(content): try: return content.file.content_type except AttributeError: pass try: return content.content_type except AttributeError: pass return None def _get_valid_path(s): # A blob name: # * must not end with dot or slash # * can contain any character # * must escape URL reserved characters # (not needed here since the azure client will do that) s = s.strip("./") if len(s) > _AZURE_NAME_MAX_LEN: raise ValueError("File name max len is %d" % _AZURE_NAME_MAX_LEN) if not len(s): raise ValueError("File name must contain one or more printable characters") if s.count("/") > 256: raise ValueError("File name must not contain more than 256 slashes") return s # Max len according to azure's docs _AZURE_NAME_MAX_LEN = 1024 @deconstructible class AzureStorage(BaseStorage): def __init__(self, **settings): super().__init__(**settings) self._service_client = None self._client = None self._user_delegation_key = None self._user_delegation_key_expiry = datetime.utcnow() if self.connection_string and (not self.account_name or not self.account_key): parsed = parse_connection_string( self.connection_string, case_sensitive_keys=True ) if not self.account_name and "AccountName" in parsed: self.account_name = parsed["AccountName"] if not self.account_key and "AccountKey" in parsed: self.account_key = parsed["AccountKey"] def get_default_settings(self): return { "account_name": setting("AZURE_ACCOUNT_NAME"), "account_key": setting("AZURE_ACCOUNT_KEY"), "object_parameters": setting("AZURE_OBJECT_PARAMETERS", {}), "azure_container": setting("AZURE_CONTAINER"), "azure_ssl": setting("AZURE_SSL", True), "upload_max_conn": setting("AZURE_UPLOAD_MAX_CONN", 2), 
"timeout": setting("AZURE_CONNECTION_TIMEOUT_SECS", 20), "max_memory_size": setting("AZURE_BLOB_MAX_MEMORY_SIZE", 2 * 1024 * 1024), "expiration_secs": setting("AZURE_URL_EXPIRATION_SECS"), "overwrite_files": setting("AZURE_OVERWRITE_FILES", False), "location": setting("AZURE_LOCATION", ""), "default_content_type": "application/octet-stream", "cache_control": setting("AZURE_CACHE_CONTROL"), "sas_token": setting("AZURE_SAS_TOKEN"), "endpoint_suffix": setting("AZURE_ENDPOINT_SUFFIX", "core.windows.net"), "custom_domain": setting("AZURE_CUSTOM_DOMAIN"), "connection_string": setting("AZURE_CONNECTION_STRING"), "token_credential": setting("AZURE_TOKEN_CREDENTIAL"), "api_version": setting("AZURE_API_VERSION", None), "client_options": setting("AZURE_CLIENT_OPTIONS", {}), } def _get_service_client(self): if self.connection_string is not None: return BlobServiceClient.from_connection_string(self.connection_string) account_domain = "{}.blob.{}".format(self.account_name, self.endpoint_suffix) account_url = "{}://{}".format(self.azure_protocol, account_domain) credential = None if self.account_key: credential = { "account_name": self.account_name, "account_key": self.account_key, } elif self.sas_token: credential = self.sas_token elif self.token_credential: credential = self.token_credential options = self.client_options if self.api_version: warnings.warn( "The AZURE_API_VERSION/api_version setting is deprecated " "and will be removed in a future version. Use AZURE_CLIENT_OPTIONS " "to customize any of the BlobServiceClient kwargs.", DeprecationWarning, ) options["api_version"] = self.api_version return BlobServiceClient(account_url, credential=credential, **options) @property def service_client(self): if self._service_client is None: self._service_client = self._get_service_client() return self._service_client @property def client(self): if self._client is None: self._client = self.service_client.get_container_client( self.azure_container ) return self._client def get_user_delegation_key(self, expiry): # We'll only be able to get a user delegation key if we've authenticated with a # token credential. if self.token_credential is None: return None # Get a new key if we don't already have one, or if the one we have expires too # soon. if ( self._user_delegation_key is None or expiry > self._user_delegation_key_expiry ): now = datetime.utcnow() key_expiry_time = now + timedelta(days=7) self._user_delegation_key = self.service_client.get_user_delegation_key( key_start_time=now, key_expiry_time=key_expiry_time ) self._user_delegation_key_expiry = key_expiry_time return self._user_delegation_key @property def azure_protocol(self): if self.azure_ssl: return "https" else: return "http" def _normalize_name(self, name): try: return safe_join(self.location, name) except ValueError: raise SuspiciousOperation("Attempted access to '%s' denied." % name) def _get_valid_path(self, name): # Must be idempotent return _get_valid_path(self._normalize_name(clean_name(name))) def _open(self, name, mode="rb"): return AzureStorageFile(name, mode, self) def get_available_name(self, name, max_length=_AZURE_NAME_MAX_LEN): """ Returns a filename that's free on the target storage system, and available for new content to be written to. 
""" name = clean_name(name) if self.overwrite_files: return get_available_overwrite_name(name, max_length) return super().get_available_name(name, max_length) def exists(self, name): if not name: return True blob_client = self.client.get_blob_client(self._get_valid_path(name)) return blob_client.exists() def delete(self, name): try: self.client.delete_blob(self._get_valid_path(name), timeout=self.timeout) except ResourceNotFoundError: pass def size(self, name): blob_client = self.client.get_blob_client(self._get_valid_path(name)) properties = blob_client.get_blob_properties(timeout=self.timeout) return properties.size def _save(self, name, content): cleaned_name = clean_name(name) name = self._get_valid_path(name) params = self._get_content_settings_parameters(name, content) # Unwrap django file (wrapped by parent's save call) if isinstance(content, File): content = content.file content.seek(0) self.client.upload_blob( name, content, content_settings=ContentSettings(**params), max_concurrency=self.upload_max_conn, timeout=self.timeout, overwrite=self.overwrite_files, ) return cleaned_name def _expire_at(self, expire): # azure expects time in UTC return datetime.utcnow() + timedelta(seconds=expire) def url(self, name, expire=None, parameters=None, mode="r"): name = self._get_valid_path(name) params = parameters or {} permission = BlobSasPermissions.from_string(mode) if expire is None: expire = self.expiration_secs credential = None if expire: expiry = self._expire_at(expire) user_delegation_key = self.get_user_delegation_key(expiry) sas_token = generate_blob_sas( self.account_name, self.azure_container, name, account_key=self.account_key, user_delegation_key=user_delegation_key, permission=permission, expiry=expiry, **params, ) credential = sas_token container_blob_url = self.client.get_blob_client(name).url if self.custom_domain: # Replace the account name with the custom domain parsed_url = urlparse(container_blob_url) container_blob_url = urlunparse( parsed_url._replace(netloc=self.custom_domain) ) return BlobClient.from_blob_url(container_blob_url, credential=credential).url def _get_content_settings_parameters(self, name, content=None): params = {} guessed_type, content_encoding = mimetypes.guess_type(name) content_type = ( _content_type(content) or guessed_type or self.default_content_type ) params["cache_control"] = self.cache_control params["content_type"] = content_type params["content_encoding"] = content_encoding params.update(self.get_object_parameters(name)) return params def get_object_parameters(self, name): """ Returns a dictionary that is passed to content settings. Override this method to adjust this on a per-object basis to set e.g ContentDisposition. By default, returns the value of AZURE_OBJECT_PARAMETERS. """ return self.object_parameters.copy() def get_modified_time(self, name): """ Returns an (aware) datetime object containing the last modified time if USE_TZ is True, otherwise returns a naive datetime in the local timezone. 
""" blob_client = self.client.get_blob_client(self._get_valid_path(name)) properties = blob_client.get_blob_properties(timeout=self.timeout) if not setting("USE_TZ", False): return timezone.make_naive(properties.last_modified) tz = timezone.get_current_timezone() if timezone.is_naive(properties.last_modified): return timezone.make_aware(properties.last_modified, tz) # `last_modified` is in UTC time_zone, we # must convert it to settings time_zone return properties.last_modified.astimezone(tz) def list_all(self, path=""): """Return all files for a given path""" if path: path = self._get_valid_path(path) if path and not path.endswith("/"): path += "/" # XXX make generator, add start, end return [ blob.name for blob in self.client.list_blobs( name_starts_with=path, timeout=self.timeout ) ] def listdir(self, path=""): """ Return all files for a given path. Given that Azure can't return paths it only returns files. Works great for our little adventure. """ return [], self.list_all(path) django-storages-1.14.5/storages/backends/dropbox.py000066400000000000000000000153631475414346200223430ustar00rootroot00000000000000# Dropbox storage class for Django pluggable storage system. # Author: Anthony Monthe # License: BSD import warnings from io import BytesIO from shutil import copyfileobj from tempfile import SpooledTemporaryFile from django.core.exceptions import ImproperlyConfigured from django.core.files.base import File from django.utils._os import safe_join from django.utils.deconstruct import deconstructible from dropbox import Dropbox from dropbox.exceptions import ApiError from dropbox.files import CommitInfo from dropbox.files import FolderMetadata from dropbox.files import UploadSessionCursor from dropbox.files import WriteMode from storages.base import BaseStorage from storages.utils import get_available_overwrite_name from storages.utils import setting _DEFAULT_TIMEOUT = 100 _DEFAULT_MODE = "add" class DropboxStorageException(Exception): pass DropBoxStorageException = DropboxStorageException class DropboxFile(File): def __init__(self, name, storage): self.name = name self._storage = storage self._file = None def _get_file(self): if self._file is None: self._file = SpooledTemporaryFile() # As dropbox==9.3.0, the client returns a tuple # (dropbox.files.FileMetadata, requests.models.Response) file_metadata, response = self._storage.client.files_download(self.name) if response.status_code == 200: with BytesIO(response.content) as file_content: copyfileobj(file_content, self._file) else: # JIC the exception isn't caught by the dropbox client raise DropboxStorageException( "Dropbox server returned a {} response when accessing {}".format( response.status_code, self.name ) ) self._file.seek(0) return self._file def _set_file(self, value): self._file = value file = property(_get_file, _set_file) DropBoxFile = DropboxFile @deconstructible class DropboxStorage(BaseStorage): """Dropbox Storage class for Django pluggable storage system.""" CHUNK_SIZE = 4 * 1024 * 1024 def __init__(self, oauth2_access_token=None, **settings): if oauth2_access_token is not None: settings["oauth2_access_token"] = oauth2_access_token super().__init__(**settings) if self.oauth2_access_token is None and not all( [self.app_key, self.app_secret, self.oauth2_refresh_token] ): raise ImproperlyConfigured( "You must configure an auth token at" "'settings.DROPBOX_OAUTH2_TOKEN' or " "'setting.DROPBOX_APP_KEY', " "'setting.DROPBOX_APP_SECRET' " "and 'setting.DROPBOX_OAUTH2_REFRESH_TOKEN'." 
) self.client = Dropbox( self.oauth2_access_token, app_key=self.app_key, app_secret=self.app_secret, oauth2_refresh_token=self.oauth2_refresh_token, timeout=self.timeout, ) # Backwards compat if hasattr(self, "location"): warnings.warn( "Setting `root_path` with name `location` is deprecated and will be " "removed in a future version of django-storages. Please update the " "name from `location` to `root_path`", DeprecationWarning, ) self.root_path = self.location def get_default_settings(self): return { "root_path": setting("DROPBOX_ROOT_PATH", "/"), "oauth2_access_token": setting("DROPBOX_OAUTH2_TOKEN"), "app_key": setting("DROPBOX_APP_KEY"), "app_secret": setting("DROPBOX_APP_SECRET"), "oauth2_refresh_token": setting("DROPBOX_OAUTH2_REFRESH_TOKEN"), "timeout": setting("DROPBOX_TIMEOUT", _DEFAULT_TIMEOUT), "write_mode": setting("DROPBOX_WRITE_MODE", _DEFAULT_MODE), } def _full_path(self, name): if name == "/": name = "" return safe_join(self.root_path, name).replace("\\", "/") def delete(self, name): self.client.files_delete(self._full_path(name)) def exists(self, name): try: return bool(self.client.files_get_metadata(self._full_path(name))) except ApiError: return False def listdir(self, path): directories, files = [], [] full_path = self._full_path(path) if full_path == "/": full_path = "" metadata = self.client.files_list_folder(full_path) for entry in metadata.entries: if isinstance(entry, FolderMetadata): directories.append(entry.name) else: files.append(entry.name) return directories, files def size(self, name): metadata = self.client.files_get_metadata(self._full_path(name)) return metadata.size def url(self, name): try: media = self.client.files_get_temporary_link(self._full_path(name)) return media.link except ApiError: return None def _open(self, name, mode="rb"): remote_file = DropboxFile(self._full_path(name), self) return remote_file def _save(self, name, content): content.open() if content.size <= self.CHUNK_SIZE: self.client.files_upload( content.read(), self._full_path(name), mode=WriteMode(self.write_mode) ) else: self._chunked_upload(content, self._full_path(name)) content.close() return name def _chunked_upload(self, content, dest_path): upload_session = self.client.files_upload_session_start( content.read(self.CHUNK_SIZE) ) cursor = UploadSessionCursor( session_id=upload_session.session_id, offset=content.tell() ) commit = CommitInfo(path=dest_path, mode=WriteMode(self.write_mode)) while content.tell() < content.size: if (content.size - content.tell()) <= self.CHUNK_SIZE: self.client.files_upload_session_finish( content.read(self.CHUNK_SIZE), cursor, commit ) else: self.client.files_upload_session_append_v2( content.read(self.CHUNK_SIZE), cursor ) cursor.offset = content.tell() def get_available_name(self, name, max_length=None): """Overwrite existing file with the same name.""" if self.write_mode == "overwrite": return get_available_overwrite_name(name, max_length) return super().get_available_name(name, max_length) DropBoxStorage = DropboxStorage django-storages-1.14.5/storages/backends/ftp.py000066400000000000000000000226641475414346200214610ustar00rootroot00000000000000# FTP storage class for Django pluggable storage system. 
# Author: Rafal Jonca # License: MIT # Comes from http://www.djangosnippets.org/snippets/1269/ # # Usage: # # Add below to settings.py: # FTP_STORAGE_LOCATION = '[a]ftp[s]://:@:/[path]' # # In models.py you can write: # from FTPStorage import FTPStorage # fs = FTPStorage() # For a TLS configuration, you must use 'ftps' protocol # class FTPTest(models.Model): # file = models.FileField(upload_to='a/b/c/', storage=fs) import ftplib import io import os import re import urllib.parse from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.files.base import File from django.utils.deconstruct import deconstructible from storages.base import BaseStorage from storages.utils import setting class FTPStorageException(Exception): pass @deconstructible class FTPStorage(BaseStorage): """FTP Storage class for Django pluggable storage system.""" def __init__(self, **settings): super().__init__(**settings) if self.location is None: raise ImproperlyConfigured( "You must set a location at instantiation " "or at settings.FTP_STORAGE_LOCATION." ) self._config = self._decode_location(self.location) self._connection = None def get_default_settings(self): return { "location": setting("FTP_STORAGE_LOCATION"), "encoding": setting("FTP_STORAGE_ENCODING", "latin-1"), "base_url": setting("BASE_URL", settings.MEDIA_URL), "allow_overwrite": setting("FTP_ALLOW_OVERWRITE", False), } def _decode_location(self, location): """Return splitted configuration data from location.""" splitted_url = re.search( r"^(?P.+)://(?P.+):(?P.+)@" r"(?P.+):(?P\d+)/(?P.*)$", location, ) if splitted_url is None: raise ImproperlyConfigured("Improperly formatted location URL") if splitted_url["scheme"] not in ("ftp", "aftp", "ftps"): raise ImproperlyConfigured("Only ftp, aftp, ftps schemes supported") if splitted_url["host"] == "": raise ImproperlyConfigured("You must at least provide host!") config = {} config["active"] = splitted_url["scheme"] == "aftp" config["secure"] = splitted_url["scheme"] == "ftps" config["path"] = splitted_url["path"] or "/" config["host"] = splitted_url["host"] config["user"] = splitted_url["user"] config["passwd"] = splitted_url["passwd"] config["port"] = int(splitted_url["port"]) return config def _start_connection(self): # Check if connection is still alive and if not, drop it. if self._connection is not None: try: self._connection.pwd() except ftplib.all_errors: self._connection = None # Real reconnect if self._connection is None: ftp = ftplib.FTP_TLS() if self._config["secure"] else ftplib.FTP() ftp.encoding = self.encoding try: ftp.connect(self._config["host"], self._config["port"]) ftp.login(self._config["user"], self._config["passwd"]) if self._config["secure"]: ftp.prot_p() if self._config["active"]: ftp.set_pasv(False) if self._config["path"] != "": ftp.cwd(self._config["path"]) self._connection = ftp return except ftplib.all_errors: raise FTPStorageException( "Connection or login error using data %s" % repr(self._config) ) def disconnect(self): self._connection.quit() self._connection = None def _mkremdirs(self, path): pwd = self._connection.pwd() path_splitted = path.split(os.path.sep) for path_part in path_splitted: try: self._connection.cwd(path_part) except ftplib.all_errors: try: self._connection.mkd(path_part) self._connection.cwd(path_part) except ftplib.all_errors: raise FTPStorageException("Cannot create directory chain %s" % path) self._connection.cwd(pwd) def _put_file(self, name, content): # Connection must be open! 
try: self._mkremdirs(os.path.dirname(name)) pwd = self._connection.pwd() self._connection.cwd(os.path.dirname(name)) self._connection.storbinary( "STOR " + os.path.basename(name), content.file, content.DEFAULT_CHUNK_SIZE, ) self._connection.cwd(pwd) except ftplib.all_errors: raise FTPStorageException("Error writing file %s" % name) def _open(self, name, mode="rb"): remote_file = FTPStorageFile(name, self, mode=mode) return remote_file def _read(self, name): memory_file = io.BytesIO() try: pwd = self._connection.pwd() self._connection.cwd(os.path.dirname(name)) self._connection.retrbinary( "RETR " + os.path.basename(name), memory_file.write ) self._connection.cwd(pwd) memory_file.seek(0) return memory_file except ftplib.all_errors: raise FTPStorageException("Error reading file %s" % name) def _save(self, name, content): content.open() self._start_connection() self._put_file(name, content) content.close() return name def _get_dir_details(self, path): # Connection must be open! try: lines = [] self._connection.retrlines("LIST " + path, lines.append) dirs = {} files = {} for line in lines: words = line.split() if len(words) < 6: continue if words[-2] == "->": continue if words[0][0] == "d": dirs[words[-1]] = 0 elif words[0][0] == "-": files[words[-1]] = int(words[-5]) return dirs, files except ftplib.all_errors: raise FTPStorageException("Error getting listing for %s" % path) def listdir(self, path): self._start_connection() try: dirs, files = self._get_dir_details(path) return list(dirs.keys()), list(files.keys()) except FTPStorageException: raise def delete(self, name): if not self.exists(name): return self._start_connection() try: self._connection.delete(name) except ftplib.all_errors: raise FTPStorageException("Error when removing %s" % name) def exists(self, name): if self.allow_overwrite: return False self._start_connection() try: nlst = self._connection.nlst(os.path.dirname(name)) if name in nlst or os.path.basename(name) in nlst: return True else: return False except ftplib.error_temp: return False except ftplib.error_perm: # error_perm: 550 Can't find file return False except ftplib.all_errors: raise FTPStorageException("Error when testing existence of %s" % name) def size(self, name): self._start_connection() try: dirs, files = self._get_dir_details(os.path.dirname(name)) if os.path.basename(name) in files: return files[os.path.basename(name)] else: return 0 except FTPStorageException: return 0 def url(self, name): if self.base_url is None: raise ValueError("This file is not accessible via a URL.") return urllib.parse.urljoin(self.base_url, name).replace("\\", "/") class FTPStorageFile(File): def __init__(self, name, storage, mode): self.name = name self._storage = storage self._mode = mode self._is_dirty = False self.file = io.BytesIO() self._is_read = False @property def size(self): if not hasattr(self, "_size"): self._size = self._storage.size(self.name) return self._size def readlines(self): if not self._is_read: self._storage._start_connection() self.file = self._storage._read(self.name) self._is_read = True return self.file.readlines() def read(self, num_bytes=None): if not self._is_read: self._storage._start_connection() self.file = self._storage._read(self.name) self._is_read = True return self.file.read(num_bytes) def write(self, content): if "w" not in self._mode: raise AttributeError("File was opened for read-only access.") self.file = io.BytesIO(content) self._is_dirty = True self._is_read = True def close(self): if self._is_dirty: self._storage._start_connection() 
self._storage._put_file(self.name, self) self._storage.disconnect() self.file.close() django-storages-1.14.5/storages/backends/gcloud.py000066400000000000000000000271061475414346200221410ustar00rootroot00000000000000import gzip import io import mimetypes from datetime import timedelta from tempfile import SpooledTemporaryFile from django.core.exceptions import ImproperlyConfigured from django.core.exceptions import SuspiciousOperation from django.core.files.base import File from django.utils import timezone from django.utils.deconstruct import deconstructible from storages.base import BaseStorage from storages.compress import CompressedFileMixin from storages.utils import check_location from storages.utils import clean_name from storages.utils import get_available_overwrite_name from storages.utils import safe_join from storages.utils import setting from storages.utils import to_bytes try: from google.cloud.exceptions import NotFound from google.cloud.storage import Blob from google.cloud.storage import Client from google.cloud.storage.blob import _quote from google.cloud.storage.retry import DEFAULT_RETRY except ImportError: raise ImproperlyConfigured( "Could not load Google Cloud Storage bindings.\n" "See https://github.com/GoogleCloudPlatform/gcloud-python" ) CONTENT_ENCODING = "content_encoding" CONTENT_TYPE = "content_type" class GoogleCloudFile(CompressedFileMixin, File): def __init__(self, name, mode, storage): self.name = name self.mime_type, self.mime_encoding = mimetypes.guess_type(name) self._mode = mode self._storage = storage self.blob = storage.bucket.get_blob(name, chunk_size=storage.blob_chunk_size) if not self.blob and "w" in mode: self.blob = Blob( self.name, storage.bucket, chunk_size=storage.blob_chunk_size ) self._file = None self._is_dirty = False @property def size(self): return self.blob.size def _get_file(self): if self._file is None: self._file = SpooledTemporaryFile( max_size=self._storage.max_memory_size, suffix=".GSStorageFile", dir=setting("FILE_UPLOAD_TEMP_DIR"), ) if "r" in self._mode: self._is_dirty = False # This automatically decompresses the file self.blob.download_to_file(self._file, checksum="crc32c") self._file.seek(0) return self._file def _set_file(self, value): self._file = value file = property(_get_file, _set_file) def read(self, num_bytes=None): if "r" not in self._mode: raise AttributeError("File was not opened in read mode.") if num_bytes is None: num_bytes = -1 return super().read(num_bytes) def write(self, content): if "w" not in self._mode: raise AttributeError("File was not opened in write mode.") self._is_dirty = True return super().write(to_bytes(content)) def close(self): if self._file is not None: if self._is_dirty: blob_params = self._storage.get_object_parameters(self.name) self.blob.upload_from_file( self.file, rewind=True, content_type=self.mime_type, retry=DEFAULT_RETRY, predefined_acl=blob_params.get("acl", self._storage.default_acl), ) self._file.close() self._file = None @deconstructible class GoogleCloudStorage(BaseStorage): def __init__(self, **settings): super().__init__(**settings) check_location(self) self._bucket = None self._client = None def get_default_settings(self): return { "project_id": setting("GS_PROJECT_ID"), "credentials": setting("GS_CREDENTIALS"), "bucket_name": setting("GS_BUCKET_NAME"), "custom_endpoint": setting("GS_CUSTOM_ENDPOINT", None), "location": setting("GS_LOCATION", ""), "default_acl": setting("GS_DEFAULT_ACL"), "querystring_auth": setting("GS_QUERYSTRING_AUTH", True), "expiration": 
setting("GS_EXPIRATION", timedelta(seconds=86400)), "gzip": setting("GS_IS_GZIPPED", False), "gzip_content_types": setting( "GZIP_CONTENT_TYPES", ( "text/css", "text/javascript", "application/javascript", "application/x-javascript", "image/svg+xml", ), ), "file_overwrite": setting("GS_FILE_OVERWRITE", True), "object_parameters": setting("GS_OBJECT_PARAMETERS", {}), # The max amount of memory a returned file can take up before being # rolled over into a temporary file on disk. Default is 0: Do not # roll over. "max_memory_size": setting("GS_MAX_MEMORY_SIZE", 0), "blob_chunk_size": setting("GS_BLOB_CHUNK_SIZE"), } @property def client(self): if self._client is None: self._client = Client(project=self.project_id, credentials=self.credentials) return self._client @property def bucket(self): if self._bucket is None: self._bucket = self.client.bucket(self.bucket_name) return self._bucket def _normalize_name(self, name): """ Normalizes the name so that paths like /path/to/ignored/../something.txt and ./file.txt work. Note that clean_name adds ./ to some paths so they need to be fixed here. We check to make sure that the path pointed to is not outside the directory specified by the LOCATION setting. """ try: return safe_join(self.location, name) except ValueError: raise SuspiciousOperation("Attempted access to '%s' denied." % name) def _open(self, name, mode="rb"): name = self._normalize_name(clean_name(name)) file_object = GoogleCloudFile(name, mode, self) if not file_object.blob: raise FileNotFoundError("File does not exist: %s" % name) return file_object def _compress_content(self, content): content.seek(0) zbuf = io.BytesIO() with gzip.GzipFile(mode="wb", fileobj=zbuf, mtime=0.0) as zfile: zfile.write(to_bytes(content.read())) zbuf.seek(0) return zbuf def _save(self, name, content): cleaned_name = clean_name(name) name = self._normalize_name(cleaned_name) content.name = cleaned_name file_object = GoogleCloudFile(name, "rw", self) blob_params = self.get_object_parameters(name) if file_object.mime_encoding and CONTENT_ENCODING not in blob_params: blob_params[CONTENT_ENCODING] = file_object.mime_encoding upload_params = {} upload_params["predefined_acl"] = blob_params.pop("acl", self.default_acl) upload_params[CONTENT_TYPE] = blob_params.pop( CONTENT_TYPE, file_object.mime_type ) if ( self.gzip and upload_params[CONTENT_TYPE] in self.gzip_content_types and CONTENT_ENCODING not in blob_params ): content = self._compress_content(content) blob_params[CONTENT_ENCODING] = "gzip" for prop, val in blob_params.items(): setattr(file_object.blob, prop, val) file_object.blob.upload_from_file( content, rewind=True, retry=DEFAULT_RETRY, size=getattr(content, "size", None), **upload_params, ) return cleaned_name def get_object_parameters(self, name): """Override this to return a dictionary of overwritable blob-property to value. Returns GS_OBJECT_PARAMETERS by default. See the docs for all possible options. 
""" object_parameters = self.object_parameters.copy() return object_parameters def delete(self, name): name = self._normalize_name(clean_name(name)) try: self.bucket.delete_blob(name, retry=DEFAULT_RETRY) except NotFound: pass def exists(self, name): if not name: # root element aka the bucket try: self.client.get_bucket(self.bucket) return True except NotFound: return False name = self._normalize_name(clean_name(name)) return bool(self.bucket.get_blob(name)) def listdir(self, name): name = self._normalize_name(clean_name(name)) # For bucket.list_blobs and logic below name needs to end in / # but for the root path "" we leave it as an empty string if name and not name.endswith("/"): name += "/" iterator = self.bucket.list_blobs(prefix=name, delimiter="/") blobs = list(iterator) prefixes = iterator.prefixes files = [] dirs = [] for blob in blobs: parts = blob.name.split("/") files.append(parts[-1]) for folder_path in prefixes: parts = folder_path.split("/") dirs.append(parts[-2]) return list(dirs), files def _get_blob(self, name): # Wrap google.cloud.storage's blob to raise if the file doesn't exist blob = self.bucket.get_blob(name) if blob is None: raise NotFound("File does not exist: {}".format(name)) return blob def size(self, name): name = self._normalize_name(clean_name(name)) blob = self._get_blob(name) return blob.size def get_modified_time(self, name): name = self._normalize_name(clean_name(name)) blob = self._get_blob(name) updated = blob.updated return updated if setting("USE_TZ") else timezone.make_naive(updated) def get_created_time(self, name): """ Return the creation time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ name = self._normalize_name(clean_name(name)) blob = self._get_blob(name) created = blob.time_created return created if setting("USE_TZ") else timezone.make_naive(created) def url(self, name, parameters=None): """ Return public URL or a signed URL for the Blob. To keep things snappy, the existence of blobs for public URLs is not checked. 
""" name = self._normalize_name(clean_name(name)) blob = self.bucket.blob(name) blob_params = self.get_object_parameters(name) no_signed_url = ( blob_params.get("acl", self.default_acl) == "publicRead" or not self.querystring_auth ) if not self.custom_endpoint and no_signed_url: return blob.public_url elif no_signed_url: return "{storage_base_url}/{quoted_name}".format( storage_base_url=self.custom_endpoint, quoted_name=_quote(name, safe=b"/~"), ) else: default_params = { "bucket_bound_hostname": self.custom_endpoint, "expiration": self.expiration, "version": "v4", } params = parameters or {} for key, value in default_params.items(): if value and key not in params: params[key] = value return blob.generate_signed_url(**params) def get_available_name(self, name, max_length=None): name = clean_name(name) if self.file_overwrite: return get_available_overwrite_name(name, max_length) return super().get_available_name(name, max_length) django-storages-1.14.5/storages/backends/s3.py000066400000000000000000000644501475414346200212140ustar00rootroot00000000000000import io import mimetypes import os import posixpath import tempfile import threading import warnings from datetime import datetime from datetime import timedelta from urllib.parse import urlencode from django.contrib.staticfiles.storage import ManifestFilesMixin from django.core.exceptions import ImproperlyConfigured from django.core.exceptions import SuspiciousOperation from django.core.files.base import File from django.utils.deconstruct import deconstructible from django.utils.encoding import filepath_to_uri from django.utils.timezone import make_naive from storages.base import BaseStorage from storages.compress import CompressedFileMixin from storages.compress import CompressStorageMixin from storages.utils import ReadBytesWrapper from storages.utils import check_location from storages.utils import clean_name from storages.utils import get_available_overwrite_name from storages.utils import is_seekable from storages.utils import lookup_env from storages.utils import safe_join from storages.utils import setting from storages.utils import to_bytes try: import boto3.session import botocore import s3transfer.constants from boto3.s3.transfer import TransferConfig from botocore.config import Config from botocore.exceptions import ClientError from botocore.signers import CloudFrontSigner except ImportError as e: raise ImproperlyConfigured("Could not load Boto3's S3 bindings. 
%s" % e) # NOTE: these are defined as functions so both can be tested def _use_cryptography_signer(): # https://cryptography.io as an RSA backend from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.primitives.serialization import load_pem_private_key def _cloud_front_signer_from_pem(key_id, pem): if isinstance(pem, str): pem = pem.encode("ascii") key = load_pem_private_key(pem, password=None, backend=default_backend()) return CloudFrontSigner( key_id, lambda x: key.sign(x, padding.PKCS1v15(), hashes.SHA1()) ) return _cloud_front_signer_from_pem def _use_rsa_signer(): # https://stuvel.eu/rsa as an RSA backend import rsa def _cloud_front_signer_from_pem(key_id, pem): if isinstance(pem, str): pem = pem.encode("ascii") key = rsa.PrivateKey.load_pkcs1(pem) return CloudFrontSigner(key_id, lambda x: rsa.sign(x, key, "SHA-1")) return _cloud_front_signer_from_pem for _signer_factory in (_use_cryptography_signer, _use_rsa_signer): try: _cloud_front_signer_from_pem = _signer_factory() break except ImportError: pass else: def _cloud_front_signer_from_pem(key_id, pem): raise ImproperlyConfigured( "An RSA backend is required for signing cloudfront URLs.\n" "Supported backends are packages: cryptography and rsa." ) def _filter_download_params(params): return { key: value for (key, value) in params.items() if key in s3transfer.constants.ALLOWED_DOWNLOAD_ARGS } @deconstructible class S3File(CompressedFileMixin, File): """ The default file object used by the S3Storage backend. This file implements file streaming using boto's multipart uploading functionality. The file can be opened in read or write mode. This class extends Django's File class. However, the contained data is only the data contained in the current buffer. So you should not access the contained file object directly. You should access the data via this class. Warning: This file *must* be closed using the close() method in order to properly write the file to S3. Be sure to close the file in your application. """ def __init__(self, name, mode, storage, buffer_size=None): if "r" in mode and "w" in mode: raise ValueError("Can't combine 'r' and 'w' in mode.") self._storage = storage self.name = name[len(self._storage.location) :].lstrip("/") self._mode = mode self.obj = storage.bucket.Object(name) if "w" not in mode: # Force early RAII-style exception if object does not exist params = _filter_download_params( self._storage.get_object_parameters(self.name) ) self.obj.load(**params) self._closed = False self._file = None self._parts = None # 5 MB is the minimum part size (if there is more than one part). # Amazon allows up to 10,000 parts. The default supports uploads # up to roughly 50 GB. Increase the part size to accommodate # for files larger than this. 
self.buffer_size = buffer_size or setting("AWS_S3_FILE_BUFFER_SIZE", 5242880) self._reset_file_properties() def _reset_file_properties(self): self._multipart = None self._raw_bytes_written = 0 self._write_counter = 0 self._is_dirty = False def open(self, mode=None): if self._file is not None and not self.closed: self.seek(0) # Mirror Django's behavior elif mode and mode != self._mode: raise ValueError("Cannot reopen file with a new mode.") # Accessing the file will functionally re-open it self.file # noqa: B018 return self @property def size(self): return self.obj.content_length @property def closed(self): return self._closed def _get_file(self): if self._file is None: self._file = tempfile.SpooledTemporaryFile( max_size=self._storage.max_memory_size, suffix=".S3File", dir=setting("FILE_UPLOAD_TEMP_DIR"), ) if "r" in self._mode: self._is_dirty = False params = _filter_download_params( self._storage.get_object_parameters(self.name) ) self.obj.download_fileobj( self._file, ExtraArgs=params, Config=self._storage.transfer_config ) self._file.seek(0) if self._storage.gzip and self.obj.content_encoding == "gzip": self._file = self._decompress_file(mode=self._mode, file=self._file) elif "b" not in self._mode: if hasattr(self._file, "readable"): # For versions > Python 3.10 compatibility # See SpooledTemporaryFile changes in 3.11 (https://docs.python.org/3/library/tempfile.html) # noqa: E501 # Now fully implements the io.BufferedIOBase and io.TextIOBase abstract base classes allowing the file # noqa: E501 # to be readable in the mode that it was specified (without accessing the underlying _file object). # noqa: E501 # In this case, we need to wrap the file in a TextIOWrapper to ensure that the file is read as a text file. # noqa: E501 self._file = io.TextIOWrapper(self._file, encoding="utf-8") else: # For versions <= Python 3.10 compatibility self._file = io.TextIOWrapper( self._file._file, encoding="utf-8" ) self._closed = False return self._file def _set_file(self, value): self._file = value file = property(_get_file, _set_file) def read(self, *args, **kwargs): if "r" not in self._mode: raise AttributeError("File was not opened in read mode.") return super().read(*args, **kwargs) def readline(self, *args, **kwargs): if "r" not in self._mode: raise AttributeError("File was not opened in read mode.") return super().readline(*args, **kwargs) def readlines(self): return list(self) def write(self, content): if "w" not in self._mode: raise AttributeError("File was not opened in write mode.") self._is_dirty = True if self._multipart is None: self._multipart = self.obj.initiate_multipart_upload( **self._storage._get_write_parameters(self.obj.key) ) self._parts = [] if self.buffer_size <= self._buffer_file_size: self._flush_write_buffer() bstr = to_bytes(content) self._raw_bytes_written += len(bstr) return super().write(bstr) @property def _buffer_file_size(self): pos = self.file.tell() self.file.seek(0, os.SEEK_END) length = self.file.tell() self.file.seek(pos) return length def _flush_write_buffer(self): if self._buffer_file_size: self._write_counter += 1 self.file.seek(0) part = self._multipart.Part(self._write_counter) response = part.upload(Body=self.file.read()) self._parts.append( {"ETag": response["ETag"], "PartNumber": self._write_counter} ) self.file.seek(0) self.file.truncate() def _create_empty_on_close(self): """ Attempt to create an empty file for this key when this File is closed if no bytes have been written and no object already exists on S3 for this key. 
This behavior is meant to mimic the behavior of Django's builtin FileSystemStorage, where files are always created after they are opened in write mode: f = storage.open('file.txt', mode='w') f.close() """ assert "w" in self._mode assert self._raw_bytes_written == 0 try: # Check if the object exists on the server; if so, don't do anything self.obj.load() except ClientError as err: if err.response["ResponseMetadata"]["HTTPStatusCode"] == 404: self.obj.put( Body=b"", **self._storage._get_write_parameters(self.obj.key) ) else: raise def close(self): if self._is_dirty: if self._multipart is not None: self._flush_write_buffer() self._multipart.complete(MultipartUpload={"Parts": self._parts}) else: if self._multipart is not None: self._multipart.abort() if "w" in self._mode and self._raw_bytes_written == 0: self._create_empty_on_close() if self._file is not None: self._file.close() self._file = None self._reset_file_properties() self._closed = True @deconstructible class S3Storage(CompressStorageMixin, BaseStorage): """ Amazon Simple Storage Service using Boto3 This storage backend supports opening files in read or write mode and supports streaming(buffering) data in chunks to S3 when writing. """ default_content_type = "application/octet-stream" # If config provided in subclass, signature_version and addressing_style # settings/args are ignored. config = None _signers = {} # noqa: RUF012 def __init__(self, **settings): omitted = object() if not hasattr(self, "cloudfront_signer"): self.cloudfront_signer = settings.pop("cloudfront_signer", omitted) super().__init__(**settings) check_location(self) if (self.access_key or self.secret_key) and self.session_profile: raise ImproperlyConfigured( "AWS_S3_SESSION_PROFILE/session_profile should not be provided with " "AWS_S3_ACCESS_KEY_ID/access_key and " "AWS_S3_SECRET_ACCESS_KEY/secret_key" ) self._bucket = None self._connections = threading.local() self._unsigned_connections = threading.local() if self.config is not None: warnings.warn( "The 'config' class property is deprecated and will be " "removed in a future version. Use AWS_S3_CLIENT_CONFIG " "to customize any of the botocore.config.Config parameters.", DeprecationWarning, ) self.client_config = self.config if self.client_config is None: self.client_config = Config( s3={"addressing_style": self.addressing_style}, signature_version=self.signature_version, proxies=self.proxies, ) if self.use_threads is False: warnings.warn( "The AWS_S3_USE_THREADS setting is deprecated. Use " "AWS_S3_TRANSFER_CONFIG to customize any of the " "boto.s3.transfer.TransferConfig parameters.", DeprecationWarning, ) if self.transfer_config is None: self.transfer_config = TransferConfig(use_threads=self.use_threads) if self.cloudfront_signer is omitted: if self.cloudfront_key_id and self.cloudfront_key: self.cloudfront_signer = self.get_cloudfront_signer( self.cloudfront_key_id, self.cloudfront_key ) elif bool(self.cloudfront_key_id) ^ bool(self.cloudfront_key): raise ImproperlyConfigured( "Both AWS_CLOUDFRONT_KEY_ID/cloudfront_key_id and " "AWS_CLOUDFRONT_KEY/cloudfront_key must be provided together." 
) else: self.cloudfront_signer = None def get_cloudfront_signer(self, key_id, key): cache_key = f"{key_id}:{key}" if cache_key not in self.__class__._signers: self.__class__._signers[cache_key] = _cloud_front_signer_from_pem( key_id, key ) return self.__class__._signers[cache_key] def get_default_settings(self): return { "access_key": setting( "AWS_S3_ACCESS_KEY_ID", setting( "AWS_ACCESS_KEY_ID", lookup_env(["AWS_S3_ACCESS_KEY_ID", "AWS_ACCESS_KEY_ID"]), ), ), "secret_key": setting( "AWS_S3_SECRET_ACCESS_KEY", setting( "AWS_SECRET_ACCESS_KEY", lookup_env(["AWS_S3_SECRET_ACCESS_KEY", "AWS_SECRET_ACCESS_KEY"]), ), ), "security_token": setting( "AWS_SESSION_TOKEN", setting( "AWS_SECURITY_TOKEN", lookup_env(["AWS_SESSION_TOKEN", "AWS_SECURITY_TOKEN"]), ), ), "session_profile": setting( "AWS_S3_SESSION_PROFILE", lookup_env(["AWS_S3_SESSION_PROFILE"]) ), "file_overwrite": setting("AWS_S3_FILE_OVERWRITE", True), "object_parameters": setting("AWS_S3_OBJECT_PARAMETERS", {}), "bucket_name": setting("AWS_STORAGE_BUCKET_NAME"), "querystring_auth": setting("AWS_QUERYSTRING_AUTH", True), "querystring_expire": setting("AWS_QUERYSTRING_EXPIRE", 3600), "signature_version": setting("AWS_S3_SIGNATURE_VERSION"), "location": setting("AWS_LOCATION", ""), "custom_domain": setting("AWS_S3_CUSTOM_DOMAIN"), "cloudfront_key_id": setting("AWS_CLOUDFRONT_KEY_ID"), "cloudfront_key": setting("AWS_CLOUDFRONT_KEY"), "addressing_style": setting("AWS_S3_ADDRESSING_STYLE"), "file_name_charset": setting("AWS_S3_FILE_NAME_CHARSET", "utf-8"), "gzip": setting("AWS_IS_GZIPPED", False), "gzip_content_types": setting( "GZIP_CONTENT_TYPES", ( "text/css", "text/javascript", "application/javascript", "application/x-javascript", "image/svg+xml", ), ), "url_protocol": setting("AWS_S3_URL_PROTOCOL", "https:"), "endpoint_url": setting("AWS_S3_ENDPOINT_URL"), "proxies": setting("AWS_S3_PROXIES"), "region_name": setting("AWS_S3_REGION_NAME"), "use_ssl": setting("AWS_S3_USE_SSL", True), "verify": setting("AWS_S3_VERIFY", None), "max_memory_size": setting("AWS_S3_MAX_MEMORY_SIZE", 0), "default_acl": setting("AWS_DEFAULT_ACL", None), "use_threads": setting("AWS_S3_USE_THREADS", True), "transfer_config": setting("AWS_S3_TRANSFER_CONFIG", None), "client_config": setting("AWS_S3_CLIENT_CONFIG", None), } def __getstate__(self): state = self.__dict__.copy() state.pop("_connections", None) state.pop("_unsigned_connections", None) state.pop("_bucket", None) return state def __setstate__(self, state): state["_connections"] = threading.local() state["_unsigned_connections"] = threading.local() state["_bucket"] = None self.__dict__ = state @property def connection(self): connection = getattr(self._connections, "connection", None) if connection is None: session = self._create_session() self._connections.connection = session.resource( "s3", region_name=self.region_name, use_ssl=self.use_ssl, endpoint_url=self.endpoint_url, config=self.client_config, verify=self.verify, ) return self._connections.connection @property def unsigned_connection(self): unsigned_connection = getattr(self._unsigned_connections, "connection", None) if unsigned_connection is None: session = self._create_session() config = self.client_config.merge( Config(signature_version=botocore.UNSIGNED) ) self._unsigned_connections.connection = session.resource( "s3", region_name=self.region_name, use_ssl=self.use_ssl, endpoint_url=self.endpoint_url, config=config, verify=self.verify, ) return self._unsigned_connections.connection def _create_session(self): """ If a user specifies a profile 
name and this class obtains access keys from another source such as environment variables,we want the profile name to take precedence. """ if self.session_profile: session = boto3.Session(profile_name=self.session_profile) else: session = boto3.Session( aws_access_key_id=self.access_key, aws_secret_access_key=self.secret_key, aws_session_token=self.security_token, ) return session @property def bucket(self): """ Get the current bucket. If there is no current bucket object create it. """ if self._bucket is None: self._bucket = self.connection.Bucket(self.bucket_name) return self._bucket def _normalize_name(self, name): """ Normalizes the name so that paths like /path/to/ignored/../something.txt work. We check to make sure that the path pointed to is not outside the directory specified by the LOCATION setting. """ try: return safe_join(self.location, name) except ValueError: raise SuspiciousOperation("Attempted access to '%s' denied." % name) def _open(self, name, mode="rb"): name = self._normalize_name(clean_name(name)) try: f = S3File(name, mode, self) except ClientError as err: if err.response["ResponseMetadata"]["HTTPStatusCode"] == 404: raise FileNotFoundError("File does not exist: %s" % name) raise # Let it bubble up if it was some other error return f def _save(self, name, content): cleaned_name = clean_name(name) name = self._normalize_name(cleaned_name) params = self._get_write_parameters(name, content) if is_seekable(content): content.seek(0, os.SEEK_SET) # wrap content so read() always returns bytes. This is required for passing it # to obj.upload_fileobj() or self._compress_content() content = ReadBytesWrapper(content) if ( self.gzip and params["ContentType"] in self.gzip_content_types and "ContentEncoding" not in params ): content = self._compress_content(content) params["ContentEncoding"] = "gzip" obj = self.bucket.Object(name) # Workaround file being closed errantly see: https://github.com/boto/s3transfer/issues/80 original_close = content.close content.close = lambda: None try: obj.upload_fileobj(content, ExtraArgs=params, Config=self.transfer_config) finally: content.close = original_close return cleaned_name def delete(self, name): try: name = self._normalize_name(clean_name(name)) self.bucket.Object(name).delete() except ClientError as err: if err.response["ResponseMetadata"]["HTTPStatusCode"] == 404: # Not an error to delete something that does not exist return # Some other error was encountered. Re-raise it raise def exists(self, name): name = self._normalize_name(clean_name(name)) try: self.connection.meta.client.head_object(Bucket=self.bucket_name, Key=name) return True except ClientError as err: if err.response["ResponseMetadata"]["HTTPStatusCode"] == 404: return False # Some other error was encountered. Re-raise it. raise def listdir(self, name): path = self._normalize_name(clean_name(name)) # The path needs to end with a slash, but if the root is empty, leave it. 
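        # Illustrative example (hypothetical keys, not taken from this repo):
        # listdir("media") on a bucket holding "media/a.txt" and "media/img/b.png"
        # paginates list_objects with Prefix="media/" and Delimiter="/", so the
        # loop below yields directories == ["img"] and files == ["a.txt"].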
if path and not path.endswith("/"): path += "/" directories = [] files = [] paginator = self.connection.meta.client.get_paginator("list_objects") pages = paginator.paginate(Bucket=self.bucket_name, Delimiter="/", Prefix=path) for page in pages: directories += [ posixpath.relpath(entry["Prefix"], path) for entry in page.get("CommonPrefixes", ()) ] for entry in page.get("Contents", ()): key = entry["Key"] if key != path: files.append(posixpath.relpath(key, path)) return directories, files def size(self, name): name = self._normalize_name(clean_name(name)) try: return self.bucket.Object(name).content_length except ClientError as err: if err.response["ResponseMetadata"]["HTTPStatusCode"] == 404: raise FileNotFoundError("File does not exist: %s" % name) raise # Let it bubble up if it was some other error def _get_write_parameters(self, name, content=None): params = self.get_object_parameters(name) if "ContentType" not in params: _type, encoding = mimetypes.guess_type(name) content_type = getattr(content, "content_type", None) content_type = content_type or _type or self.default_content_type params["ContentType"] = content_type if encoding: params["ContentEncoding"] = encoding if "ACL" not in params and self.default_acl: params["ACL"] = self.default_acl return params def get_object_parameters(self, name): """ Returns a dictionary that is passed to file upload. Override this method to adjust this on a per-object basis to set e.g ContentDisposition. By default, returns the value of AWS_S3_OBJECT_PARAMETERS. Setting ContentEncoding will prevent objects from being automatically gzipped. """ return self.object_parameters.copy() def get_modified_time(self, name): """ Returns an (aware) datetime object containing the last modified time if USE_TZ is True, otherwise returns a naive datetime in the local timezone. """ name = self._normalize_name(clean_name(name)) entry = self.bucket.Object(name) if setting("USE_TZ"): # boto3 returns TZ aware timestamps return entry.last_modified else: return make_naive(entry.last_modified) def url(self, name, parameters=None, expire=None, http_method=None): # Preserve the trailing slash after normalizing the path. 
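        # Rough flow of the code below: with AWS_S3_CUSTOM_DOMAIN set, a direct URL
        # is built (and CloudFront-signed when a signer is configured and
        # querystring auth is enabled); otherwise a presigned S3 URL is generated,
        # signed only when querystring_auth is True. Hypothetical call:
        #   storage.url("docs/report.pdf", expire=300)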
name = self._normalize_name(clean_name(name)) params = parameters.copy() if parameters else {} if expire is None: expire = self.querystring_expire if self.custom_domain: url = "{}//{}/{}{}".format( self.url_protocol, self.custom_domain, filepath_to_uri(name), "?{}".format(urlencode(params)) if params else "", ) if self.querystring_auth and self.cloudfront_signer: expiration = datetime.utcnow() + timedelta(seconds=expire) return self.cloudfront_signer.generate_presigned_url( url, date_less_than=expiration ) return url params["Bucket"] = self.bucket.name params["Key"] = name connection = ( self.connection if self.querystring_auth else self.unsigned_connection ) url = connection.meta.client.generate_presigned_url( "get_object", Params=params, ExpiresIn=expire, HttpMethod=http_method ) return url def get_available_name(self, name, max_length=None): """Overwrite existing file with the same name.""" name = clean_name(name) if self.file_overwrite: return get_available_overwrite_name(name, max_length) return super().get_available_name(name, max_length) class S3StaticStorage(S3Storage): """Querystring auth must be disabled so that url() returns a consistent output.""" querystring_auth = False class S3ManifestStaticStorage(ManifestFilesMixin, S3StaticStorage): """Add ManifestFilesMixin with S3StaticStorage.""" django-storages-1.14.5/storages/backends/s3boto3.py000066400000000000000000000004431475414346200221530ustar00rootroot00000000000000"""Backwards compat shim.""" from storages.backends.s3 import S3File as S3Boto3StorageFile # noqa from storages.backends.s3 import S3ManifestStaticStorage # noqa from storages.backends.s3 import S3StaticStorage # noqa from storages.backends.s3 import S3Storage as S3Boto3Storage # noqa django-storages-1.14.5/storages/backends/sftpstorage.py000066400000000000000000000177441475414346200232340ustar00rootroot00000000000000# SFTP storage backend for Django. # Author: Brent Tubbs # License: MIT # # Modeled on the FTP storage by Rafal Jonca import datetime import getpass import io import os import posixpath import stat from urllib.parse import urljoin import paramiko from django.core.files.base import File from django.utils.deconstruct import deconstructible from paramiko.util import ClosingContextManager from storages.base import BaseStorage from storages.utils import is_seekable from storages.utils import setting @deconstructible class SFTPStorage(ClosingContextManager, BaseStorage): def __init__(self, **settings): super().__init__(**settings) self._ssh = None self._sftp = None def get_default_settings(self): return { "host": setting("SFTP_STORAGE_HOST"), "params": setting("SFTP_STORAGE_PARAMS", {}), "interactive": setting("SFTP_STORAGE_INTERACTIVE", False), "file_mode": setting("SFTP_STORAGE_FILE_MODE"), "dir_mode": setting("SFTP_STORAGE_DIR_MODE"), "uid": setting("SFTP_STORAGE_UID"), "gid": setting("SFTP_STORAGE_GID"), "known_host_file": setting("SFTP_KNOWN_HOST_FILE"), "root_path": setting("SFTP_STORAGE_ROOT", ""), "base_url": setting("SFTP_BASE_URL") or setting("MEDIA_URL"), } def _connect(self): self._ssh = paramiko.SSHClient() known_host_file = self.known_host_file or os.path.expanduser( os.path.join("~", ".ssh", "known_hosts") ) if os.path.exists(known_host_file): self._ssh.load_host_keys(known_host_file) # and automatically add new host keys for hosts we haven't seen before. 
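        # Note: the connect() call further below forwards SFTP_STORAGE_PARAMS
        # verbatim to paramiko.SSHClient.connect(), so any keyword it accepts can
        # be supplied, e.g. (hypothetical values only):
        #   SFTP_STORAGE_PARAMS = {"username": "deploy", "port": 2222}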
self._ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) try: self._ssh.connect(self.host, **self.params) except paramiko.AuthenticationException as e: if self.interactive and "password" not in self.params: # If authentication has failed, and we haven't already tried # username/password, and configuration allows it, then try # again with username/password. if "username" not in self.params: self.params["username"] = getpass.getuser() self.params["password"] = getpass.getpass() self._connect() else: raise paramiko.AuthenticationException(e) if self._ssh.get_transport(): self._sftp = self._ssh.open_sftp() def close(self): if self._ssh is None: return self._ssh.close() @property def sftp(self): """Lazy SFTP connection""" if not self._sftp or not self._ssh.get_transport().is_active(): self._connect() return self._sftp def _remote_path(self, name): return posixpath.join(self.root_path, name) def _open(self, name, mode="rb"): return SFTPStorageFile(name, self, mode) def _read(self, name): remote_path = self._remote_path(name) return self.sftp.open(remote_path, "rb") def _chown(self, path, uid=None, gid=None): """Set uid and/or gid for file at path.""" # Paramiko's chown requires both uid and gid, so look them up first if # we're only supposed to set one. if uid is None or gid is None: attr = self.sftp.stat(path) uid = uid or attr.st_uid gid = gid or attr.st_gid self.sftp.chown(path, uid, gid) def _mkdir(self, path): """Create directory, recursing up to create parent dirs if necessary.""" parent = posixpath.dirname(path) if not self._path_exists(parent): self._mkdir(parent) self.sftp.mkdir(path) if self.dir_mode is not None: self.sftp.chmod(path, self.dir_mode) if self.uid or self.gid: self._chown(path, uid=self.uid, gid=self.gid) def _save(self, name, content): """Save file via SFTP.""" if is_seekable(content): content.seek(0, os.SEEK_SET) path = self._remote_path(name) dirname = posixpath.dirname(path) if not self._path_exists(dirname): self._mkdir(dirname) self.sftp.putfo(content, path) # set file permissions if configured if self.file_mode is not None: self.sftp.chmod(path, self.file_mode) if self.uid or self.gid: self._chown(path, uid=self.uid, gid=self.gid) return name def delete(self, name): try: self.sftp.remove(self._remote_path(name)) except OSError: pass def _path_exists(self, path): """Determines whether a file existis in the sftp storage given its absolute path.""" try: self.sftp.stat(path) return True except FileNotFoundError: return False def exists(self, name): """Determines whether a file exists within the root folder of the SFTP storage (as set by `SFTP_STORAGE_ROOT`). This method differs from `._path_exists()` in that the provided `name` is assumed to be the relative path of the file within the root folder. 
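        For example (illustrative paths only): with SFTP_STORAGE_ROOT set to
        "/srv/media", exists("avatars/a.png") checks "/srv/media/avatars/a.png"
        on the remote host, whereas _path_exists() would expect that absolute
        path directly.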
""" return self._path_exists(self._remote_path(name)) def _isdir_attr(self, item): # Return whether an item in sftp.listdir_attr results is a directory if item.st_mode is not None: return stat.S_IFMT(item.st_mode) == stat.S_IFDIR else: return False def listdir(self, path): remote_path = self._remote_path(path) dirs, files = [], [] for item in self.sftp.listdir_attr(remote_path): if self._isdir_attr(item): dirs.append(item.filename) else: files.append(item.filename) return dirs, files def size(self, name): remote_path = self._remote_path(name) return self.sftp.stat(remote_path).st_size # From Django def _datetime_from_timestamp(self, ts): tz = datetime.timezone.utc if setting("USE_TZ") else None return datetime.datetime.fromtimestamp(ts, tz=tz) def get_accessed_time(self, name): remote_path = self._remote_path(name) utime = self.sftp.stat(remote_path).st_atime return self._datetime_from_timestamp(utime) def get_modified_time(self, name): remote_path = self._remote_path(name) utime = self.sftp.stat(remote_path).st_mtime return self._datetime_from_timestamp(utime) def url(self, name): if self.base_url is None: raise ValueError("This file is not accessible via a URL.") return urljoin(self.base_url, name).replace("\\", "/") class SFTPStorageFile(File): def __init__(self, name, storage, mode): self.name = name self.mode = mode self.file = io.BytesIO() self._storage = storage self._is_read = False self._is_dirty = False @property def size(self): if not hasattr(self, "_size"): self._size = self._storage.size(self.name) return self._size def read(self, num_bytes=None): if not self._is_read: self.file = self._storage._read(self.name) self._is_read = True return self.file.read(num_bytes) def write(self, content): if "w" not in self.mode: raise AttributeError("File was opened for read-only access.") self.file = io.BytesIO(content) self._is_dirty = True self._is_read = True def open(self, mode=None): if not self.closed: self.seek(0) elif self.name and self._storage.exists(self.name): self.file = self._storage._open(self.name, mode or self.mode) else: raise ValueError("The file cannot be reopened.") def close(self): if self._is_dirty: self._storage._save(self.name, self) self.file.close() django-storages-1.14.5/storages/base.py000066400000000000000000000013761475414346200200250ustar00rootroot00000000000000from django.core.exceptions import ImproperlyConfigured from django.core.files.storage import Storage class BaseStorage(Storage): def __init__(self, **settings): default_settings = self.get_default_settings() for name, value in default_settings.items(): if not hasattr(self, name): setattr(self, name, value) for name, value in settings.items(): if name not in default_settings: raise ImproperlyConfigured( "Invalid setting '{}' for {}".format( name, self.__class__.__name__, ) ) setattr(self, name, value) def get_default_settings(self): return {} django-storages-1.14.5/storages/compress.py000066400000000000000000000026371475414346200207470ustar00rootroot00000000000000import io import zlib from gzip import GzipFile from typing import Optional from storages.utils import to_bytes class GzipCompressionWrapper(io.RawIOBase): """Wrapper for compressing file contents on the fly.""" def __init__(self, raw, level=zlib.Z_BEST_COMPRESSION): super().__init__() self.raw = raw self.compress = zlib.compressobj(level=level, wbits=31) self.leftover = bytearray() @staticmethod def readable(): return True def readinto(self, buf: bytearray) -> Optional[int]: size = len(buf) while len(self.leftover) < size: chunk = 
to_bytes(self.raw.read(size)) if not chunk: if self.compress: self.leftover += self.compress.flush(zlib.Z_FINISH) self.compress = None break self.leftover += self.compress.compress(chunk) if len(self.leftover) == 0: return 0 output = self.leftover[:size] size = len(output) buf[:size] = output self.leftover = self.leftover[size:] return size class CompressStorageMixin: def _compress_content(self, content): """Gzip a given string content.""" return GzipCompressionWrapper(content) class CompressedFileMixin: def _decompress_file(self, mode, file, mtime=0.0): return GzipFile(mode=mode, fileobj=file, mtime=mtime) django-storages-1.14.5/storages/utils.py000066400000000000000000000126401475414346200202470ustar00rootroot00000000000000import os import pathlib import posixpath from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.exceptions import SuspiciousFileOperation from django.core.files.utils import FileProxyMixin from django.core.files.utils import validate_file_name from django.utils.encoding import force_bytes def to_bytes(content): """Wrap Django's force_bytes to pass through bytearrays.""" if isinstance(content, bytearray): return content return force_bytes(content) def setting(name, default=None): """ Helper function to get a Django setting by name. If setting doesn't exists it will return a default. :param name: Name of setting :type name: str :param default: Value if setting is unfound :returns: Setting's value """ return getattr(settings, name, default) def clean_name(name): """ Normalize the name. Includes cleaning up Windows style paths, ensuring an ending trailing slash, and coercing from pathlib.PurePath. """ if isinstance(name, pathlib.PurePath): name = str(name) # Normalize Windows style paths clean_name = posixpath.normpath(name).replace("\\", "/") # os.path.normpath() can strip trailing slashes so we implement # a workaround here. if name.endswith("/") and not clean_name.endswith("/"): # Add a trailing slash as it was stripped. clean_name += "/" # Given an empty string, os.path.normpath() will return ., which we don't want if clean_name == ".": clean_name = "" return clean_name def safe_join(base, *paths): """ A version of django.utils._os.safe_join for S3 paths. Joins one or more path components to the base path component intelligently. Returns a normalized version of the final path. The final path must be located inside of the base path component (otherwise a ValueError is raised). Paths outside the base path indicate a possible security sensitive operation. """ base_path = base base_path = base_path.rstrip("/") paths = list(paths) final_path = base_path + "/" for path in paths: _final_path = posixpath.normpath(posixpath.join(final_path, path)) # posixpath.normpath() strips the trailing /. Add it back. if path.endswith("/") or _final_path + "/" == final_path: _final_path += "/" final_path = _final_path if final_path == base_path: final_path += "/" # Ensure final_path starts with base_path and that the next character after # the base path is /. base_path_len = len(base_path) if not final_path.startswith(base_path) or final_path[base_path_len] != "/": raise ValueError( "the joined path is located outside of the base path component" ) return final_path.lstrip("/") def check_location(storage): if storage.location.startswith("/"): correct = storage.location.lstrip("/") raise ImproperlyConfigured( ( "{}.location cannot begin with a leading slash. Found '{}'. Use '{}' " "instead." 
).format( storage.__class__.__name__, storage.location, correct, ) ) def lookup_env(names): """ Look up for names in environment. Returns the first element found. """ for name in names: value = os.environ.get(name) if value: return value def get_available_overwrite_name(name, max_length): # This is adapted from Django, and will be removed once # Django 5.1 is the lowest supported version dir_name, file_name = os.path.split(name) if ".." in pathlib.PurePath(dir_name).parts: raise SuspiciousFileOperation( "Detected path traversal attempt in '%s'" % dir_name ) validate_file_name(file_name, allow_relative_path=True) if max_length is None or len(name) <= max_length: return name file_root, file_ext = os.path.splitext(file_name) truncation = len(name) - max_length file_root = file_root[:-truncation] if not file_root: raise SuspiciousFileOperation( 'Storage tried to truncate away entire filename "%s". ' "Please make sure that the corresponding file field " 'allows sufficient "max_length".' % name ) name = os.path.join(dir_name, "{}{}".format(file_root, file_ext)) validate_file_name(name, allow_relative_path=True) return name def is_seekable(file_object): return not hasattr(file_object, "seekable") or file_object.seekable() class ReadBytesWrapper(FileProxyMixin): """ A wrapper for a file-like object, that makes read() always returns bytes. """ def __init__(self, file, encoding=None): """ :param file: The file-like object to wrap. :param encoding: Specify the encoding to use when file.read() returns strings. If not provided will default to file.encoding, of if that's not available, to utf-8. """ self.file = file self._encoding = encoding or getattr(file, "encoding", None) or "utf-8" def read(self, *args, **kwargs): content = self.file.read(*args, **kwargs) if not isinstance(content, bytes): content = content.encode(self._encoding) return content def close(self): self.file.close() def readable(self): return True django-storages-1.14.5/tests/000077500000000000000000000000001475414346200160455ustar00rootroot00000000000000django-storages-1.14.5/tests/__init__.py000066400000000000000000000000001475414346200201440ustar00rootroot00000000000000django-storages-1.14.5/tests/settings.py000066400000000000000000000004021475414346200202530ustar00rootroot00000000000000MEDIA_URL = "/media/" DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} SECRET_KEY = "hailthesunshine" USE_TZ = True # the following test settings are required for moto to work. 
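# The bucket does not need to exist anywhere real; moto intercepts the boto3
# calls made during the S3 tests, so an arbitrary name is sufficient here.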
AWS_STORAGE_BUCKET_NAME = "test-bucket" django-storages-1.14.5/tests/test_azure.py000066400000000000000000000462271475414346200206170ustar00rootroot00000000000000import datetime from datetime import timedelta from unittest import mock import django from azure.storage.blob import BlobProperties from django.core.exceptions import SuspiciousOperation from django.core.files.base import ContentFile from django.test import TestCase from django.test import override_settings from django.utils.timezone import make_aware from storages.backends import azure_storage class AzureStorageTest(TestCase): def setUp(self, *args): self.storage = azure_storage.AzureStorage() self.storage._client = mock.MagicMock() self.storage.overwrite_files = True self.account_name = "test" self.account_key = "key" self.container_name = "test" self.storage.azure_account = self.account_name self.storage.account_key = self.account_key self.storage.azure_container = self.container_name def test_get_valid_path(self): self.assertEqual( self.storage._get_valid_path("path/to/somewhere"), "path/to/somewhere" ) self.assertEqual( self.storage._get_valid_path("path/to/../somewhere"), "path/somewhere" ) self.assertEqual(self.storage._get_valid_path("path/to/../"), "path") self.assertEqual(self.storage._get_valid_path("path\\to\\..\\"), "path") self.assertEqual(self.storage._get_valid_path("path/name/"), "path/name") self.assertEqual( self.storage._get_valid_path("path\\to\\somewhere"), "path/to/somewhere" ) self.assertEqual(self.storage._get_valid_path("some/$/path"), "some/$/path") self.assertEqual(self.storage._get_valid_path("/$/path"), "$/path") self.assertEqual(self.storage._get_valid_path("path/$/"), "path/$") self.assertEqual( self.storage._get_valid_path("path/$/$/$/path"), "path/$/$/$/path" ) self.assertEqual(self.storage._get_valid_path("some///path"), "some/path") self.assertEqual(self.storage._get_valid_path("some//path"), "some/path") self.assertEqual(self.storage._get_valid_path("some\\\\path"), "some/path") self.assertEqual(self.storage._get_valid_path("a" * 1024), "a" * 1024) self.assertEqual(self.storage._get_valid_path("a/a" * 256), "a/a" * 256) self.assertRaises(ValueError, self.storage._get_valid_path, "") self.assertRaises(ValueError, self.storage._get_valid_path, "/") self.assertRaises(ValueError, self.storage._get_valid_path, "/../") self.assertRaises(ValueError, self.storage._get_valid_path, "..") self.assertRaises(ValueError, self.storage._get_valid_path, "///") self.assertRaises(ValueError, self.storage._get_valid_path, "a" * 1025) self.assertRaises(ValueError, self.storage._get_valid_path, "a/a" * 257) def test_get_valid_path_idempotency(self): self.assertEqual(self.storage._get_valid_path("//$//a//$//"), "$/a/$") self.assertEqual( self.storage._get_valid_path(self.storage._get_valid_path("//$//a//$//")), self.storage._get_valid_path("//$//a//$//"), ) some_path = "some path/some long name & then some.txt" self.assertEqual(self.storage._get_valid_path(some_path), some_path) self.assertEqual( self.storage._get_valid_path(self.storage._get_valid_path(some_path)), self.storage._get_valid_path(some_path), ) def test_get_available_name(self): self.storage.overwrite_files = False client_mock = mock.MagicMock() client_mock.exists.side_effect = [True, False] self.storage._client.get_blob_client.return_value = client_mock name = self.storage.get_available_name("foo.txt") self.assertTrue(name.startswith("foo_")) self.assertTrue(name.endswith(".txt")) self.assertTrue(len(name) > len("foo.txt")) 
self.assertEqual(client_mock.exists.call_count, 2) def test_get_available_name_first(self): self.storage.overwrite_files = False client_mock = mock.MagicMock() client_mock.exists.return_value = False self.storage._client.get_blob_client.return_value = client_mock self.assertEqual( self.storage.get_available_name("foo bar baz.txt"), "foo bar baz.txt" ) self.assertEqual(client_mock.exists.call_count, 1) def test_get_available_name_max_len(self): self.storage.overwrite_files = False # if you wonder why this is, file-system # storage will raise when file name is too long as well, # the form should validate this client_mock = mock.MagicMock() client_mock.exists.side_effect = [True, False] self.storage._client.get_blob_client.return_value = client_mock self.assertRaises(ValueError, self.storage.get_available_name, "a" * 1025) name = self.storage.get_available_name( "a" * 1000, max_length=100 ) # max_len == 1024 self.assertEqual(len(name), 100) self.assertTrue("_" in name) self.assertEqual(client_mock.exists.call_count, 2) def test_get_available_invalid(self): self.storage.overwrite_files = False self.storage._client.exists.return_value = False if django.VERSION[:2] == (3, 0): # Django 2.2.21 added this security fix: # https://docs.djangoproject.com/en/3.2/releases/2.2.21/#cve-2021-31542-potential-directory-traversal-via-uploaded-files # It raises SuspiciousOperation before we get to our ValueError. # The fix wasn't applied to 3.0 (no longer in support), but was applied to # 3.1 & 3.2. self.assertRaises(ValueError, self.storage.get_available_name, "") self.assertRaises(ValueError, self.storage.get_available_name, "/") self.assertRaises(ValueError, self.storage.get_available_name, ".") self.assertRaises(ValueError, self.storage.get_available_name, "///") else: self.assertRaises(SuspiciousOperation, self.storage.get_available_name, "") self.assertRaises(SuspiciousOperation, self.storage.get_available_name, "/") self.assertRaises(SuspiciousOperation, self.storage.get_available_name, ".") self.assertRaises( SuspiciousOperation, self.storage.get_available_name, "///" ) self.assertRaises(ValueError, self.storage.get_available_name, "...") def test_url(self): blob_mock = mock.MagicMock() blob_mock.url = "https://ret_foo.blob.core.windows.net/test/some%20blob" self.storage._client.get_blob_client.return_value = blob_mock self.assertEqual(self.storage.url("some blob"), blob_mock.url) self.storage._client.get_blob_client.assert_called_once_with("some blob") def test_url_unsafe_chars(self): blob_mock = mock.MagicMock() blob_mock.url = "https://ret_foo.blob.core.windows.net/test/some%20blob" self.storage._client.get_blob_client.return_value = blob_mock self.assertEqual( self.storage.url("foo;?:@=&\"<>#%{}|^~[]`bar/~!*()'"), blob_mock.url ) self.storage._client.get_blob_client.assert_called_once_with( "foo;?:@=&\"<>#%{}|^~[]`bar/~!*()'" ) @mock.patch("storages.backends.azure_storage.generate_blob_sas") def test_url_expire(self, generate_blob_sas_mocked): generate_blob_sas_mocked.return_value = "foo_token" blob_mock = mock.MagicMock() blob_mock.url = "https://ret_foo.blob.core.windows.net/test/some%20blob" self.storage._client.get_blob_client.return_value = blob_mock self.storage.account_name = self.account_name fixed_time = make_aware( datetime.datetime(2016, 11, 6, 4), datetime.timezone.utc ) with mock.patch("storages.backends.azure_storage.datetime") as d_mocked: # Implicit read permission d_mocked.utcnow.return_value = fixed_time self.assertEqual( self.storage.url("some blob", 100), 
"https://ret_foo.blob.core.windows.net/test/some%20blob", ) generate_blob_sas_mocked.assert_called_once_with( self.account_name, self.container_name, "some blob", account_key=self.account_key, user_delegation_key=None, permission=mock.ANY, expiry=fixed_time + timedelta(seconds=100), ) called_args, called_kwargs = generate_blob_sas_mocked.call_args self.assertEqual(str(called_kwargs["permission"]), "r") # Explicit write permission d_mocked.utcnow.return_value = fixed_time self.assertEqual( self.storage.url("some blob", expire=100, mode="w"), "https://ret_foo.blob.core.windows.net/test/some%20blob", ) generate_blob_sas_mocked.assert_called_with( self.account_name, self.container_name, "some blob", account_key=self.account_key, user_delegation_key=None, permission=mock.ANY, expiry=fixed_time + timedelta(seconds=100), ) called_args, called_kwargs = generate_blob_sas_mocked.call_args self.assertEqual(str(called_kwargs["permission"]), "w") def test_url_custom_domain(self): self.storage.custom_domain = "foo_domain" blob_mock = mock.MagicMock() blob_mock.url = "https://ret_foo.blob.core.windows.net/test/foo_name" self.storage._client.get_blob_client.return_value = blob_mock url = self.storage.url("foo_name") self.assertEqual(url, "https://foo_domain/test/foo_name") @mock.patch("storages.backends.azure_storage.generate_blob_sas") def test_url_expire_user_delegation_key(self, generate_blob_sas_mocked): generate_blob_sas_mocked.return_value = "foo_token" blob_mock = mock.MagicMock() blob_mock.url = "https://ret_foo.blob.core.windows.net/test/some%20blob" self.storage._client.get_blob_client.return_value = blob_mock self.storage.account_name = self.account_name service_client = mock.MagicMock() self.storage._service_client = service_client self.storage.token_credential = "token_credential" fixed_time = make_aware( datetime.datetime(2016, 11, 6, 4), datetime.timezone.utc ) with mock.patch("storages.backends.azure_storage.datetime") as d_mocked: d_mocked.utcnow.return_value = fixed_time service_client.get_user_delegation_key.return_value = "user delegation key" self.assertEqual( self.storage.url("some blob", 100), "https://ret_foo.blob.core.windows.net/test/some%20blob", ) generate_blob_sas_mocked.assert_called_once_with( self.account_name, self.container_name, "some blob", account_key=self.account_key, user_delegation_key="user delegation key", permission=mock.ANY, expiry=fixed_time + timedelta(seconds=100), ) def test_container_client_default_params(self): storage = azure_storage.AzureStorage() storage.account_name = self.account_name with mock.patch( "storages.backends.azure_storage.BlobServiceClient", autospec=True ) as bsc_mocked: client_mock = mock.MagicMock() bsc_mocked.return_value.get_container_client.return_value = client_mock self.assertEqual(storage.client, client_mock) bsc_mocked.assert_called_once_with( "https://test.blob.core.windows.net", credential=None ) def test_container_client_params_account_key(self): storage = azure_storage.AzureStorage() storage.account_name = "foo_name" storage.custom_domain = "foo_domain" storage.account_key = "foo_key" with mock.patch( "storages.backends.azure_storage.BlobServiceClient", autospec=True ) as bsc_mocked: client_mock = mock.MagicMock() bsc_mocked.return_value.get_container_client.return_value = client_mock self.assertEqual(storage.client, client_mock) bsc_mocked.assert_called_once_with( "https://foo_name.blob.core.windows.net", credential={"account_name": "foo_name", "account_key": "foo_key"}, ) def test_container_client_params_sas_token(self): 
storage = azure_storage.AzureStorage() storage.account_name = "foo_name" storage.azure_ssl = False storage.sas_token = "foo_token" with mock.patch( "storages.backends.azure_storage.BlobServiceClient", autospec=True ) as bsc_mocked: client_mock = mock.MagicMock() bsc_mocked.return_value.get_container_client.return_value = client_mock self.assertEqual(storage.client, client_mock) bsc_mocked.assert_called_once_with( "http://foo_name.blob.core.windows.net", credential="foo_token" ) def test_container_client_params_token_credential(self): storage = azure_storage.AzureStorage() storage.account_name = self.account_name storage.token_credential = "foo_cred" with mock.patch( "storages.backends.azure_storage.BlobServiceClient", autospec=True ) as bsc_mocked: client_mock = mock.MagicMock() bsc_mocked.return_value.get_container_client.return_value = client_mock self.assertEqual(storage.client, client_mock) bsc_mocked.assert_called_once_with( "https://test.blob.core.windows.net", credential="foo_cred" ) def test_connection_string_can_have_missing(self): storage = azure_storage.AzureStorage( connection_string="AccountKey=abc;Foobar=xyz;" ) self.assertEqual(storage.account_key, "abc") self.assertIsNone(storage.account_name) def test_container_client_params_connection_string(self): storage = azure_storage.AzureStorage() storage.account_name = self.account_name storage.connection_string = "foo_conn" with mock.patch( "storages.backends.azure_storage.BlobServiceClient.from_connection_string", spec=azure_storage.BlobServiceClient.from_connection_string, ) as bsc_mocked: client_mock = mock.MagicMock() bsc_mocked.return_value.get_container_client.return_value = client_mock self.assertEqual(storage.client, client_mock) bsc_mocked.assert_called_once_with("foo_conn") # From boto3 def test_storage_save(self): """ Test saving a file """ name = "test storage save.txt" content = ContentFile("new content") with mock.patch("storages.backends.azure_storage.ContentSettings") as c_mocked: c_mocked.return_value = "content_settings_foo" self.assertEqual(self.storage.save(name, content), name) self.storage._client.upload_blob.assert_called_once_with( name, content.file, content_settings="content_settings_foo", max_concurrency=2, timeout=20, overwrite=True, ) c_mocked.assert_called_once_with( content_type="text/plain", content_encoding=None, cache_control=None ) def test_storage_open_write(self): """ Test opening a file in write mode """ name = "test_open_for_writïng.txt" content = "new content" file = self.storage.open(name, "w") file.write(content) written_file = file.file file.close() self.storage._client.upload_blob.assert_called_once_with( name, written_file, content_settings=mock.ANY, max_concurrency=2, timeout=20, overwrite=True, ) def test_storage_exists(self): blob_name = "blob" client_mock = mock.MagicMock() self.storage._client.get_blob_client.return_value = client_mock self.assertTrue(self.storage.exists(blob_name)) self.assertEqual(client_mock.exists.call_count, 1) def test_delete_blob(self): self.storage.delete("name") self.storage._client.delete_blob.assert_called_once_with("name", timeout=20) def test_storage_listdir_base(self): file_names = ["some/path/1.txt", "2.txt", "other/path/3.txt", "4.txt"] result = [] for p in file_names: obj = mock.MagicMock() obj.name = p result.append(obj) self.storage._client.list_blobs.return_value = iter(result) dirs, files = self.storage.listdir("") self.storage._client.list_blobs.assert_called_with( name_starts_with="", timeout=20 ) self.assertEqual(len(dirs), 0) 
self.assertEqual(len(files), 4) for filename in ["2.txt", "4.txt", "other/path/3.txt", "some/path/1.txt"]: self.assertTrue( filename in files, """ "{}" not in file list "{}".""".format(filename, files), ) def test_size_of_file(self): props = BlobProperties() props.size = 12 client_mock = mock.MagicMock() client_mock.get_blob_properties.return_value = props self.storage._client.get_blob_client.return_value = client_mock self.assertEqual(12, self.storage.size("name")) def test_override_settings(self): with override_settings(AZURE_CONTAINER="foo1"): storage = azure_storage.AzureStorage() self.assertEqual(storage.azure_container, "foo1") with override_settings(AZURE_CONTAINER="foo2"): storage = azure_storage.AzureStorage() self.assertEqual(storage.azure_container, "foo2") def test_override_class_variable(self): class MyStorage1(azure_storage.AzureStorage): azure_container = "foo1" storage = MyStorage1() self.assertEqual(storage.azure_container, "foo1") class MyStorage2(azure_storage.AzureStorage): azure_container = "foo2" storage = MyStorage2() self.assertEqual(storage.azure_container, "foo2") def test_override_init_argument(self): storage = azure_storage.AzureStorage(azure_container="foo1") self.assertEqual(storage.azure_container, "foo1") storage = azure_storage.AzureStorage(azure_container="foo2") self.assertEqual(storage.azure_container, "foo2") @mock.patch("storages.backends.azure_storage.BlobServiceClient", autospec=True) def test_client_settings(self, bsc): with override_settings(AZURE_CLIENT_OPTIONS={"api_version": "1.3"}): storage = azure_storage.AzureStorage(account_name="test") client_mock = mock.MagicMock() bsc.return_value.get_container_client.return_value = client_mock self.assertEqual(storage.client, client_mock) bsc.assert_called_once_with( "https://test.blob.core.windows.net", credential=None, api_version="1.3" ) django-storages-1.14.5/tests/test_dropbox.py000066400000000000000000000173541475414346200211450ustar00rootroot00000000000000import io from datetime import datetime from unittest import mock from django.core.exceptions import ImproperlyConfigured from django.core.exceptions import SuspiciousFileOperation from django.core.files.base import File from django.test import TestCase from django.test import override_settings from dropbox.files import FileMetadata from dropbox.files import FolderMetadata from dropbox.files import GetTemporaryLinkResult from requests.models import Response from storages.backends import dropbox FILE_DATE = datetime(2015, 8, 24, 15, 6, 41) FILE_METADATA_MOCK = mock.MagicMock(spec=FileMetadata) FILE_METADATA_MOCK.size = 4 FILE_METADATA_MOCK.client_modified = FILE_DATE FILE_METADATA_MOCK.server_modified = FILE_DATE FILE_METADATA_MOCK.path_lower = "/foo.txt" FILE_METADATA_MOCK.path_display = "/foo.txt" FILE_METADATA_MOCK.name = "foo.txt" FILE_METADATA_MOCK.rev = "012c0000000150c838f0" FILE_METADATA_MOCK.content_hash = ( "3865695d47c02576e8578df30d56bb3faf737c11044d804f09ffb6484453020f" ) FOLDER_METADATA_MOCK = mock.MagicMock(spec=FolderMetadata) FOLDER_METADATA_MOCK.name = "bar" FILES_MOCK = mock.MagicMock(spec=FolderMetadata) FILES_MOCK.entries = [FILE_METADATA_MOCK, FOLDER_METADATA_MOCK] FILE_MEDIA_MOCK = mock.MagicMock(spec=GetTemporaryLinkResult) FILE_MEDIA_MOCK.link = "https://dl.dropboxusercontent.com/1/view/foo" FILES_EMPTY_MOCK = mock.MagicMock(spec=FolderMetadata) FILES_EMPTY_MOCK.entries = [] RESPONSE_200_MOCK = mock.MagicMock(spec=Response) RESPONSE_200_MOCK.status_code = 200 RESPONSE_200_MOCK.content = b"bar" RESPONSE_500_MOCK = 
mock.MagicMock(spec=Response) RESPONSE_500_MOCK.status_code = 500 class DropboxTest(TestCase): def setUp(self, *args): self.storage = dropbox.DropboxStorage("foo") def test_no_access_token(self, *args): with self.assertRaises(ImproperlyConfigured): dropbox.DropboxStorage(None) def test_setting_access_token(self): with override_settings(DROPBOX_OAUTH2_TOKEN="abc"): storage = dropbox.DropboxStorage() self.assertEqual(storage.oauth2_access_token, "abc") def test_refresh_token_app_key_no_app_secret(self, *args): inputs = { "oauth2_refresh_token": "foo", "app_key": "bar", } with self.assertRaises(ImproperlyConfigured): dropbox.DropboxStorage(**inputs) def test_refresh_token_app_secret_no_app_key(self, *args): inputs = { "oauth2_refresh_token": "foo", "app_secret": "bar", } with self.assertRaises(ImproperlyConfigured): dropbox.DropboxStorage(**inputs) def test_app_key_app_secret_no_refresh_token(self, *args): inputs = { "app_key": "foo", "app_secret": "bar", } with self.assertRaises(ImproperlyConfigured): dropbox.DropboxStorage(**inputs) @mock.patch("dropbox.Dropbox.files_delete", return_value=FILE_METADATA_MOCK) def test_delete(self, *args): self.storage.delete("foo") @mock.patch("dropbox.Dropbox.files_get_metadata", return_value=[FILE_METADATA_MOCK]) def test_exists(self, *args): exists = self.storage.exists("foo") self.assertTrue(exists) @mock.patch("dropbox.Dropbox.files_get_metadata", return_value=[]) def test_not_exists(self, *args): exists = self.storage.exists("bar") self.assertFalse(exists) @mock.patch("dropbox.Dropbox.files_list_folder", return_value=FILES_MOCK) def test_listdir(self, *args): dirs, files = self.storage.listdir("/") dirs2, files2 = self.storage.listdir("") self.assertEqual(dirs, dirs2) self.assertEqual(files2, files2) self.assertGreater(len(dirs), 0) self.assertGreater(len(files), 0) self.assertEqual(dirs[0], "bar") self.assertEqual(files[0], "foo.txt") @mock.patch("dropbox.Dropbox.files_get_metadata", return_value=FILE_METADATA_MOCK) def test_size(self, *args): size = self.storage.size("foo") self.assertEqual(size, FILE_METADATA_MOCK.size) def test_open(self, *args): obj = self.storage._open("foo") self.assertIsInstance(obj, File) @mock.patch("dropbox.Dropbox.files_upload", return_value="foo") @mock.patch("dropbox.Dropbox.files_get_metadata", return_value=None) def test_save(self, files_upload, *args): name = self.storage.save("foo", File(io.BytesIO(b"bar"), "foo")) self.assertTrue(files_upload.called) self.assertEqual(name, "foo") @mock.patch("dropbox.Dropbox.files_upload") @mock.patch("dropbox.Dropbox.files_upload_session_finish") @mock.patch("dropbox.Dropbox.files_upload_session_append_v2") @mock.patch( "dropbox.Dropbox.files_upload_session_start", return_value=mock.MagicMock(session_id="foo"), ) def test_chunked_upload(self, start, append, finish, upload): large_file = File(io.BytesIO(b"bar" * self.storage.CHUNK_SIZE), "foo") self.storage._save("foo", large_file) self.assertTrue(start.called) self.assertTrue(append.called) self.assertTrue(finish.called) self.assertFalse(upload.called) @mock.patch( "dropbox.Dropbox.files_get_temporary_link", return_value=FILE_MEDIA_MOCK ) def test_url(self, *args): url = self.storage.url("foo") self.assertEqual(url, FILE_MEDIA_MOCK.link) def test_formats(self, *args): self.storage = dropbox.DropboxStorage("foo") files = self.storage._full_path("") self.assertEqual(files, self.storage._full_path("/")) self.assertEqual(files, self.storage._full_path(".")) self.assertEqual(files, self.storage._full_path("..")) self.assertEqual(files, 
self.storage._full_path("../..")) class DropboxFileTest(TestCase): def setUp(self, *args): self.storage = dropbox.DropboxStorage("foo") self.file = dropbox.DropboxFile("/foo.txt", self.storage) @mock.patch( "dropbox.Dropbox.files_download", return_value=(FILE_METADATA_MOCK, RESPONSE_200_MOCK), ) def test_read(self, *args): with self.storage.open("foo.txt") as file: self.assertEqual(file.read(), b"bar") @mock.patch( "dropbox.Dropbox.files_download", return_value=(FILE_METADATA_MOCK, RESPONSE_500_MOCK), ) def test_server_bad_response(self, *args): with self.assertRaises(dropbox.DropboxStorageException): with self.storage.open("foo.txt") as file: file.read() @mock.patch("dropbox.Dropbox.files_list_folder", return_value=FILES_EMPTY_MOCK) class DropboxRootPathTest(TestCase): def test_jailed(self, *args): self.storage = dropbox.DropboxStorage("foo", root_path="/bar") dirs, files = self.storage.listdir("/") self.assertFalse(dirs) self.assertFalse(files) @mock.patch("dropbox.Dropbox.files_upload", return_value="foo") @mock.patch("dropbox.Dropbox.files_get_metadata", return_value=None) def test_saves(self, *args): self.storage = dropbox.DropboxStorage("foo", root_path="/app.qoo.foo/") for filename in ["xyz", "quark"]: with self.subTest(filename=filename): name = self.storage.save(filename, File(io.BytesIO(b"abc"), "def")) self.assertEqual(name, filename) def test_suspicious(self, *args): self.storage = dropbox.DropboxStorage("foo", root_path="/bar") with self.assertRaises((SuspiciousFileOperation, ValueError)): self.storage._full_path("..") def test_formats(self, *args): self.storage = dropbox.DropboxStorage("foo", root_path="/bar") files = self.storage._full_path("") self.assertEqual(files, self.storage._full_path("/")) self.assertEqual(files, self.storage._full_path(".")) django-storages-1.14.5/tests/test_files/000077500000000000000000000000001475414346200202065ustar00rootroot00000000000000django-storages-1.14.5/tests/test_files/windows-1252-encoded.txt000066400000000000000000000000031475414346200244200ustar00rootroot00000000000000django-storages-1.14.5/tests/test_ftp.py000066400000000000000000000240451475414346200202540ustar00rootroot00000000000000import io from unittest.mock import patch from django.core.exceptions import ImproperlyConfigured from django.core.files.base import File from django.test import TestCase from django.test import override_settings from storages.backends import ftp USER = "foo" PASSWORD = "b@r" HOST = "localhost" PORT = 2121 LIST_FIXTURE = """drwxr-xr-x 2 ftp nogroup 4096 Jul 27 09:46 dir -rw-r--r-- 1 ftp nogroup 1024 Jul 27 09:45 fi -rw-r--r-- 1 ftp nogroup 2048 Jul 27 09:50 fi2""" def geturl(scheme="ftp", pwd=PASSWORD): return URL_TEMPLATE.format( scheme=scheme, user=USER, passwd=pwd, host=HOST, port=PORT ) URL_TEMPLATE = "{scheme}://{user}:{passwd}@{host}:{port}/" URL = geturl() def list_retrlines(cmd, func): for line in LIST_FIXTURE.splitlines(): func(line) class FTPTest(TestCase): def setUp(self): self.storage = ftp.FTPStorage(location=URL) def test_init_no_location(self): with self.assertRaises(ImproperlyConfigured): ftp.FTPStorage() @patch("storages.backends.ftp.setting", return_value=URL) def test_init_location_from_setting(self, mock_setting): storage = ftp.FTPStorage() self.assertTrue(mock_setting.called) self.assertEqual(storage.location, URL) def test_decode_location(self): config = self.storage._decode_location(URL) wanted_config = { "passwd": "b@r", "host": "localhost", "user": "foo", "active": False, "path": "/", "port": 2121, "secure": False, } 
self.assertEqual(config, wanted_config) # Test active FTP config = self.storage._decode_location("a" + URL) wanted_config = { "passwd": "b@r", "host": "localhost", "user": "foo", "active": True, "path": "/", "port": 2121, "secure": False, } self.assertEqual(config, wanted_config) def test_decode_location_error(self): with self.assertRaises(ImproperlyConfigured): self.storage._decode_location("foo") with self.assertRaises(ImproperlyConfigured): self.storage._decode_location("http://foo.pt") def test_decode_location_urlchars_password(self): self.storage._decode_location(geturl(pwd="b#r")) @override_settings(FTP_STORAGE_LOCATION=URL) def test_override_settings(self): storage = ftp.FTPStorage() self.assertEqual(storage.encoding, "latin-1") with override_settings(FTP_STORAGE_ENCODING="utf-8"): storage = ftp.FTPStorage() self.assertEqual(storage.encoding, "utf-8") storage = ftp.FTPStorage(encoding="utf-8") self.assertEqual(storage.encoding, "utf-8") @patch("ftplib.FTP") def test_start_connection(self, mock_ftp): self.storage._start_connection() self.assertIsNotNone(self.storage._connection) # Start active storage = ftp.FTPStorage(location="a" + URL) storage._start_connection() @patch("ftplib.FTP", **{"return_value.pwd.side_effect": IOError()}) def test_start_connection_timeout(self, mock_ftp): self.storage._start_connection() self.assertIsNotNone(self.storage._connection) @patch("ftplib.FTP", **{"return_value.connect.side_effect": IOError()}) def test_start_connection_error(self, mock_ftp): with self.assertRaises(ftp.FTPStorageException): self.storage._start_connection() @patch("ftplib.FTP", **{"return_value.quit.return_value": None}) def test_disconnect(self, mock_ftp_quit): self.storage._start_connection() self.storage.disconnect() self.assertIsNone(self.storage._connection) @patch("ftplib.FTP", **{"return_value.pwd.return_value": "foo"}) def test_mkremdirs(self, mock_ftp): self.storage._start_connection() self.storage._mkremdirs("foo/bar") @patch("ftplib.FTP", **{"return_value.pwd.return_value": "foo"}) def test_mkremdirs_n_subdirectories(self, mock_ftp): self.storage._start_connection() self.storage._mkremdirs("foo/bar/null") @patch( "ftplib.FTP", **{ "return_value.pwd.return_value": "foo", "return_value.storbinary.return_value": None, }, ) def test_put_file(self, mock_ftp): self.storage._start_connection() self.storage._put_file("foo", File(io.BytesIO(b"foo"), "foo")) @patch( "ftplib.FTP", **{ "return_value.pwd.return_value": "foo", "return_value.storbinary.side_effect": IOError(), }, ) def test_put_file_error(self, mock_ftp): self.storage._start_connection() with self.assertRaises(ftp.FTPStorageException): self.storage._put_file("foo", File(io.BytesIO(b"foo"), "foo")) def test_open(self): remote_file = self.storage._open("foo") self.assertIsInstance(remote_file, ftp.FTPStorageFile) @patch("ftplib.FTP", **{"return_value.pwd.return_value": "foo"}) def test_read(self, mock_ftp): self.storage._start_connection() self.storage._read("foo") @patch("ftplib.FTP", **{"return_value.pwd.side_effect": IOError()}) def test_read2(self, mock_ftp): self.storage._start_connection() with self.assertRaises(ftp.FTPStorageException): self.storage._read("foo") @patch( "ftplib.FTP", **{ "return_value.pwd.return_value": "foo", "return_value.storbinary.return_value": None, }, ) def test_save(self, mock_ftp): self.storage._save("foo", File(io.BytesIO(b"foo"), "foo")) @patch("ftplib.FTP", **{"return_value.retrlines": list_retrlines}) def test_listdir(self, mock_retrlines): dirs, files = self.storage.listdir("/") 
self.assertEqual(len(dirs), 1) self.assertEqual(dirs, ["dir"]) self.assertEqual(len(files), 2) self.assertEqual(sorted(files), sorted(["fi", "fi2"])) @patch("ftplib.FTP", **{"return_value.retrlines.side_effect": IOError()}) def test_listdir_error(self, mock_ftp): with self.assertRaises(ftp.FTPStorageException): self.storage.listdir("/") @patch("ftplib.FTP", **{"return_value.nlst.return_value": ["foo", "foo2"]}) def test_exists(self, mock_ftp): self.assertTrue(self.storage.exists("foo")) self.assertFalse(self.storage.exists("bar")) @patch("ftplib.FTP", **{"return_value.nlst.side_effect": IOError()}) def test_exists_error(self, mock_ftp): with self.assertRaises(ftp.FTPStorageException): self.storage.exists("foo") @patch("ftplib.FTP", **{"return_value.nlst.return_value": ["foo", "foo2"]}) def test_exists_overwrite(self, mock_ftp): with override_settings(FTP_ALLOW_OVERWRITE=True): storage = ftp.FTPStorage(location=URL) self.assertFalse(storage.exists("foo")) @patch( "ftplib.FTP", **{ "return_value.delete.return_value": None, "return_value.nlst.return_value": ["foo", "foo2"], }, ) def test_delete(self, mock_ftp): self.storage.delete("foo") self.assertTrue(mock_ftp.return_value.delete.called) @patch("ftplib.FTP", **{"return_value.retrlines": list_retrlines}) def test_size(self, mock_ftp): self.assertEqual(1024, self.storage.size("fi")) self.assertEqual(2048, self.storage.size("fi2")) self.assertEqual(0, self.storage.size("bar")) @patch("ftplib.FTP", **{"return_value.retrlines.side_effect": IOError()}) def test_size_error(self, mock_ftp): self.assertEqual(0, self.storage.size("foo")) def test_url(self): with self.assertRaises(ValueError): self.storage.base_url = None self.storage.url("foo") self.storage = ftp.FTPStorage(location=URL, base_url="http://foo.bar/") self.assertEqual("http://foo.bar/foo", self.storage.url("foo")) class FTPStorageFileTest(TestCase): def setUp(self): self.storage = ftp.FTPStorage(location=URL) @patch("ftplib.FTP", **{"return_value.retrlines": list_retrlines}) def test_size(self, mock_ftp): file_ = ftp.FTPStorageFile("fi", self.storage, "wb") self.assertEqual(file_.size, 1024) @patch("ftplib.FTP", **{"return_value.pwd.return_value": "foo"}) @patch("storages.backends.ftp.FTPStorage._read", return_value=io.BytesIO(b"foo")) def test_readlines(self, mock_ftp, mock_storage): file_ = ftp.FTPStorageFile("fi", self.storage, "wb") self.assertEqual([b"foo"], file_.readlines()) @patch("ftplib.FTP", **{"return_value.pwd.return_value": "foo"}) @patch("storages.backends.ftp.FTPStorage._read", return_value=io.BytesIO(b"foo")) def test_read(self, mock_ftp, mock_storage): file_ = ftp.FTPStorageFile("fi", self.storage, "wb") self.assertEqual(b"foo", file_.read()) def test_write(self): file_ = ftp.FTPStorageFile("fi", self.storage, "wb") file_.write(b"foo") file_.seek(0) self.assertEqual(file_.file.read(), b"foo") @patch("ftplib.FTP", **{"return_value.pwd.return_value": "foo"}) @patch("storages.backends.ftp.FTPStorage._read", return_value=io.BytesIO(b"foo")) def test_close(self, mock_ftp, mock_storage): file_ = ftp.FTPStorageFile("fi", self.storage, "wb") file_.is_dirty = True file_.read() file_.close() class FTPTLSTest(TestCase): def setUp(self): self.storage = ftp.FTPStorage(location=geturl(scheme="ftps")) def test_decode_location(self): wanted_config = { "passwd": "b@r", "host": "localhost", "user": "foo", "active": False, "path": "/", "port": 2121, "secure": True, } self.assertEqual(self.storage._config, wanted_config) @patch("ftplib.FTP_TLS") def 
test_start_connection_calls_prot_p(self, mock_ftp): self.storage._start_connection() self.storage._connection.prot_p.assert_called_once() django-storages-1.14.5/tests/test_gcloud.py000066400000000000000000000530601475414346200207370ustar00rootroot00000000000000import datetime import gzip import mimetypes from datetime import timedelta from unittest import mock from django.core.exceptions import ImproperlyConfigured from django.core.files.base import ContentFile from django.test import TestCase from django.test import override_settings from django.utils import timezone from google.cloud.exceptions import NotFound from google.cloud.storage.blob import Blob from google.cloud.storage.retry import DEFAULT_RETRY from storages.backends import gcloud from storages.backends.gcloud import GoogleCloudFile class GCloudTestCase(TestCase): def setUp(self): self.bucket_name = "test_bucket" self.filename = "test_file.txt" self.storage = gcloud.GoogleCloudStorage(bucket_name=self.bucket_name) self.client_patcher = mock.patch("storages.backends.gcloud.Client") self.client_patcher.start() def tearDown(self): super().tearDown() self.client_patcher.stop() class GCloudStorageTests(GCloudTestCase): def test_open_read(self): """ Test opening a file and reading from it """ data = b"This is some test read data." with self.storage.open(self.filename) as f: self.storage._client.bucket.assert_called_with(self.bucket_name) self.storage._bucket.get_blob.assert_called_with( self.filename, chunk_size=None ) f.blob.download_to_file = lambda tmpfile, **kwargs: tmpfile.write(data) self.assertEqual(f.read(), data) def test_open_read_num_bytes(self): data = b"This is some test read data." num_bytes = 10 with self.storage.open(self.filename) as f: self.storage._client.bucket.assert_called_with(self.bucket_name) self.storage._bucket.get_blob.assert_called_with( self.filename, chunk_size=None ) f.blob.download_to_file = lambda tmpfile, **kwargs: tmpfile.write(data) self.assertEqual(f.read(num_bytes), data[0:num_bytes]) def test_open_read_nonexistent(self): self.storage._bucket = mock.MagicMock() self.storage._bucket.get_blob.return_value = None self.assertRaises(FileNotFoundError, self.storage.open, self.filename) self.storage._bucket.get_blob.assert_called_with(self.filename, chunk_size=None) def test_open_read_nonexistent_unicode(self): filename = "ủⓝï℅ⅆℇ.txt" self.storage._bucket = mock.MagicMock() self.storage._bucket.get_blob.return_value = None self.assertRaises(FileNotFoundError, self.storage.open, filename) @mock.patch("storages.backends.gcloud.Blob") def test_open_write(self, MockBlob): """ Test opening a file and writing to it """ data = "This is some test write data." # Simulate the file not existing before the write self.storage._bucket = mock.MagicMock() self.storage._bucket.get_blob.return_value = None self.storage.default_acl = "projectPrivate" f = self.storage.open(self.filename, "wb") MockBlob.assert_called_with( self.filename, self.storage._bucket, chunk_size=None ) f.write(data) tmpfile = f._file # File data is not actually written until close(), so do that. f.close() MockBlob().upload_from_file.assert_called_with( tmpfile, rewind=True, content_type=mimetypes.guess_type(self.filename)[0], retry=DEFAULT_RETRY, predefined_acl="projectPrivate", ) def test_save(self): data = "This is some test content." 
content = ContentFile(data) self.storage.save(self.filename, content) self.storage._client.bucket.assert_called_with(self.bucket_name) self.storage._bucket.get_blob().upload_from_file.assert_called_with( content, rewind=True, retry=DEFAULT_RETRY, size=len(data), content_type=mimetypes.guess_type(self.filename)[0], predefined_acl=None, ) def test_save2(self): data = "This is some test ủⓝï℅ⅆℇ content." filename = "ủⓝï℅ⅆℇ.txt" content = ContentFile(data) self.storage.save(filename, content) self.storage._client.bucket.assert_called_with(self.bucket_name) self.storage._bucket.get_blob().upload_from_file.assert_called_with( content, rewind=True, retry=DEFAULT_RETRY, size=len(data), content_type=mimetypes.guess_type(filename)[0], predefined_acl=None, ) def test_save_with_default_acl(self): data = "This is some test ủⓝï℅ⅆℇ content." filename = "ủⓝï℅ⅆℇ.txt" content = ContentFile(data) # ACL Options # 'projectPrivate', 'bucketOwnerRead', 'bucketOwnerFullControl', # 'private', 'authenticatedRead', 'publicRead', 'publicReadWrite' self.storage.default_acl = "publicRead" self.storage.save(filename, content) self.storage._client.bucket.assert_called_with(self.bucket_name) self.storage._bucket.get_blob().upload_from_file.assert_called_with( content, rewind=True, retry=DEFAULT_RETRY, size=len(data), content_type=mimetypes.guess_type(filename)[0], predefined_acl="publicRead", ) def test_delete(self): self.storage.delete(self.filename) self.storage._client.bucket.assert_called_with(self.bucket_name) self.storage._bucket.delete_blob.assert_called_with( self.filename, retry=DEFAULT_RETRY ) def test_exists(self): self.storage._bucket = mock.MagicMock() self.assertTrue(self.storage.exists(self.filename)) self.storage._bucket.get_blob.assert_called_with(self.filename) self.storage._bucket.reset_mock() self.storage._bucket.get_blob.return_value = None self.assertFalse(self.storage.exists(self.filename)) self.storage._bucket.get_blob.assert_called_with(self.filename) def test_exists_no_bucket(self): # exists('') should return False if the bucket doesn't exist self.storage._client = mock.MagicMock() self.storage._client.get_bucket.side_effect = NotFound("dang") self.assertFalse(self.storage.exists("")) def test_exists_bucket(self): # exists('') should return True if the bucket exists self.assertTrue(self.storage.exists("")) def test_listdir(self): file_names = ["some/path/1.txt", "2.txt", "other/path/3.txt", "4.txt"] subdir = "" self.storage._bucket = mock.MagicMock() blobs, prefixes = [], [] for name in file_names: directory = name.rsplit("/", 1)[0] + "/" if "/" in name else "" if directory == subdir: blob = mock.MagicMock(spec=Blob) blob.name = name.split("/")[-1] blobs.append(blob) else: prefixes.append(directory.split("/")[0] + "/") return_value = mock.MagicMock() return_value.__iter__ = mock.MagicMock(return_value=iter(blobs)) return_value.prefixes = prefixes self.storage._bucket.list_blobs.return_value = return_value dirs, files = self.storage.listdir("") self.assertEqual(len(dirs), 2) for directory in ["some", "other"]: self.assertTrue( directory in dirs, """ "{}" not in directory list "{}".""".format(directory, dirs), ) self.assertEqual(len(files), 2) for filename in ["2.txt", "4.txt"]: self.assertTrue( filename in files, """ "{}" not in file list "{}".""".format(filename, files), ) def test_listdir_subdir(self): file_names = ["some/path/1.txt", "some/2.txt"] subdir = "some/" self.storage._bucket = mock.MagicMock() blobs, prefixes = [], [] for name in file_names: directory = name.rsplit("/", 1)[0] + "/" if 
directory == subdir: blob = mock.MagicMock(spec=Blob) blob.name = name.split("/")[-1] blobs.append(blob) else: prefixes.append(directory.split(subdir)[1]) return_value = mock.MagicMock() return_value.__iter__ = mock.MagicMock(return_value=iter(blobs)) return_value.prefixes = prefixes self.storage._bucket.list_blobs.return_value = return_value dirs, files = self.storage.listdir(subdir) self.assertEqual(len(dirs), 1) self.assertTrue( "path" in dirs, """ "path" not in directory list "{}".""".format(dirs) ) self.assertEqual(len(files), 1) self.assertTrue( "2.txt" in files, """ "2.txt" not in files list "{}".""".format(files) ) def test_size(self): size = 1234 self.storage._bucket = mock.MagicMock() blob = mock.MagicMock() blob.size = size self.storage._bucket.get_blob.return_value = blob self.assertEqual(self.storage.size(self.filename), size) self.storage._bucket.get_blob.assert_called_with(self.filename) def test_size_no_file(self): self.storage._bucket = mock.MagicMock() self.storage._bucket.get_blob.return_value = None self.assertRaises(NotFound, self.storage.size, self.filename) def test_get_modified_time(self): naive_date = datetime.datetime(2017, 1, 2, 3, 4, 5, 678) aware_date = timezone.make_aware(naive_date, datetime.timezone.utc) self.storage._bucket = mock.MagicMock() blob = mock.MagicMock() blob.updated = aware_date self.storage._bucket.get_blob.return_value = blob with self.settings(TIME_ZONE="America/Montreal", USE_TZ=False): mt = self.storage.get_modified_time(self.filename) self.assertTrue(timezone.is_naive(mt)) naive_date_montreal = timezone.make_naive(aware_date) self.assertEqual(mt, naive_date_montreal) self.storage._bucket.get_blob.assert_called_with(self.filename) with self.settings(TIME_ZONE="America/Montreal", USE_TZ=True): mt = self.storage.get_modified_time(self.filename) self.assertTrue(timezone.is_aware(mt)) self.assertEqual(mt, aware_date) self.storage._bucket.get_blob.assert_called_with(self.filename) def test_get_created_time(self): naive_date = datetime.datetime(2017, 1, 2, 3, 4, 5, 678) aware_date = timezone.make_aware(naive_date, datetime.timezone.utc) self.storage._bucket = mock.MagicMock() blob = mock.MagicMock() blob.time_created = aware_date self.storage._bucket.get_blob.return_value = blob with self.settings(TIME_ZONE="America/Montreal", USE_TZ=False): mt = self.storage.get_created_time(self.filename) self.assertTrue(timezone.is_naive(mt)) naive_date_montreal = timezone.make_naive(aware_date) self.assertEqual(mt, naive_date_montreal) self.storage._bucket.get_blob.assert_called_with(self.filename) with self.settings(TIME_ZONE="America/Montreal", USE_TZ=True): mt = self.storage.get_created_time(self.filename) self.assertTrue(timezone.is_aware(mt)) self.assertEqual(mt, aware_date) self.storage._bucket.get_blob.assert_called_with(self.filename) def test_url_public_object(self): url = "https://example.com/mah-bukkit/{}".format(self.filename) self.storage.default_acl = "publicRead" self.storage._bucket = mock.MagicMock() blob = mock.MagicMock() blob.public_url = url blob.generate_signed_url = "not called" self.storage._bucket.blob.return_value = blob self.assertEqual(self.storage.url(self.filename), url) self.storage._bucket.blob.assert_called_with(self.filename) def test_url_not_public_file(self): secret_filename = "secret_file.txt" self.storage._bucket = mock.MagicMock() blob = mock.MagicMock() generate_signed_url = mock.MagicMock(return_value="http://signed_url") blob.public_url = "http://this_is_public_url" blob.generate_signed_url = generate_signed_url 
self.storage._bucket.blob.return_value = blob url = self.storage.url(secret_filename) self.storage._bucket.blob.assert_called_with(secret_filename) self.assertEqual(url, "http://signed_url") blob.generate_signed_url.assert_called_with( expiration=timedelta(seconds=86400), version="v4" ) def test_url_not_public_file_with_custom_expires(self): secret_filename = "secret_file.txt" self.storage._bucket = mock.MagicMock() blob = mock.MagicMock() generate_signed_url = mock.MagicMock(return_value="http://signed_url") blob.generate_signed_url = generate_signed_url self.storage._bucket.blob.return_value = blob self.storage.expiration = timedelta(seconds=3600) url = self.storage.url(secret_filename) self.storage._bucket.blob.assert_called_with(secret_filename) self.assertEqual(url, "http://signed_url") blob.generate_signed_url.assert_called_with( expiration=timedelta(seconds=3600), version="v4" ) def test_custom_endpoint_with_parameters(self): self.storage.custom_endpoint = "https://example.com" self.storage.default_acl = "publicRead" url = "{}/{}".format(self.storage.custom_endpoint, self.filename) self.assertEqual(self.storage.url(self.filename), url) bucket_name = "hyacinth" self.storage.default_acl = "projectPrivate" self.storage._bucket = mock.MagicMock() blob = mock.MagicMock() generate_signed_url = mock.MagicMock() blob.bucket = mock.MagicMock() type(blob.bucket).name = mock.PropertyMock(return_value=bucket_name) blob.generate_signed_url = generate_signed_url self.storage._bucket.blob.return_value = blob parameters = {"version": "v2", "method": "POST"} self.storage.url(self.filename, parameters=parameters) blob.generate_signed_url.assert_called_with( bucket_bound_hostname=self.storage.custom_endpoint, expiration=timedelta(seconds=86400), method="POST", version="v2", ) def test_get_available_name(self): self.storage.file_overwrite = True self.assertEqual(self.storage.get_available_name(self.filename), self.filename) self.storage._bucket = mock.MagicMock() self.storage._bucket.get_blob.return_value = None self.storage.file_overwrite = False self.assertEqual(self.storage.get_available_name(self.filename), self.filename) self.storage._bucket.get_blob.assert_called_with(self.filename) def test_get_available_name_unicode(self): filename = "ủⓝï℅ⅆℇ.txt" self.assertEqual(self.storage.get_available_name(filename), filename) def test_cache_control(self): data = "This is some test content." 
filename = "cache_control_file.txt" content = ContentFile(data) with override_settings( GS_OBJECT_PARAMETERS={"cache_control": "public, max-age=604800"} ): self.storage = gcloud.GoogleCloudStorage(bucket_name=self.bucket_name) self.storage.save(filename, content) bucket = self.storage.client.bucket(self.bucket_name) blob = bucket.get_blob(filename) self.assertEqual(blob.cache_control, "public, max-age=604800") def test_storage_save_gzip_twice(self): """Test saving the same file content twice with gzip enabled.""" # Given self.storage.gzip = True name = "test_storage_save.css" content = ContentFile("I should be gzip'd") # When self.storage.save(name, content) self.storage.save("test_storage_save_2.css", content) # Then self.storage._client.bucket.assert_called_with(self.bucket_name) obj = self.storage._bucket.get_blob() self.assertEqual(obj.content_encoding, "gzip") obj.upload_from_file.assert_called_with( mock.ANY, rewind=True, retry=DEFAULT_RETRY, size=None, predefined_acl=None, content_type="text/css", ) args, kwargs = obj.upload_from_file.call_args content = args[0] zfile = gzip.GzipFile(mode="rb", fileobj=content) self.assertEqual(zfile.read(), b"I should be gzip'd") def test_compress_content_len(self): """Test that file returned by _compress_content() is readable.""" self.storage.gzip = True content = ContentFile("I should be gzip'd") content = self.storage._compress_content(content) self.assertTrue(len(content.read()) > 0) def test_location_leading_slash(self): msg = ( "GoogleCloudStorage.location cannot begin with a leading slash. " "Found '/'. Use '' instead." ) with self.assertRaises(ImproperlyConfigured, msg=msg): gcloud.GoogleCloudStorage(location="/") def test_override_settings(self): with override_settings(GS_LOCATION="foo1"): storage = gcloud.GoogleCloudStorage() self.assertEqual(storage.location, "foo1") with override_settings(GS_LOCATION="foo2"): storage = gcloud.GoogleCloudStorage() self.assertEqual(storage.location, "foo2") def test_override_class_variable(self): class MyStorage1(gcloud.GoogleCloudStorage): location = "foo1" storage = MyStorage1() self.assertEqual(storage.location, "foo1") class MyStorage2(gcloud.GoogleCloudStorage): location = "foo2" storage = MyStorage2() self.assertEqual(storage.location, "foo2") def test_override_init_argument(self): storage = gcloud.GoogleCloudStorage(location="foo1") self.assertEqual(storage.location, "foo1") storage = gcloud.GoogleCloudStorage(location="foo2") self.assertEqual(storage.location, "foo2") def test_dupe_file_chunk_size(self): """ Tests that recreating a file that already exists in the bucket respects the `GS_BLOB_CHUNK_SIZE` setting """ chunk_size = 1024 * 256 with override_settings(GS_BLOB_CHUNK_SIZE=chunk_size): # Creating a new storage here since chunk-size is set as an # attribute on init storage = gcloud.GoogleCloudStorage() storage._bucket = mock.MagicMock() # Confirms that `get_blob` always returns a truthy value storage._bucket.get_blob.return_value = True storage.open(self.filename, "wb") storage._bucket.get_blob.assert_called_with( self.filename, chunk_size=chunk_size ) class GoogleCloudGzipClientTests(GCloudTestCase): def setUp(self): super().setUp() self.storage.gzip = True @mock.patch("google.cloud.storage.blob.Blob._do_upload") def test_storage_save_gzipped(self, *args): """ Test saving a gzipped file """ name = "test_storage_save.css.gz" content = ContentFile("I am gzip'd", name=name) blob = Blob("x", None) blob.upload_from_file = mock.MagicMock(side_effect=blob.upload_from_file) patcher = 
mock.patch("google.cloud.storage.Bucket.get_blob", return_value=blob) try: patcher.start() self.storage.save(name, content) obj = self.storage._bucket.get_blob() obj.upload_from_file.assert_called_with( mock.ANY, rewind=True, retry=DEFAULT_RETRY, size=11, predefined_acl=None, content_type="text/css", ) finally: patcher.stop() @mock.patch("google.cloud.storage.blob.Blob._do_upload") def test_storage_save_gzip(self, *args): """ Test saving a file with gzip enabled. """ name = "test_storage_save.css" content = ContentFile("I should be gzip'd") blob = Blob("x", None) blob.upload_from_file = mock.MagicMock(side_effect=blob.upload_from_file) patcher = mock.patch("google.cloud.storage.Bucket.get_blob", return_value=blob) try: patcher.start() self.storage.save(name, content) obj = self.storage._bucket.get_blob() obj.upload_from_file.assert_called_with( mock.ANY, rewind=True, retry=DEFAULT_RETRY, size=None, predefined_acl=None, content_type="text/css", ) args, kwargs = obj.upload_from_file.call_args content = args[0] zfile = gzip.GzipFile(mode="rb", fileobj=content) self.assertEqual(zfile.read(), b"I should be gzip'd") finally: patcher.stop() def test_storage_read_gzip(self, *args): """ Test reading a gzipped file decompresses content only once. """ name = "test_storage_save.css" file = GoogleCloudFile(name, "rb", self.storage) blob = mock.MagicMock() file.blob = blob blob.download_to_file = lambda f, checksum=None: f.write(b"No gzip") blob.content_encoding = "gzip" f = file._get_file() f.read() # This should not fail django-storages-1.14.5/tests/test_s3.py000066400000000000000000001246341475414346200200150ustar00rootroot00000000000000import datetime import gzip import io import os import pickle import threading from textwrap import dedent from unittest import mock from unittest import skipIf from urllib.parse import urlparse import boto3 import boto3.s3.transfer import botocore from botocore.config import Config as ClientConfig from botocore.exceptions import ClientError from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.files.base import ContentFile from django.core.files.base import File from django.test import TestCase from django.test import override_settings from django.utils.timezone import is_aware from moto import mock_s3 from storages.backends import s3 from tests.utils import NonSeekableContentFile class S3ManifestStaticStorageTestStorage(s3.S3ManifestStaticStorage): def read_manifest(self): return None class S3StorageTests(TestCase): def setUp(self): self.storage = s3.S3Storage() self.storage._connections.connection = mock.MagicMock() self.storage._unsigned_connections.connection = mock.MagicMock() @mock.patch("boto3.Session") def test_s3_session(self, session): with override_settings(AWS_S3_SESSION_PROFILE="test_profile"): storage = s3.S3Storage() _ = storage.connection session.assert_called_once_with(profile_name="test_profile") @mock.patch("boto3.Session.resource") def test_client_config(self, resource): with override_settings( AWS_S3_CLIENT_CONFIG=ClientConfig(max_pool_connections=30) ): storage = s3.S3Storage() _ = storage.connection resource.assert_called_once() self.assertEqual(30, resource.call_args[1]["config"].max_pool_connections) @mock.patch("boto3.Session.resource") def test_connection_unsiged(self, resource): with override_settings(AWS_S3_ADDRESSING_STYLE="virtual"): storage = s3.S3Storage() _ = storage.unsigned_connection resource.assert_called_once() self.assertEqual( botocore.UNSIGNED, 
resource.call_args[1]["config"].signature_version ) self.assertEqual( "virtual", resource.call_args[1]["config"].s3["addressing_style"] ) def test_pickle_with_bucket(self): """ Test that the storage can be pickled with a bucket attached """ # Ensure the bucket has been used self.storage.bucket self.assertIsNotNone(self.storage._bucket) # Can't pickle MagicMock, but you can't pickle a real Bucket object either p = pickle.dumps(self.storage) new_storage = pickle.loads(p) self.assertIsInstance(new_storage._connections, threading.local) # Put the mock connection back in new_storage._connections.connection = mock.MagicMock() self.assertIsNone(new_storage._bucket) new_storage.bucket self.assertIsNotNone(new_storage._bucket) def test_pickle_without_bucket(self): """ Test that the storage can be pickled, without a bucket instance """ # Can't pickle a threadlocal p = pickle.dumps(self.storage) new_storage = pickle.loads(p) self.assertIsInstance(new_storage._connections, threading.local) def test_storage_url_slashes(self): """ Test URL generation. """ self.storage.custom_domain = "example.com" # We expect no leading slashes in the path, # and trailing slashes should be preserved. self.assertEqual(self.storage.url(""), "https://example.com/") self.assertEqual(self.storage.url("path"), "https://example.com/path") self.assertEqual(self.storage.url("path/"), "https://example.com/path/") self.assertEqual(self.storage.url("path/1"), "https://example.com/path/1") self.assertEqual(self.storage.url("path/1/"), "https://example.com/path/1/") def test_storage_save(self): """ Test saving a file """ name = "test_storage_save.txt" content = ContentFile("new content") self.storage.save(name, content) self.storage.bucket.Object.assert_called_once_with(name) obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( mock.ANY, ExtraArgs={ "ContentType": "text/plain", }, Config=self.storage.transfer_config, ) def test_storage_save_non_seekable(self): """ Test saving a non-seekable file """ name = "test_storage_save.txt" content = NonSeekableContentFile("new content") self.storage.save(name, content) self.storage.bucket.Object.assert_called_once_with(name) obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( mock.ANY, ExtraArgs={ "ContentType": "text/plain", }, Config=self.storage.transfer_config, ) def test_storage_save_with_default_acl(self): """ Test saving a file with user defined ACL. """ name = "test_storage_save.txt" content = ContentFile("new content") self.storage.default_acl = "private" self.storage.save(name, content) self.storage.bucket.Object.assert_called_once_with(name) obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( mock.ANY, ExtraArgs={ "ContentType": "text/plain", "ACL": "private", }, Config=self.storage.transfer_config, ) def test_storage_object_parameters_not_overwritten_by_default(self): """ Test saving a file with user defined ACL. """ name = "test_storage_save.txt" content = ContentFile("new content") self.storage.default_acl = "public-read" self.storage.object_parameters = {"ACL": "private"} self.storage.save(name, content) self.storage.bucket.Object.assert_called_once_with(name) obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( mock.ANY, ExtraArgs={ "ContentType": "text/plain", "ACL": "private", }, Config=self.storage.transfer_config, ) def test_content_type(self): """ Test saving a file with a None content type. 
""" name = "test_image.jpg" content = ContentFile("data") content.content_type = None self.storage.save(name, content) self.storage.bucket.Object.assert_called_once_with(name) obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( mock.ANY, ExtraArgs={ "ContentType": "image/jpeg", }, Config=self.storage.transfer_config, ) def test_storage_save_gzipped(self): """ Test saving a gzipped file """ name = "test_storage_save.gz" content = ContentFile("I am gzip'd") self.storage.save(name, content) obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_once_with( mock.ANY, ExtraArgs={ "ContentType": "application/octet-stream", "ContentEncoding": "gzip", }, Config=self.storage.transfer_config, ) def test_content_type_set_explicitly(self): name = "test_file.gz" content = ContentFile("data") def get_object_parameters(name): return {"ContentType": "application/gzip"} self.storage.get_object_parameters = get_object_parameters self.storage.save(name, content) obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( mock.ANY, ExtraArgs={ "ContentType": "application/gzip", }, Config=self.storage.transfer_config, ) def test_storage_save_gzipped_non_seekable(self): """ Test saving a gzipped file """ name = "test_storage_save.gz" content = NonSeekableContentFile("I am gzip'd") self.storage.save(name, content) obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_once_with( mock.ANY, ExtraArgs={ "ContentType": "application/octet-stream", "ContentEncoding": "gzip", }, Config=self.storage.transfer_config, ) def test_storage_save_gzip(self): """ Test saving a file with gzip enabled. """ self.storage.gzip = True name = "test_storage_save.css" content = ContentFile("I should be gzip'd") self.storage.save(name, content) obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( mock.ANY, ExtraArgs={ "ContentType": "text/css", "ContentEncoding": "gzip", }, Config=self.storage.transfer_config, ) args, kwargs = obj.upload_fileobj.call_args content = args[0] zfile = gzip.GzipFile(mode="rb", fileobj=content) self.assertEqual(zfile.read(), b"I should be gzip'd") def test_storage_save_gzip_twice(self): """ Test saving the same file content twice with gzip enabled. """ # Given self.storage.gzip = True name = "test_storage_save.css" content = ContentFile("I should be gzip'd") # When self.storage.save(name, content) self.storage.save("test_storage_save_2.css", content) # Then obj = self.storage.bucket.Object.return_value obj.upload_fileobj.assert_called_with( mock.ANY, ExtraArgs={ "ContentType": "text/css", "ContentEncoding": "gzip", }, Config=self.storage.transfer_config, ) args, kwargs = obj.upload_fileobj.call_args content = args[0] zfile = gzip.GzipFile(mode="rb", fileobj=content) self.assertEqual(zfile.read(), b"I should be gzip'd") def test_compress_content_len(self): """ Test that file returned by _compress_content() is readable. 
""" self.storage.gzip = True content = ContentFile(b"I should be gzip'd") content = self.storage._compress_content(content) self.assertTrue(len(content.read()) > 0) def test_storage_open_read_string(self): """ Test opening a file in "r" mode (ie reading as string, not bytes) """ name = "test_open_read_string.txt" with self.storage.open(name, "r") as file: content_str = file.read() self.assertEqual(content_str, "") def test_storage_open_write(self): """ Test opening a file in write mode """ name = "test_open_for_writïng.txt" content = "new content" # Set the encryption flag used for multipart uploads self.storage.object_parameters = { "ServerSideEncryption": "AES256", "StorageClass": "REDUCED_REDUNDANCY", "ACL": "public-read", } with self.storage.open(name, "wb") as file: self.storage.bucket.Object.assert_called_with(name) obj = self.storage.bucket.Object.return_value # Set the name of the mock object obj.key = name multipart = obj.initiate_multipart_upload.return_value multipart.Part.return_value.upload.side_effect = [ {"ETag": "123"}, ] file.write(content) obj.initiate_multipart_upload.assert_called_with( ACL="public-read", ContentType="text/plain", ServerSideEncryption="AES256", StorageClass="REDUCED_REDUNDANCY", ) multipart.Part.assert_called_with(1) part = multipart.Part.return_value part.upload.assert_called_with(Body=content.encode()) multipart.complete.assert_called_once_with( MultipartUpload={"Parts": [{"ETag": "123", "PartNumber": 1}]} ) def test_write_bytearray(self): """Test that bytearray write exactly (no extra "bytearray" from stringify).""" name = "saved_file.bin" content = bytearray(b"content") with self.storage.open(name, "wb") as file: obj = self.storage.bucket.Object.return_value # Set the name of the mock object obj.key = name bytes_written = file.write(content) self.assertEqual(len(content), bytes_written) def test_storage_open_no_write(self): """ Test opening file in write mode and closing without writing. A file should be created as by obj.put(...). """ name = "test_open_no_write.txt" # Set the encryption flag used for puts self.storage.object_parameters = { "ServerSideEncryption": "AES256", "StorageClass": "REDUCED_REDUNDANCY", } with self.storage.open(name, "wb"): self.storage.bucket.Object.assert_called_with(name) obj = self.storage.bucket.Object.return_value obj.load.side_effect = ClientError( {"Error": {}, "ResponseMetadata": {"HTTPStatusCode": 404}}, "head_bucket", ) # Set the name of the mock object obj.key = name obj.load.assert_called_once_with() obj.put.assert_called_once_with( Body=b"", ContentType="text/plain", ServerSideEncryption="AES256", StorageClass="REDUCED_REDUNDANCY", ) def test_storage_open_no_overwrite_existing(self): """ Test opening an existing file in write mode and closing without writing. 
""" name = "test_open_no_overwrite_existing.txt" # Set the encryption flag used for puts self.storage.object_parameters = { "ServerSideEncryption": "AES256", "StorageClass": "REDUCED_REDUNDANCY", } with self.storage.open(name, "wb"): self.storage.bucket.Object.assert_called_with(name) obj = self.storage.bucket.Object.return_value # Set the name of the mock object obj.key = name obj.load.assert_called_once_with() obj.put.assert_not_called() def test_storage_write_beyond_buffer_size(self): """ Test writing content that exceeds the buffer size """ name = "test_open_for_writïng_beyond_buffer_size.txt" # Set the encryption flag used for multipart uploads self.storage.object_parameters = { "ServerSideEncryption": "AES256", "StorageClass": "REDUCED_REDUNDANCY", } with self.storage.open(name, "wb") as file: self.storage.bucket.Object.assert_called_with(name) obj = self.storage.bucket.Object.return_value # Set the name of the mock object obj.key = name # Initiate the multipart upload file.write("") obj.initiate_multipart_upload.assert_called_with( ContentType="text/plain", ServerSideEncryption="AES256", StorageClass="REDUCED_REDUNDANCY", ) multipart = obj.initiate_multipart_upload.return_value # Write content at least twice as long as the buffer size written_content = "" counter = 1 multipart.Part.return_value.upload.side_effect = [ {"ETag": "123"}, {"ETag": "456"}, ] while len(written_content) < 2 * file.buffer_size: content = "hello, aws {counter}\n".format(counter=counter) # Write more than just a few bytes in each iteration to keep the # test reasonably fast content += "*" * int(file.buffer_size / 10) file.write(content) written_content += content counter += 1 self.assertListEqual( multipart.Part.call_args_list, [mock.call(1), mock.call(2)] ) part = multipart.Part.return_value uploaded_content = "".join( args_list[1]["Body"].decode() for args_list in part.upload.call_args_list ) self.assertEqual(uploaded_content, written_content) multipart.complete.assert_called_once_with( MultipartUpload={ "Parts": [ {"ETag": "123", "PartNumber": 1}, {"ETag": "456", "PartNumber": 2}, ] } ) def test_storage_exists(self): self.assertTrue(self.storage.exists("file.txt")) self.storage.connection.meta.client.head_object.assert_called_with( Bucket=self.storage.bucket_name, Key="file.txt", ) def test_storage_exists_false(self): self.storage.connection.meta.client.head_object.side_effect = ClientError( {"Error": {}, "ResponseMetadata": {"HTTPStatusCode": 404}}, "HeadObject", ) self.assertFalse(self.storage.exists("file.txt")) self.storage.connection.meta.client.head_object.assert_called_with( Bucket=self.storage.bucket_name, Key="file.txt", ) def test_storage_exists_other_error_reraise(self): self.storage.connection.meta.client.head_object.side_effect = ClientError( {"Error": {}, "ResponseMetadata": {"HTTPStatusCode": 403}}, "HeadObject", ) with self.assertRaises(ClientError) as cm: self.storage.exists("file.txt") self.assertEqual( cm.exception.response["ResponseMetadata"]["HTTPStatusCode"], 403 ) def test_storage_delete(self): self.storage.delete("path/to/file.txt") self.storage.bucket.Object.assert_called_with("path/to/file.txt") self.storage.bucket.Object.return_value.delete.assert_called_with() def test_storage_delete_does_not_exist(self): self.storage.bucket.Object("file.txt").delete.side_effect = ClientError( {"Error": {}, "ResponseMetadata": {"HTTPStatusCode": 404}}, "DeleteObject", ) self.storage.delete("file.txt") # No problem def test_storage_delete_other_error_reraise(self): 
self.storage.bucket.Object("file.txt").delete.side_effect = ClientError( {"Error": {}, "ResponseMetadata": {"HTTPStatusCode": 403}}, "DeleteObject", ) with self.assertRaises(ClientError) as cm: self.storage.delete("file.txt") self.assertEqual( cm.exception.response["ResponseMetadata"]["HTTPStatusCode"], 403 ) def test_storage_listdir_base(self): # Files: # some/path/1.txt # 2.txt # other/path/3.txt # 4.txt pages = [ { "CommonPrefixes": [ {"Prefix": "some"}, {"Prefix": "other"}, ], "Contents": [ {"Key": "2.txt"}, {"Key": "4.txt"}, ], }, ] paginator = mock.MagicMock() paginator.paginate.return_value = pages self.storage._connections.connection.meta.client.get_paginator.return_value = ( paginator ) dirs, files = self.storage.listdir("") paginator.paginate.assert_called_with( Bucket=settings.AWS_STORAGE_BUCKET_NAME, Delimiter="/", Prefix="" ) self.assertEqual(dirs, ["some", "other"]) self.assertEqual(files, ["2.txt", "4.txt"]) def test_storage_listdir_subdir(self): # Files: # some/path/1.txt # some/2.txt pages = [ { "CommonPrefixes": [ {"Prefix": "some/path"}, ], "Contents": [ {"Key": "some/2.txt"}, ], }, ] paginator = mock.MagicMock() paginator.paginate.return_value = pages self.storage._connections.connection.meta.client.get_paginator.return_value = ( paginator ) dirs, files = self.storage.listdir("some/") paginator.paginate.assert_called_with( Bucket=settings.AWS_STORAGE_BUCKET_NAME, Delimiter="/", Prefix="some/" ) self.assertEqual(dirs, ["path"]) self.assertEqual(files, ["2.txt"]) def test_storage_listdir_empty(self): # Files: # dir/ pages = [ { "Contents": [ {"Key": "dir/"}, ], }, ] paginator = mock.MagicMock() paginator.paginate.return_value = pages self.storage._connections.connection.meta.client.get_paginator.return_value = ( paginator ) dirs, files = self.storage.listdir("dir/") paginator.paginate.assert_called_with( Bucket=settings.AWS_STORAGE_BUCKET_NAME, Delimiter="/", Prefix="dir/" ) self.assertEqual(dirs, []) self.assertEqual(files, []) def test_storage_size(self): obj = self.storage.bucket.Object.return_value obj.content_length = 4098 name = "file.txt" self.assertEqual(self.storage.size(name), obj.content_length) def test_storage_size_not_exists(self): self.storage.bucket.Object.side_effect = ClientError( {"Error": {}, "ResponseMetadata": {"HTTPStatusCode": 404}}, "HeadObject", ) name = "file.txt" with self.assertRaisesMessage( FileNotFoundError, "File does not exist: file.txt" ): self.storage.size(name) def test_storage_mtime(self): # Test both USE_TZ cases for use_tz in (True, False): with self.settings(USE_TZ=use_tz): self._test_storage_mtime(use_tz) def _test_storage_mtime(self, use_tz): obj = self.storage.bucket.Object.return_value obj.last_modified = datetime.datetime.now(datetime.timezone.utc) name = "file.txt" self.assertIs( settings.USE_TZ, is_aware(self.storage.get_modified_time(name)), ( "{} datetime object expected from get_modified_time() when USE_TZ={}" ).format(("Naive", "Aware")[settings.USE_TZ], settings.USE_TZ), ) def test_storage_url(self): name = "test_storage_size.txt" url = "http://aws.amazon.com/%s" % name self.storage.connection.meta.client.generate_presigned_url.return_value = url self.storage.bucket.name = "bucket" self.assertEqual(self.storage.url(name), url) self.storage.connection.meta.client.generate_presigned_url.assert_called_with( "get_object", Params={"Bucket": self.storage.bucket.name, "Key": name}, ExpiresIn=self.storage.querystring_expire, HttpMethod=None, ) custom_expire = 123 self.assertEqual(self.storage.url(name, expire=custom_expire), 
url) self.storage.connection.meta.client.generate_presigned_url.assert_called_with( "get_object", Params={"Bucket": self.storage.bucket.name, "Key": name}, ExpiresIn=custom_expire, HttpMethod=None, ) custom_method = "HEAD" self.assertEqual(self.storage.url(name, http_method=custom_method), url) self.storage.connection.meta.client.generate_presigned_url.assert_called_with( "get_object", Params={"Bucket": self.storage.bucket.name, "Key": name}, ExpiresIn=self.storage.querystring_expire, HttpMethod=custom_method, ) def test_url_unsigned(self): self.storage.querystring_auth = False self.storage.url("test_name") self.storage.unsigned_connection.meta.client.generate_presigned_url.assert_called_once() @mock.patch("storages.backends.s3.datetime") def test_storage_url_custom_domain_signed_urls(self, dt): key_id = "test-key" filename = "file.txt" pem = dedent( """\ -----BEGIN RSA PRIVATE KEY----- MIICWwIBAAKBgQCXVuwcMk+JmVSKuQ1K4dZx4Z1dEcRQgTlqvhAyljIpttXlZh2/ fD3GkJCiqfwEmo+cdNK/LFzRj/CX8Wz1z1lH2USONpG6sAkotkatCbejiItDu5y6 janGJHfuWXu6B/o9gwZylU1gIsePY3lLNk+r9QhXUO4jXw6zLJftVwKPhQIDAQAB AoGAbpkRV9HUmoQ5al+uPSkp5HOy4s8XHpYxdbaMc8ubwSxiyJCF8OhE5RXE/Xso N90UUox1b0xmUKfWddPzgvgTD/Ub7D6Ukf+nVWDX60tWgNxICAUHptGL3tWweaAy H+0+vZ0TzvTt9r00vW0FzO7F8X9/Rs1ntDRLtF3RCCxdq0kCQQDHFu+t811lCvy/ 67rMEKGvNsNNSTrzOrNr3PqUrCnOrzKazjFVjsKv5VzI/U+rXGYKWJsMpuCFiHZ3 DILUC09TAkEAwpm2S6MN6pzn9eY6pmhOxZ+GQGGRUkKZfC1GDxaRSRb8sKTjptYw WSemJSxiDzdj3Po2hF0lbhkpJgUq6xnCxwJAZgHHfn5CLSJrDD7Q7/vZi/foK3JJ BRTfl3Wa4pAvv5meuRjKyEakVBGV79lyd5+ZHNX3Y40hXunjoO3FHrZIxwJAdRzu waxahrRxQOKSr20c4wAzWnGddIUSO9I/VHs/al5EKsbBHrnOlQkwizSfuwqZtfZ7 csNf8FeCFRiNELoLJwJAZxWBE2+8J9VW9AQ0SE7j4FyM/B8FvRhF5PLAAsw/OxHO SxiFP7Ptdac1tm5H5zOqaqSHWphI19HNNilXKmxuCA== -----END RSA PRIVATE KEY-----""" ).encode("ascii") url = "https://mock.cloudfront.net/file.txt" signed_url = ( url + "?Expires=3600&Signature=DbqVgh3FHtttQxof214tSAVE8Nqn3Q4Ii7eR3iykbOqAPbV" "89HC3EB~0CWxarpLNtbfosS5LxiP5EutriM7E8uR4Gm~UVY-PFUjPcwqdnmAiKJF0EVs7koJc" "MR8MKDStuWfFKVUPJ8H7ORYTOrixyHBV2NOrpI6SN5UX6ctNM50_&Key-Pair-Id=test-key" ) self.storage.custom_domain = "mock.cloudfront.net" for pem_to_signer in (s3._use_cryptography_signer(), s3._use_rsa_signer()): self.storage.cloudfront_signer = pem_to_signer(key_id, pem) self.storage.querystring_auth = False self.assertEqual(self.storage.url(filename), url) self.storage.querystring_auth = True dt.utcnow.return_value = datetime.datetime.utcfromtimestamp(0) self.assertEqual(self.storage.url(filename), signed_url) def test_generated_url_is_encoded(self): self.storage.custom_domain = "mock.cloudfront.net" filename = "whacky & filename.mp4" url = self.storage.url(filename) parsed_url = urlparse(url) self.assertEqual(parsed_url.path, "/whacky%20%26%20filename.mp4") self.assertFalse(self.storage.bucket.meta.client.generate_presigned_url.called) def test_special_characters(self): self.storage.custom_domain = "mock.cloudfront.net" name = "ãlöhâ.jpg" content = ContentFile("new content") self.storage.save(name, content) self.storage.bucket.Object.assert_called_once_with(name) url = self.storage.url(name) parsed_url = urlparse(url) self.assertEqual(parsed_url.path, "/%C3%A3l%C3%B6h%C3%A2.jpg") def test_custom_domain_parameters(self): self.storage.custom_domain = "mock.cloudfront.net" filename = "filename.mp4" url = self.storage.url(filename, parameters={"version": 10}) parsed_url = urlparse(url) self.assertEqual(parsed_url.path, "/filename.mp4") self.assertEqual(parsed_url.query, "version=10") @skipIf(threading is None, "Test requires threading") def 
test_connection_threading(self): connections = [] def thread_storage_connection(): connections.append(self.storage.connection) for _ in range(2): t = threading.Thread(target=thread_storage_connection) t.start() t.join() # Connection for each thread needs to be unique self.assertIsNot(connections[0], connections[1]) def test_location_leading_slash(self): msg = ( "S3Storage.location cannot begin with a leading slash. " "Found '/'. Use '' instead." ) with self.assertRaises(ImproperlyConfigured, msg=msg): s3.S3Storage(location="/") def test_override_settings(self): with override_settings(AWS_LOCATION="foo1"): storage = s3.S3Storage() self.assertEqual(storage.location, "foo1") with override_settings(AWS_LOCATION="foo2"): storage = s3.S3Storage() self.assertEqual(storage.location, "foo2") def test_override_class_variable(self): class MyStorage1(s3.S3Storage): location = "foo1" storage = MyStorage1() self.assertEqual(storage.location, "foo1") class MyStorage2(s3.S3Storage): location = "foo2" storage = MyStorage2() self.assertEqual(storage.location, "foo2") def test_override_init_argument(self): storage = s3.S3Storage(location="foo1") self.assertEqual(storage.location, "foo1") storage = s3.S3Storage(location="foo2") self.assertEqual(storage.location, "foo2") def test_use_threads_false(self): with override_settings(AWS_S3_USE_THREADS=False): storage = s3.S3Storage() self.assertFalse(storage.transfer_config.use_threads) def test_transfer_config(self): storage = s3.S3Storage() self.assertTrue(storage.transfer_config.use_threads) transfer_config = boto3.s3.transfer.TransferConfig(use_threads=False) with override_settings(AWS_S3_TRANSFER_CONFIG=transfer_config): storage = s3.S3Storage() self.assertFalse(storage.transfer_config.use_threads) def test_cloudfront_config(self): # Valid configs storage = s3.S3Storage() self.assertIsNone(storage.cloudfront_signer) key_id = "test-id" pem = dedent( """\ -----BEGIN RSA PRIVATE KEY----- MIICWwIBAAKBgQCXVuwcMk+JmVSKuQ1K4dZx4Z1dEcRQgTlqvhAyljIpttXlZh2/ fD3GkJCiqfwEmo+cdNK/LFzRj/CX8Wz1z1lH2USONpG6sAkotkatCbejiItDu5y6 janGJHfuWXu6B/o9gwZylU1gIsePY3lLNk+r9QhXUO4jXw6zLJftVwKPhQIDAQAB AoGAbpkRV9HUmoQ5al+uPSkp5HOy4s8XHpYxdbaMc8ubwSxiyJCF8OhE5RXE/Xso N90UUox1b0xmUKfWddPzgvgTD/Ub7D6Ukf+nVWDX60tWgNxICAUHptGL3tWweaAy H+0+vZ0TzvTt9r00vW0FzO7F8X9/Rs1ntDRLtF3RCCxdq0kCQQDHFu+t811lCvy/ 67rMEKGvNsNNSTrzOrNr3PqUrCnOrzKazjFVjsKv5VzI/U+rXGYKWJsMpuCFiHZ3 DILUC09TAkEAwpm2S6MN6pzn9eY6pmhOxZ+GQGGRUkKZfC1GDxaRSRb8sKTjptYw WSemJSxiDzdj3Po2hF0lbhkpJgUq6xnCxwJAZgHHfn5CLSJrDD7Q7/vZi/foK3JJ BRTfl3Wa4pAvv5meuRjKyEakVBGV79lyd5+ZHNX3Y40hXunjoO3FHrZIxwJAdRzu waxahrRxQOKSr20c4wAzWnGddIUSO9I/VHs/al5EKsbBHrnOlQkwizSfuwqZtfZ7 csNf8FeCFRiNELoLJwJAZxWBE2+8J9VW9AQ0SE7j4FyM/B8FvRhF5PLAAsw/OxHO SxiFP7Ptdac1tm5H5zOqaqSHWphI19HNNilXKmxuCA== -----END RSA PRIVATE KEY-----""" ).encode("ascii") with override_settings(AWS_CLOUDFRONT_KEY_ID=key_id, AWS_CLOUDFRONT_KEY=pem): storage = s3.S3Storage() self.assertIsNotNone(storage.cloudfront_signer) # allow disabling cloudfront signing storage = s3.S3Storage(cloudfront_signer=None) self.assertIsNone(storage.cloudfront_signer) # allow disabling cloudfront signing in subclass class Storage(s3.S3Storage): cloudfront_signer = None self.assertIsNone(Storage().cloudfront_signer) storage = s3.S3Storage(cloudfront_key_id=key_id, cloudfront_key=pem) self.assertIsNotNone(storage.cloudfront_signer) cloudfront_signer = storage.get_cloudfront_signer(key_id, pem) storage = s3.S3Storage(cloudfront_signer=cloudfront_signer) self.assertIsNotNone(storage.cloudfront_signer) with 
override_settings(AWS_CLOUDFRONT_KEY_ID=key_id): storage = s3.S3Storage(cloudfront_key=pem) self.assertIsNotNone(storage.cloudfront_signer) # Invalid configs msg = ( "Both AWS_CLOUDFRONT_KEY_ID/cloudfront_key_id and " "AWS_CLOUDFRONT_KEY/cloudfront_key must be provided together." ) with override_settings(AWS_CLOUDFRONT_KEY_ID=key_id): with self.assertRaisesMessage(ImproperlyConfigured, msg): storage = s3.S3Storage() with override_settings(AWS_CLOUDFRONT_KEY=pem): with self.assertRaisesMessage(ImproperlyConfigured, msg): storage = s3.S3Storage() with self.assertRaisesMessage(ImproperlyConfigured, msg): storage = s3.S3Storage(cloudfront_key_id=key_id) with self.assertRaisesMessage(ImproperlyConfigured, msg): storage = s3.S3Storage(cloudfront_key=pem) def test_auth_config(self): # Valid configs with override_settings( AWS_S3_ACCESS_KEY_ID="foo", AWS_S3_SECRET_ACCESS_KEY="boo" ): storage = s3.S3Storage() self.assertEqual(storage.access_key, "foo") self.assertEqual(storage.secret_key, "boo") with override_settings(AWS_ACCESS_KEY_ID="foo", AWS_SECRET_ACCESS_KEY="boo"): storage = s3.S3Storage() self.assertEqual(storage.access_key, "foo") self.assertEqual(storage.secret_key, "boo") with mock.patch.dict( os.environ, {"AWS_S3_ACCESS_KEY_ID": "foo", "AWS_S3_SECRET_ACCESS_KEY": "boo"}, ): storage = s3.S3Storage() self.assertEqual(storage.access_key, "foo") self.assertEqual(storage.secret_key, "boo") with mock.patch.dict( os.environ, {"AWS_ACCESS_KEY_ID": "foo", "AWS_SECRET_ACCESS_KEY": "boo"} ): storage = s3.S3Storage() self.assertEqual(storage.access_key, "foo") self.assertEqual(storage.secret_key, "boo") storage = s3.S3Storage(access_key="foo", secret_key="boo") self.assertEqual(storage.access_key, "foo") self.assertEqual(storage.secret_key, "boo") # Invalid configs msg = ( "AWS_S3_SESSION_PROFILE/session_profile should not be provided with " "AWS_S3_ACCESS_KEY_ID/access_key and AWS_S3_SECRET_ACCESS_KEY/secret_key" ) with override_settings( AWS_ACCESS_KEY_ID="foo", AWS_SECRET_ACCESS_KEY="boo", AWS_S3_SESSION_PROFILE="moo", ): with self.assertRaisesMessage(ImproperlyConfigured, msg): storage = s3.S3Storage() with self.assertRaisesMessage(ImproperlyConfigured, msg): storage = s3.S3Storage( access_key="foo", secret_key="boo", session_profile="moo" ) def test_security_token(self): with override_settings(AWS_SESSION_TOKEN="baz"): storage = s3.S3Storage() self.assertEqual(storage.security_token, "baz") with override_settings(AWS_SECURITY_TOKEN="baz"): storage = s3.S3Storage() self.assertEqual(storage.security_token, "baz") with mock.patch.dict( os.environ, {"AWS_SESSION_TOKEN": "baz"}, ): storage = s3.S3Storage() self.assertEqual(storage.security_token, "baz") with mock.patch.dict( os.environ, {"AWS_SECURITY_TOKEN": "baz"}, ): storage = s3.S3Storage() self.assertEqual(storage.security_token, "baz") class S3StaticStorageTests(TestCase): def setUp(self): self.storage = s3.S3StaticStorage() self.storage._connections.connection = mock.MagicMock() def test_querystring_auth(self): self.assertFalse(self.storage.querystring_auth) class S3ManifestStaticStorageTests(TestCase): def setUp(self): self.storage = S3ManifestStaticStorageTestStorage() self.storage._connections.connection = mock.MagicMock() def test_querystring_auth(self): self.assertFalse(self.storage.querystring_auth) def test_save(self): self.storage.save("x.txt", ContentFile(b"abc")) class S3FileTests(TestCase): # Remove the override_settings after Python3.7 is dropped @override_settings(AWS_S3_OBJECT_PARAMETERS={"ContentType": "text/html"}) def 
setUp(self) -> None: self.storage = s3.S3Storage() self.storage._connections.connection = mock.MagicMock() def test_loading_ssec(self): params = {"SSECustomerKey": "xyz", "CacheControl": "never"} self.storage.get_object_parameters = lambda name: params filtered = {"SSECustomerKey": "xyz"} f = s3.S3File("test", "r", self.storage) f.obj.load.assert_called_once_with(**filtered) f.file f.obj.download_fileobj.assert_called_once_with( mock.ANY, ExtraArgs=filtered, Config=self.storage.transfer_config ) def test_closed(self): with s3.S3File("test", "wb", self.storage) as f: with self.subTest("after init"): self.assertFalse(f.closed) with self.subTest("after file access"): # Ensure _get_file has been called f.file self.assertFalse(f.closed) with self.subTest("after close"): f.close() self.assertTrue(f.closed) with self.subTest("reopening"): f.file self.assertFalse(f.closed) def test_reopening(self): f = s3.S3File("test", "wb", self.storage) with f.open() as fp: fp.write(b"xyz") with f.open() as fp: fp.write(b"xyz") # Properties are reset self.assertEqual(f._write_counter, 0) self.assertEqual(f._raw_bytes_written, 0) self.assertFalse(f._is_dirty) self.assertIsNone(f._multipart) @mock_s3 class S3StorageTestsWithMoto(TestCase): """ Using mock_s3 as a class decorator automatically decorates methods, but NOT classmethods or staticmethods. """ def setUp(cls): super().setUp() cls.storage = s3.S3Storage() cls.bucket = cls.storage.connection.Bucket(settings.AWS_STORAGE_BUCKET_NAME) cls.bucket.create() def test_save_bytes_file(self): self.storage.save("bytes_file.txt", File(io.BytesIO(b"foo1"))) self.assertEqual( b"foo1", self.bucket.Object("bytes_file.txt").get()["Body"].read(), ) def test_save_string_file(self): self.storage.save("string_file.txt", File(io.StringIO("foo2"))) self.assertEqual( b"foo2", self.bucket.Object("string_file.txt").get()["Body"].read(), ) def test_save_bytes_content_file(self): self.storage.save("bytes_content.txt", ContentFile(b"foo3")) self.assertEqual( b"foo3", self.bucket.Object("bytes_content.txt").get()["Body"].read(), ) def test_save_string_content_file(self): self.storage.save("string_content.txt", ContentFile("foo4")) self.assertEqual( b"foo4", self.bucket.Object("string_content.txt").get()["Body"].read(), ) def test_content_type_guess(self): """ Test saving a file where the ContentType is guessed from the filename. """ name = "test_image.jpg" content = ContentFile(b"data") content.content_type = None self.storage.save(name, content) s3_object_fetched = self.bucket.Object(name).get() self.assertEqual(b"data", s3_object_fetched["Body"].read()) self.assertEqual(s3_object_fetched["ContentType"], "image/jpeg") def test_content_type_attribute(self): """ Test saving a file with a custom content type attribute. """ content = ContentFile(b"data") content.content_type = "test/foo" self.storage.save("test_file", content) s3_object_fetched = self.bucket.Object("test_file").get() self.assertEqual(b"data", s3_object_fetched["Body"].read()) self.assertEqual(s3_object_fetched["ContentType"], "test/foo") def test_content_type_not_detectable(self): """ Test saving a file with no detectable content type. 
""" content = ContentFile(b"data") content.content_type = None self.storage.save("test_file", content) s3_object_fetched = self.bucket.Object("test_file").get() self.assertEqual(b"data", s3_object_fetched["Body"].read()) self.assertEqual( s3_object_fetched["ContentType"], s3.S3Storage.default_content_type, ) def test_storage_open_reading_with_newlines(self): """Test file reading with "r" and "rb" and various newline characters.""" name = "test_storage_open_read_with_newlines.txt" with io.BytesIO() as temp_file: temp_file.write(b"line1\nline2\r\nmore\rtext\n") self.storage.save(name, temp_file) file = self.storage.open(name, "r") content_str = file.read() file.close() self.assertEqual(content_str, "line1\nline2\nmore\ntext\n") with io.BytesIO() as temp_file: temp_file.write(b"line1\nline2\r\nmore\rtext\n") self.storage.save(name, temp_file) file = self.storage.open(name, "rb") content_str = file.read() file.close() self.assertEqual(content_str, b"line1\nline2\r\nmore\rtext\n") with io.BytesIO() as temp_file: temp_file.write(b"line1\nline2\r\nmore\rtext") self.storage.save(name, temp_file) file = self.storage.open(name, "r") content_lines = file.readlines() file.close() self.assertEqual(content_lines, ["line1\n", "line2\n", "more\n", "text"]) with io.BytesIO() as temp_file: temp_file.write(b"line1\nline2\r\nmore\rtext") self.storage.save(name, temp_file) file = self.storage.open(name, "rb") content_lines = file.readlines() file.close() self.assertEqual(content_lines, [b"line1\n", b"line2\r\n", b"more\r", b"text"]) class TestBackwardsNames(TestCase): def test_importing(self): from storages.backends.s3boto3 import S3Boto3Storage # noqa from storages.backends.s3boto3 import S3Boto3StorageFile # noqa from storages.backends.s3boto3 import S3ManifestStaticStorage # noqa from storages.backends.s3boto3 import S3StaticStorage # noqa django-storages-1.14.5/tests/test_sftp.py000066400000000000000000000214161475414346200204360ustar00rootroot00000000000000import io import os import socket import stat from unittest.mock import MagicMock from unittest.mock import patch import paramiko from django.core.files.base import File from django.test import TestCase from django.test import override_settings from storages.backends import sftpstorage from tests.utils import NonSeekableContentFile class SFTPStorageTest(TestCase): def setUp(self): self.storage = sftpstorage.SFTPStorage(host="foo", root_path="root") def test_init(self): pass @patch("paramiko.SSHClient") def test_no_known_hosts_file(self, mock_ssh): self.storage.known_host_file = "not_existed_file" self.storage._connect() self.assertEqual("foo", mock_ssh.return_value.connect.call_args[0][0]) @patch.object(os.path, "expanduser", return_value="/path/to/known_hosts") @patch.object(os.path, "exists", return_value=True) @patch("paramiko.SSHClient") def test_error_when_known_hosts_file_not_defined(self, mock_ssh, *a): self.storage._connect() self.storage._ssh.load_host_keys.assert_called_once_with("/path/to/known_hosts") @patch("paramiko.SSHClient") def test_connect(self, mock_ssh): self.storage._connect() self.assertEqual("foo", mock_ssh.return_value.connect.call_args[0][0]) @patch("paramiko.SSHClient") def test_close_unopened(self, mock_ssh): with self.storage: pass mock_ssh.return_value.close.assert_not_called() @patch("paramiko.SSHClient") def test_close_opened(self, mock_ssh): with self.storage as storage: storage._connect() mock_ssh.return_value.close.assert_called_once_with() def test_open(self): file_ = self.storage._open("foo") 
self.assertIsInstance(file_, sftpstorage.SFTPStorageFile) @patch("storages.backends.sftpstorage.SFTPStorage.sftp") def test_read(self, mock_sftp): self.storage._read("foo") self.assertTrue(mock_sftp.open.called) @patch("storages.backends.sftpstorage.SFTPStorage.sftp") def test_chown(self, mock_sftp): self.storage._chown("foo", 1, 1) self.assertEqual(mock_sftp.chown.call_args[0], ("foo", 1, 1)) @patch("storages.backends.sftpstorage.SFTPStorage.sftp") def test_mkdir(self, mock_sftp): self.storage._mkdir("foo") self.assertEqual(mock_sftp.mkdir.call_args[0], ("foo",)) @patch( "storages.backends.sftpstorage.SFTPStorage.sftp", **{"stat.side_effect": (FileNotFoundError(), True)}, ) def test_mkdir_parent(self, mock_sftp): self.storage._mkdir("bar/foo") self.assertEqual(mock_sftp.mkdir.call_args_list[0][0], ("bar",)) self.assertEqual(mock_sftp.mkdir.call_args_list[1][0], ("bar/foo",)) @patch("storages.backends.sftpstorage.SFTPStorage.sftp") def test_save(self, mock_sftp): self.storage._save("foo", File(io.BytesIO(b"foo"), "foo")) self.assertTrue(mock_sftp.putfo.called) @patch("storages.backends.sftpstorage.SFTPStorage.sftp") def test_save_non_seekable(self, mock_sftp): self.storage._save("foo", NonSeekableContentFile("foo")) self.assertTrue(mock_sftp.putfo.called) @patch( "storages.backends.sftpstorage.SFTPStorage.sftp", **{"stat.side_effect": (FileNotFoundError(), True)}, ) def test_save_in_subdir(self, mock_sftp): self.storage._save("bar/foo", File(io.BytesIO(b"foo"), "foo")) self.assertEqual(mock_sftp.stat.call_args_list[0][0], ("root/bar",)) self.assertEqual(mock_sftp.mkdir.call_args_list[0][0], ("root/bar",)) self.assertTrue(mock_sftp.putfo.called) @patch("storages.backends.sftpstorage.SFTPStorage.sftp") def test_delete(self, mock_sftp): self.storage.delete("foo") self.assertEqual(mock_sftp.remove.call_args_list[0][0], ("root/foo",)) @patch("storages.backends.sftpstorage.SFTPStorage.sftp") def test_path_exists(self, mock_sftp): self.assertTrue(self.storage._path_exists("root/foo")) @patch("storages.backends.sftpstorage.SFTPStorage.sftp") def test_exists(self, mock_sftp): self.assertTrue(self.storage.exists("foo")) @patch( "storages.backends.sftpstorage.SFTPStorage.sftp", **{"stat.side_effect": FileNotFoundError()}, ) def test_not_exists(self, mock_sftp): self.assertFalse(self.storage.exists("foo")) @patch( "storages.backends.sftpstorage.SFTPStorage.sftp", **{"stat.side_effect": FileNotFoundError()}, ) def test_not_path_exists(self, mock_sftp): self.assertFalse(self.storage._path_exists("root/foo")) @patch( "storages.backends.sftpstorage.SFTPStorage.sftp", **{"stat.side_effect": socket.timeout()}, ) def test_not_exists_timeout(self, mock_sftp): with self.assertRaises(socket.timeout): self.storage.exists("foo") @patch( "storages.backends.sftpstorage.SFTPStorage.sftp", **{ "listdir_attr.return_value": [ MagicMock(filename="foo", st_mode=stat.S_IFDIR), MagicMock(filename="bar", st_mode=None), ] }, ) def test_listdir(self, mock_sftp): dirs, files = self.storage.listdir("/") self.assertTrue(dirs) self.assertTrue(files) @patch( "storages.backends.sftpstorage.SFTPStorage.sftp", **{ "stat.return_value.st_size": 42, }, ) def test_size(self, mock_sftp): self.assertEqual(self.storage.size("foo"), 42) def test_url(self): self.assertEqual(self.storage.url("foo"), "/media/foo") # Test custom self.storage.base_url = "http://bar.pt/" self.assertEqual(self.storage.url("foo"), "http://bar.pt/foo") # Test error with self.assertRaises(ValueError): self.storage.base_url = None self.storage.url("foo") @patch( 
"storages.backends.sftpstorage.SFTPStorage.sftp", **{ "stat.return_value.st_mtime": 1720287559, "stat.return_value.st_atime": 1720287559, }, ) def test_times(self, mock_sftp): self.storage.get_modified_time("foo") self.storage.get_accessed_time("foo") @patch("paramiko.transport.Transport", **{"is_active.side_effect": (True, False)}) @patch("storages.backends.sftpstorage.SFTPStorage._connect") def test_sftp(self, connect, transport): self.assertIsNone(self.storage.sftp) self.assertTrue(connect.called) connect.reset_mock() self.storage._ssh = paramiko.SSHClient() self.storage._ssh._transport = transport self.storage._sftp = True self.assertTrue(self.storage.sftp) self.assertFalse(connect.called) self.assertTrue(self.storage.sftp) self.assertTrue(connect.called) def test_override_settings(self): with override_settings(SFTP_STORAGE_ROOT="foo1"): storage = sftpstorage.SFTPStorage() self.assertEqual(storage.root_path, "foo1") with override_settings(SFTP_STORAGE_ROOT="foo2"): storage = sftpstorage.SFTPStorage() self.assertEqual(storage.root_path, "foo2") def test_override_class_variable(self): class MyStorage1(sftpstorage.SFTPStorage): root_path = "foo1" storage = MyStorage1() self.assertEqual(storage.root_path, "foo1") class MyStorage2(sftpstorage.SFTPStorage): root_path = "foo2" storage = MyStorage2() self.assertEqual(storage.root_path, "foo2") def test_override_init_argument(self): storage = sftpstorage.SFTPStorage(root_path="foo1") self.assertEqual(storage.root_path, "foo1") storage = sftpstorage.SFTPStorage(root_path="foo2") self.assertEqual(storage.root_path, "foo2") class SFTPStorageFileTest(TestCase): def setUp(self): self.storage = sftpstorage.SFTPStorage(host="foo") self.file = sftpstorage.SFTPStorageFile("bar", self.storage, "wb") @patch( "storages.backends.sftpstorage.SFTPStorage.sftp", **{ "stat.return_value.st_size": 42, }, ) def test_size(self, mock_sftp): self.assertEqual(self.file.size, 42) @patch( "storages.backends.sftpstorage.SFTPStorage.sftp", **{ "open.return_value.read.return_value": b"foo", }, ) def test_read(self, mock_sftp): self.assertEqual(self.file.read(), b"foo") self.assertTrue(mock_sftp.open.called) def test_write(self): self.file.write(b"foo") self.assertEqual(self.file.file.read(), b"foo") @patch("storages.backends.sftpstorage.SFTPStorage.sftp") def test_close(self, mock_sftp): self.file.write(b"foo") self.file.close() self.assertTrue(mock_sftp.putfo.called) django-storages-1.14.5/tests/test_utils.py000066400000000000000000000172741475414346200206310ustar00rootroot00000000000000import datetime import io import os.path import pathlib from django.conf import settings from django.core.exceptions import SuspiciousFileOperation from django.test import TestCase from storages import utils from storages.utils import get_available_overwrite_name as gaon class SettingTest(TestCase): def test_get_setting(self): value = utils.setting("SECRET_KEY") self.assertEqual(settings.SECRET_KEY, value) class CleanNameTests(TestCase): def test_clean_name(self): """Test the base case of clean_name.""" path = utils.clean_name("path/to/somewhere") self.assertEqual(path, "path/to/somewhere") def test_clean_name_pathlib(self): """Test for pathlib.Path handling.""" path = pathlib.Path("path/to/anywhere") self.assertEqual(utils.clean_name(path), "path/to/anywhere") path = pathlib.PurePath("path/to/anywhere") self.assertEqual(utils.clean_name(path), "path/to/anywhere") def test_clean_name_normalize(self): """ Test the normalization of clean_name """ path = 
utils.clean_name("path/to/../somewhere") self.assertEqual(path, "path/somewhere") def test_clean_name_trailing_slash(self): """Test the clean_name when the path has a trailing slash.""" path = utils.clean_name("path/to/somewhere/") self.assertEqual(path, "path/to/somewhere/") def test_clean_name_windows(self): """Test the clean_name when the path has a trailing slash.""" path = utils.clean_name("path\\to\\somewhere") self.assertEqual(path, "path/to/somewhere") class SafeJoinTest(TestCase): def test_normal(self): path = utils.safe_join("", "path/to/somewhere", "other", "path/to/somewhere") self.assertEqual(path, "path/to/somewhere/other/path/to/somewhere") def test_with_dot(self): path = utils.safe_join( "", "path/./somewhere/../other", "..", ".", "to/./somewhere" ) self.assertEqual(path, "path/to/somewhere") def test_with_only_dot(self): path = utils.safe_join("", ".") self.assertEqual(path, "") def test_base_url(self): path = utils.safe_join("base_url", "path/to/somewhere") self.assertEqual(path, "base_url/path/to/somewhere") def test_base_url_with_slash(self): path = utils.safe_join("base_url/", "path/to/somewhere") self.assertEqual(path, "base_url/path/to/somewhere") def test_suspicious_operation(self): with self.assertRaises(ValueError): utils.safe_join("base", "../../../../../../../etc/passwd") with self.assertRaises(ValueError): utils.safe_join("base", "/etc/passwd") def test_trailing_slash(self): """ Test safe_join with paths that end with a trailing slash. """ path = utils.safe_join("base_url/", "path/to/somewhere/") self.assertEqual(path, "base_url/path/to/somewhere/") def test_trailing_slash_multi(self): """ Test safe_join with multiple paths that end with a trailing slash. """ path = utils.safe_join("base_url/", "path/to/", "somewhere/") self.assertEqual(path, "base_url/path/to/somewhere/") def test_datetime_isoformat(self): dt = datetime.datetime(2017, 5, 19, 14, 45, 37, 123456) path = utils.safe_join("base_url", dt.isoformat()) self.assertEqual(path, "base_url/2017-05-19T14:45:37.123456") def test_join_empty_string(self): path = utils.safe_join("base_url", "") self.assertEqual(path, "base_url/") def test_with_base_url_and_dot(self): path = utils.safe_join("base_url", ".") self.assertEqual(path, "base_url/") def test_with_base_url_and_dot_and_path_and_slash(self): path = utils.safe_join("base_url", ".", "path/to/", ".") self.assertEqual(path, "base_url/path/to/") def test_join_nothing(self): path = utils.safe_join("") self.assertEqual(path, "") def test_with_base_url_join_nothing(self): path = utils.safe_join("base_url") self.assertEqual(path, "base_url/") class TestGetAvailableOverwriteName(TestCase): def test_maxlength_is_none(self): name = "superlong/file/with/path.txt" self.assertEqual(gaon(name, None), name) def test_maxlength_equals_name(self): name = "parent/child.txt" self.assertEqual(gaon(name, len(name)), name) def test_maxlength_is_greater_than_name(self): name = "parent/child.txt" self.assertEqual(gaon(name, len(name) + 1), name) def test_maxlength_less_than_name(self): name = "parent/child.txt" self.assertEqual(gaon(name, len(name) - 1), "parent/chil.txt") def test_truncates_away_filename_raises(self): name = "parent/child.txt" with self.assertRaises(SuspiciousFileOperation): gaon(name, len(name) - 5) def test_suspicious_file(self): name = "superlong/file/with/../path.txt" with self.assertRaises(SuspiciousFileOperation): gaon(name, 50) class TestReadBytesWrapper(TestCase): def test_with_bytes_file(self): file = io.BytesIO(b"abcd") file_wrapped = 

class TestReadBytesWrapper(TestCase):
    def test_with_bytes_file(self):
        file = io.BytesIO(b"abcd")
        file_wrapped = utils.ReadBytesWrapper(file)

        # test read() with default args
        self.assertEqual(b"abcd", file_wrapped.read())

        # test seek() with default args
        self.assertEqual(0, file_wrapped.seek(0))
        self.assertEqual(b"abcd", file_wrapped.read())

        # test read() with custom args
        file_wrapped.seek(0)
        self.assertEqual(b"ab", file_wrapped.read(2))

        # test seek() with custom args
        self.assertEqual(1, file_wrapped.seek(-1, io.SEEK_CUR))
        self.assertEqual(b"bcd", file_wrapped.read())

    def test_with_string_file(self):
        file = io.StringIO("wxyz")
        file_wrapped = utils.ReadBytesWrapper(file)

        # test read() with default args
        self.assertEqual(b"wxyz", file_wrapped.read())

        # test seek() with default args
        self.assertEqual(0, file_wrapped.seek(0))
        self.assertEqual(b"wxyz", file_wrapped.read())

        # test read() with custom args
        file_wrapped.seek(0)
        self.assertEqual(b"wx", file_wrapped.read(2))

        # test seek() with custom args
        self.assertEqual(2, file_wrapped.seek(0, io.SEEK_CUR))
        self.assertEqual(b"yz", file_wrapped.read())

    # I chose the characters ™€‰ for the following tests because they produce different
    # bytes when encoding with utf-8 vs windows-1252 vs utf-16

    def test_with_string_file_specified_encoding(self):
        content = "\u2122\u20AC\u2030"
        file = io.StringIO(content)
        file_wrapped = utils.ReadBytesWrapper(file, encoding="utf-16")

        # test read() returns specified encoding
        self.assertEqual(file_wrapped.read(), content.encode("utf-16"))

    def test_with_string_file_detect_encoding(self):
        content = "\u2122\u20AC\u2030"
        with open(
            file=os.path.join(
                os.path.dirname(__file__), "test_files", "windows-1252-encoded.txt"
            ),
            mode="r",
            encoding="windows-1252",
        ) as file:
            self.assertEqual(file.read(), content)
            file.seek(0)
            file_wrapped = utils.ReadBytesWrapper(file)

            # test read() returns encoding detected from file object.
            self.assertEqual(file_wrapped.read(), content.encode("windows-1252"))

    def test_with_string_file_fallback_encoding(self):
        content = "\u2122\u20AC\u2030"
        file = io.StringIO(content)
        file_wrapped = utils.ReadBytesWrapper(file)

        # test read() returns fallback utf-8 encoding
        self.assertEqual(file_wrapped.read(), content.encode("utf-8"))

django-storages-1.14.5/tests/utils.py000066400000000000000000000004001475414346200175610ustar00rootroot00000000000000
from django.core.files.base import ContentFile


class NonSeekableContentFile(ContentFile):
    def open(self, mode=None):
        return self

    def seekable(self):
        return False

    def seek(self, pos, whence=0):
        raise AttributeError()
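
# A minimal usage sketch (the storage instance and file name here are
# hypothetical): backend tests can wrap upload content in this helper to
# exercise code paths that must handle streams which cannot be rewound, e.g.
#
#     storage.save("non-seekable.txt", NonSeekableContentFile(b"content"))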

django-storages-1.14.5/tox.ini000066400000000000000000000021451475414346200162200ustar00rootroot00000000000000
[tox]
minversion = 1.9
envlist =
    py{3.7,3.8,3.9,3.10}-django3.2
    py{3.8,3.9,3.10,3.11}-django4.1
    py{3.8,3.9,3.10,3.11}-django4.2
    py{3.10,3.11,3.12}-django5.0
    py{3.10,3.11,3.12}-django5.1
    py{3.12,3.13}-djangomain
    ruff

[testenv]
setenv =
    DJANGO_SETTINGS_MODULE = tests.settings
    PYTHONWARNINGS = always
    PYTHONDONTWRITEBYTECODE = 1
    # Use a non-existent file to prevent boto3 from loading
    # any configuration from the user's environment
    AWS_CONFIG_FILE = {toxinidir}/tests/no_such_file.conf
commands = pytest --cov=storages {posargs}
deps =
    cryptography
    django3.2: django~=3.2.9
    django4.1: django~=4.1.0
    django4.2: django~=4.2.0
    django5.0: django~=5.0.0
    django5.1: django~=5.1.0
    djangomain: https://github.com/django/django/archive/main.tar.gz
    moto<5
    pytest
    pytest-cov
    rsa
extras =
    azure
    boto3
    dropbox
    google
    libcloud
    sftp

[testenv:ruff]
deps =
    black
    ruff
commands =
    ruff check .
    black --check .
skip_install = true

[pytest]
# Default test paths to run, if no other paths are specified on the CLI
# (specify paths after a -- e.g. `tox -- tests/test_s3.py`)
testpaths = tests/
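
# Illustrative invocations (assuming the matching interpreters are available
# locally; the environment names come from the envlist above):
#   tox -e ruff                                      # lint and formatting checks only
#   tox -e py3.12-django5.1                          # one interpreter/Django combination
#   tox -e py3.11-django4.2 -- tests/test_utils.py   # pass test paths through to pytest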