pax_global_header 0000666 0000000 0000000 00000000064 14334672560 0014524 g ustar 00root root 0000000 0000000 52 comment=b95c9536b9c4069e4ee6cb3d78671c525fd901e6
PyDrive2-1.15.0/ 0000775 0000000 0000000 00000000000 14334672560 0013254 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/.github/ 0000775 0000000 0000000 00000000000 14334672560 0014614 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/.github/dependabot.yml 0000664 0000000 0000000 00000000421 14334672560 0017441 0 ustar 00root root 0000000 0000000 version: 2
updates:
- directory: "/"
package-ecosystem: "pip"
schedule:
interval: "daily"
labels:
- "maintenance"
- directory: "/"
package-ecosystem: "github-actions"
schedule:
interval: "daily"
labels:
- "maintenance"
PyDrive2-1.15.0/.github/release-drafter.yml 0000664 0000000 0000000 00000000137 14334672560 0020405 0 ustar 00root root 0000000 0000000 # Config for https://github.com/apps/release-drafter
branches:
- main
template: |
$CHANGES
PyDrive2-1.15.0/.github/workflows/ 0000775 0000000 0000000 00000000000 14334672560 0016651 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/.github/workflows/docs.yml 0000664 0000000 0000000 00000001134 14334672560 0020323 0 ustar 00root root 0000000 0000000 name: Build docs
on:
push:
pull_request:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: Build
run: |
pip install -U -r docs/requirements.txt
pip install ".[fsspec]"
sphinx-build docs dist/site -b dirhtml -a
- name: Publish
if: ${{ github.event_name == 'push' && github.ref_name == 'main' }}
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: dist/site
PyDrive2-1.15.0/.github/workflows/pre-commit.yml 0000664 0000000 0000000 00000000347 14334672560 0021454 0 ustar 00root root 0000000 0000000 name: pre-commit
on:
pull_request:
push:
branches: [main]
jobs:
pre-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
- uses: pre-commit/action@v3.0.0
PyDrive2-1.15.0/.github/workflows/publish.yml 0000664 0000000 0000000 00000001172 14334672560 0021043 0 ustar 00root root 0000000 0000000 on:
push:
branches:
- main
release:
types: [published]
workflow_dispatch:
name: Publish
jobs:
publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: Install pypa/build
run: python -m pip install build
- name: Build the package
run: |
python -m build --sdist --wheel \
--outdir dist/ .
- name: Publish
if: startsWith(github.ref, 'refs/tags')
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.PYPI_TOKEN }}
PyDrive2-1.15.0/.github/workflows/release-drafter.yml 0000664 0000000 0000000 00000000607 14334672560 0022444 0 ustar 00root root 0000000 0000000 name: Release Drafter
on:
push:
# branches to consider in the event; optional, defaults to all
branches:
- main
jobs:
update_release_draft:
runs-on: ubuntu-latest
steps:
# Drafts your next Release notes as Pull Requests are merged into "main"
- uses: release-drafter/release-drafter@v5
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PyDrive2-1.15.0/.github/workflows/test.yml 0000664 0000000 0000000 00000002101 14334672560 0020345 0 ustar 00root root 0000000 0000000 on:
push:
branches:
- main
pull_request_target:
name: Tests
jobs:
authorize:
environment:
${{ (github.event_name == 'pull_request_target' &&
github.event.pull_request.head.repo.full_name != github.repository) &&
'external' || 'internal' }}
runs-on: ubuntu-latest
steps:
- run: echo ✓
test:
needs: authorize
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
max-parallel: 3
matrix:
os: [ubuntu-18.04, macos-11, windows-2019]
pyv: ["3.7", "3.8", "3.9", "3.10"]
steps:
- uses: actions/checkout@v3
with:
# NOTE: needed for pull_request_target to use PR code
ref: ${{ github.event.pull_request.head.sha || github.ref }}
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.pyv }}
- name: Install dependencies
run: python -m pip install -e '.[fsspec, tests]'
- name: Test
run: python -m pytest -m "not manual"
env:
GDRIVE_USER_CREDENTIALS_DATA: ${{ secrets.GDRIVE_USER_CREDENTIALS_DATA }}
PyDrive2-1.15.0/.gitignore 0000664 0000000 0000000 00000000100 14334672560 0015233 0 ustar 00root root 0000000 0000000 *.pyc
*~
*.egg-info
dist
.cache
.env
.idea
pip-wheel-metadata
PyDrive2-1.15.0/.pre-commit-config.yaml 0000664 0000000 0000000 00000000333 14334672560 0017534 0 ustar 00root root 0000000 0000000 repos:
- hooks:
- id: black
language_version: python3
repo: https://github.com/psf/black
rev: 22.10.0
- hooks:
- id: flake8
language_version: python3
repo: https://github.com/PyCQA/flake8
rev: 5.0.4
PyDrive2-1.15.0/CHANGES 0000664 0000000 0000000 00000000051 14334672560 0014243 0 ustar 00root root 0000000 0000000 v1.0.0, Aug 16, 2013 -- Initial release.
PyDrive2-1.15.0/CONTRIBUTING.rst 0000664 0000000 0000000 00000000570 14334672560 0015717 0 ustar 00root root 0000000 0000000 Contributing guidelines
=======================
How to become a contributor and submit your own code
----------------------------------------------------
TODO
Contributing code
~~~~~~~~~~~~~~~~~
If you have improvements to PyDrive2, send us your pull requests! For those
just getting started, Github has a `howto `_.
PyDrive2-1.15.0/LICENSE 0000664 0000000 0000000 00000023704 14334672560 0014267 0 ustar 00root root 0000000 0000000 Copyright 2013 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
PyDrive2-1.15.0/MANIFEST.in 0000664 0000000 0000000 00000000257 14334672560 0015016 0 ustar 00root root 0000000 0000000 include AUTHORS
include CHANGES
include LICENSE
include MANIFEST.in
include README.rst
recursive-include docs *
recursive-include pydrive2/test *
recursive-exclude * *.py[co]
PyDrive2-1.15.0/README.rst 0000664 0000000 0000000 00000011714 14334672560 0014747 0 ustar 00root root 0000000 0000000 |CI| |Conda| |PyPI|
.. |CI| image:: https://github.com/iterative/PyDrive2/workflows/Tests/badge.svg?branch=main
:target: https://github.com/iterative/PyDrive2/actions
:alt: GHA Tests
.. |Conda| image:: https://img.shields.io/conda/v/conda-forge/PyDrive2.svg?label=conda&logo=conda-forge
:target: https://anaconda.org/conda-forge/PyDrive2
:alt: Conda-forge
.. |PyPI| image:: https://img.shields.io/pypi/v/PyDrive2.svg?label=pip&logo=PyPI&logoColor=white
:target: https://pypi.org/project/PyDrive2
:alt: PyPI
PyDrive2
--------
*PyDrive2* is a wrapper library of
`google-api-python-client `_
that simplifies many common Google Drive API V2 tasks. It is an actively
maintained fork of `https://pypi.python.org/pypi/PyDrive `_.
By the authors and maintainers of the `Git for Data `_ - DVC
project.
Project Info
------------
- Package: `https://pypi.python.org/pypi/PyDrive2 `_
- Documentation: `https://docs.iterative.ai/PyDrive2 `_
- Source: `https://github.com/iterative/PyDrive2 `_
- Changelog: `https://github.com/iterative/PyDrive2/releases `_
- `Running tests `_
Features of PyDrive2
--------------------
- Simplifies OAuth2.0 into just few lines with flexible settings.
- Wraps `Google Drive API V2 `_ into
classes of each resource to make your program more object-oriented.
- Helps common operations else than API calls, such as content fetching
and pagination control.
- Provides `fsspec`_ filesystem implementation.
How to install
--------------
You can install PyDrive2 with regular ``pip`` command.
::
$ pip install PyDrive2
To install the current development version from GitHub, use:
::
$ pip install git+https://github.com/iterative/PyDrive2.git#egg=PyDrive2
OAuth made easy
---------------
Download *client\_secrets.json* from Google API Console and OAuth2.0 is
done in two lines. You can customize behavior of OAuth2 in one settings
file *settings.yaml*.
.. code:: python
from pydrive2.auth import GoogleAuth
from pydrive2.drive import GoogleDrive
gauth = GoogleAuth()
gauth.LocalWebserverAuth()
drive = GoogleDrive(gauth)
File management made easy
-------------------------
Upload/update the file with one method. PyDrive2 will do it in the most
efficient way.
.. code:: python
file1 = drive.CreateFile({'title': 'Hello.txt'})
file1.SetContentString('Hello')
file1.Upload() # Files.insert()
file1['title'] = 'HelloWorld.txt' # Change title of the file
file1.Upload() # Files.patch()
content = file1.GetContentString() # 'Hello'
file1.SetContentString(content+' World!') # 'Hello World!'
file1.Upload() # Files.update()
file2 = drive.CreateFile()
file2.SetContentFile('hello.png')
file2.Upload()
print('Created file %s with mimeType %s' % (file2['title'],
file2['mimeType']))
# Created file hello.png with mimeType image/png
file3 = drive.CreateFile({'id': file2['id']})
print('Downloading file %s from Google Drive' % file3['title']) # 'hello.png'
file3.GetContentFile('world.png') # Save Drive file as a local file
# or download Google Docs files in an export format provided.
# downloading a docs document as an html file:
docsfile.GetContentFile('test.html', mimetype='text/html')
File listing pagination made easy
---------------------------------
*PyDrive2* handles file listing pagination for you.
.. code:: python
# Auto-iterate through all files that matches this query
file_list = drive.ListFile({'q': "'root' in parents"}).GetList()
for file1 in file_list:
print('title: {}, id: {}'.format(file1['title'], file1['id']))
# Paginate file lists by specifying number of max results
for file_list in drive.ListFile({'maxResults': 10}):
print('Received {} files from Files.list()'.format(len(file_list))) # <= 10
for file1 in file_list:
print('title: {}, id: {}'.format(file1['title'], file1['id']))
Fsspec filesystem
-----------------
*PyDrive2* provides easy way to work with your files through `fsspec`_
compatible `GDriveFileSystem`_.
.. code:: python
from pydrive2.fs import GDriveFileSystem
fs = GDriveFileSystem("root", client_id=my_id, client_secret=my_secret)
for root, dnames, fnames in fs.walk(""):
...
.. _`GDriveFileSystem`: https://docs.iterative.ai/PyDrive2/fsspec/
Concurrent access made easy
---------------------------
All API functions made to be thread-safe.
Contributors
------------
Thanks to all our contributors!
.. image:: https://contrib.rocks/image?repo=iterative/PyDrive2
:target: https://github.com/iterative/PyDrive2/graphs/contributors
.. _`fsspec`: https://filesystem-spec.readthedocs.io/en/latest/
PyDrive2-1.15.0/docs/ 0000775 0000000 0000000 00000000000 14334672560 0014204 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/docs/README.md 0000664 0000000 0000000 00000001431 14334672560 0015462 0 ustar 00root root 0000000 0000000 This document outlines how to rebuild the documentation.
## Setup
- Install Sphinx: `pip install sphinx` or `apt-get install python-sphinx`
- Install theme: `pip install furo`
- Build site: `sphinx-build docs dist/site -b dirhtml -a`
Updating GitHub Pages:
```bash
cd dist/site
git init
git add .
git commit -m "update pages"
git branch -M gh-pages
git push -f git@github.com:iterative/PyDrive2 gh-pages
```
## Contributing
If code files were added, the easiest way to reflect code changes in the
documentation by referencing the file from within `pydrive.rst`.
If a non-code related file was added (it has to have the `.rst` ending),
then add the file name to the list of names under "Table of Contents"
in `index.rst`. Make sure to add the file name excluding the `.rst` file ending.
PyDrive2-1.15.0/docs/conf.py 0000664 0000000 0000000 00000022723 14334672560 0015511 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
#
# PyDrive2 documentation build configuration file, created by
# sphinx-quickstart on Sun Jun 12 23:01:40 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath("../"))
# exclude_patterns = ['_build', '**tests**', '**spi**']
exclude_dirnames = ["test"]
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "1.8"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ["sphinx.ext.autodoc", "sphinx.ext.githubpages"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The root toctree document.
root_doc = "index"
# General information about the project.
project = "PyDrive2"
copyright = (
"2022, JunYoung Gwak, Scott Blevins, Robin Nabel, Google Inc, "
"Iterative Inc"
)
author = "JunYoung Gwak, Scott Blevins, Robin Nabel, Iterative Inc"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "1.14.0"
# The full version, including alpha/beta/rc tags.
release = "1.14.0"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "en"
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
"_build",
"pydrive2/test/*",
"test/*",
"pydrive2/test",
"../pydrive2/test",
]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "furo"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# " v documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "PyDrive2doc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
# Latex figure (float) alignment
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
root_doc,
"PyDrive2.tex",
"PyDrive2 Documentation",
"JunYoung Gwak, Scott Blevins, Robin Nabel, Iterative Inc",
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(root_doc, "pydrive2", "PyDrive2 Documentation", [author], 1)]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
root_doc,
"PyDrive2",
"PyDrive2 Documentation",
author,
"PyDrive2",
"One line description of project.",
"Miscellaneous",
)
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
PyDrive2-1.15.0/docs/filelist.rst 0000664 0000000 0000000 00000004102 14334672560 0016546 0 ustar 00root root 0000000 0000000 File listing made easy
=============================
*PyDrive* handles paginations and parses response as list of `GoogleDriveFile`_.
Get all files which matches the query
-------------------------------------
Create `GoogleDriveFileList`_ instance with `parameters of Files.list()`_ as ``dict``.
Call `GetList()`_ and you will get all files that matches your query as a list of `GoogleDriveFile`_.
The syntax and possible option of the query ``q`` parameter can be found in `search for files` Google documentation.
.. code-block:: python
from pydrive2.drive import GoogleDrive
drive = GoogleDrive(gauth) # Create GoogleDrive instance with authenticated GoogleAuth instance
# Auto-iterate through all files in the root folder.
file_list = drive.ListFile({'q': "'root' in parents and trashed=false"}).GetList()
for file1 in file_list:
print('title: %s, id: %s' % (file1['title'], file1['id']))
You can update metadata or content of these `GoogleDriveFile`_ instances if you need it.
Paginate and iterate through files
----------------------------------
*PyDrive* provides a Pythonic way of paginating and iterating through a list of files.
Here is an example how to do this, ``maxResults`` below defines how many
files it retrieves at once and we wrap it into a ``for`` loop to iterate:
Sample code continues from above:
.. code-block:: python
# Paginate file lists by specifying number of max results
for file_list in drive.ListFile({'q': 'trashed=true', 'maxResults': 10}):
print('Received %s files from Files.list()' % len(file_list)) # <= 10
for file1 in file_list:
print('title: %s, id: %s' % (file1['title'], file1['id']))
.. _`GoogleDriveFile`: /PyDrive2/pydrive2/#pydrive2.files.GoogleDriveFile
.. _`GoogleDriveFileList`: /PyDrive2/pydrive2/#pydrive2.files.GoogleDriveFileList
.. _`parameters of Files.list()`: https://developers.google.com/drive/v2/reference/files/list#request
.. _`GetList()`: /PyDrive2/pydrive2/#pydrive2.apiattr.ApiResourceList.GetList
.. _`search for files`: https://developers.google.com/drive/api/v2/search-files
PyDrive2-1.15.0/docs/filemanagement.rst 0000664 0000000 0000000 00000030265 14334672560 0017720 0 ustar 00root root 0000000 0000000 File management made easy
=========================
There are many methods to create and update file metadata and contents.
With *PyDrive*, you don't have to care about any of these different API methods.
Manipulate file metadata and contents from `GoogleDriveFile`_ object and call
`Upload()`_. *PyDrive* will make the optimal API call for you.
Upload a new file
-----------------
Here is a sample code to upload a file. ``gauth`` is an authenticated `GoogleAuth`_ object.
.. code-block:: python
from pydrive2.drive import GoogleDrive
# Create GoogleDrive instance with authenticated GoogleAuth instance.
drive = GoogleDrive(gauth)
# Create GoogleDriveFile instance with title 'Hello.txt'.
file1 = drive.CreateFile({'title': 'Hello.txt'})
file1.Upload() # Upload the file.
print('title: %s, id: %s' % (file1['title'], file1['id']))
# title: Hello.txt, id: {{FILE_ID}}
Now, you will have a file 'Hello.txt' uploaded to your Google Drive. You can open it from web interface to check its content, 'Hello World!'.
Note that `CreateFile()`_ will create `GoogleDriveFile`_ instance but not actually upload a file to Google Drive. You can initialize `GoogleDriveFile`_ object by itself. However, it is not recommended to do so in order to keep authentication consistent.
Delete, Trash and un-Trash files
--------------------------------
You may want to delete, trash, or un-trash a file. To do this use ``Delete()``,
``Trash()`` or ``UnTrash()`` on a GoogleDriveFile object.
*Note:* ``Trash()`` *moves a file into the trash and can be recovered,*
``Delete()`` *deletes the file permanently and immediately.*
.. code-block:: python
# Create GoogleDriveFile instance and upload it.
file1 = drive.CreateFile()
file1.Upload()
file1.Trash() # Move file to trash.
file1.UnTrash() # Move file out of trash.
file1.Delete() # Permanently delete the file.
Update file metadata
--------------------
You can manipulate file metadata from a `GoogleDriveFile`_ object just as you manipulate a ``dict``.
The format of file metadata can be found in the Google Drive API documentation: `Files resource`_.
Sample code continues from `Upload a new file`_:
.. code-block:: python
file1['title'] = 'HelloWorld.txt' # Change title of the file.
file1.Upload() # Update metadata.
print('title: %s' % file1['title']) # title: HelloWorld.txt.
Now, the title of your file has changed to 'HelloWorld.txt'.
Download file metadata from file ID
-----------------------------------
You might want to get file metadata from file ID. In that case, just initialize
`GoogleDriveFile`_ with file ID and access metadata from `GoogleDriveFile`_
just as you access ``dict``.
Sample code continues from above:
.. code-block:: python
# Create GoogleDriveFile instance with file id of file1.
file2 = drive.CreateFile({'id': file1['id']})
print('title: %s, mimeType: %s' % (file2['title'], file2['mimeType']))
# title: HelloWorld.txt, mimeType: text/plain
Handling special metadata
-------------------------
Not all metadata can be set with the methods described above.
PyDrive gives you access to the metadata of an object through
``file_object.FetchMetadata()``. This function has two optional parameters:
``fields`` and ``fetch_all``.
.. code-block:: python
file1 = drive.CreateFile({'id': ''})
# Fetches all basic metadata fields, including file size, last modified etc.
file1.FetchMetadata()
# Fetches all metadata available.
file1.FetchMetadata(fetch_all=True)
# Fetches the 'permissions' metadata field.
file1.FetchMetadata(fields='permissions')
# You can update a list of specific fields like this:
file1.FetchMetadata(fields='permissions,labels,mimeType')
For more information on available metadata fields have a look at the
`official documentation`_.
Insert permissions
__________________
Insert, retrieving or deleting permissions is illustrated by making a file
readable to all who have a link to the file.
.. code-block:: python
file1 = drive.CreateFile()
file1.Upload()
# Insert the permission.
permission = file1.InsertPermission({
'type': 'anyone',
'value': 'anyone',
'role': 'reader'})
print(file1['alternateLink']) # Display the sharable link.
Note: ``InsertPermission()`` calls ``GetPermissions()`` after successfully
inserting the permission.
You can find more information on the permitted fields of a permission
`here <https://developers.google.com/drive/v2/reference/permissions/insert>`_.
This file is now shared and anyone with the link can view it. But what if you
want to check whether a file is already shared?
List permissions
________________
Permissions can be fetched using the ``GetPermissions()`` function of a
``GoogleDriveFile``, and can be used like so:
.. code-block:: python
# Create a new file
file1 = drive.CreateFile()
# Fetch permissions.
permissions = file1.GetPermissions()
print(permissions)
# The permissions are also available as file1['permissions']:
print(file1['permissions'])
For the more advanced user: ``GetPermissions()`` is a shorthand for:
.. code-block:: python
# Fetch Metadata, including the permissions field.
file1.FetchMetadata(fields='permissions')
# The permissions array is now available for further use.
print(file1['permissions'])
Remove a Permission
___________________
*PyDrive* allows you to remove a specific permission using the
``DeletePermission(permission_id)`` function. This function allows you to delete
one permission at a time by providing the permission's ID.
.. code-block:: python
file1 = drive.CreateFile({'id': ''})
permissions = file1.GetPermissions() # Download file permissions.
permission_id = permissions[1]['id'] # Get a permission ID.
file1.DeletePermission(permission_id) # Delete the permission.
Get files by complex queries
----------------------------
We can get a file by name and by other constraints, usually a filename will be
unique but we can have two equal names with different extensions, e.g.,
*123.jpeg and 123.mp3*. So if you expect only one file add more constraints to
the query, see `Query string examples <https://developers.google.com/drive/api/guides/search-files#examples>`_, as a result we get
a list of `GoogleDriveFile`_ instances.
.. code-block:: python
from pydrive2.drive import GoogleDrive
# Create GoogleDrive instance with authenticated GoogleAuth instance.
drive = GoogleDrive(gauth)
filename = 'file_test'
# Query
query = {'q': f"title = '{filename}' and mimeType='{mimetype}'"}
# Get list of files that match against the query
files = drive.ListFile(query).GetList()
Upload and update file content
------------------------------
Managing file content is as easy as managing file metadata. You can set file
content with either `SetContentFile(filename)`_ or `SetContentString(content)`_
and call `Upload()`_ just as you did to upload or update file metadata.
Sample code continues from `Download file metadata from file ID`_:
.. code-block:: python
file4 = drive.CreateFile({'title':'appdata.json', 'mimeType':'application/json'})
file4.SetContentString('{"firstname": "John", "lastname": "Smith"}')
file4.Upload() # Upload file.
file4.SetContentString('{"firstname": "Claudio", "lastname": "Afshar"}')
file4.Upload() # Update content of the file.
file5 = drive.CreateFile()
# Read file and set it as a content of this instance.
file5.SetContentFile('cat.png')
file5.Upload() # Upload the file.
print('title: %s, mimeType: %s' % (file5['title'], file5['mimeType']))
# title: cat.png, mimeType: image/png
**Advanced Users:** If you call SetContentFile and GetContentFile you can
define which character encoding is to be used by using the optional
parameter `encoding`.
If you, for example, are retrieving a file which is stored on your Google
Drive which is encoded with ISO-8859-1, then you can get the content string
like so:
.. code-block:: python
content_string = file4.GetContentString(encoding='ISO-8859-1')
Upload data as bytes in memory buffer
--------------------------------------
Data can be kept as bytes in an in-memory buffer when we use the ``io`` module’s
Byte IO operations, we can upload files that reside in memory, for
example we have a base64 image, we can decode the string and upload it to drive
without the need to save as a file and use `SetContentFile(filename)`_
.. code-block:: python
import io
from pydrive2.drive import GoogleDrive
# Create GoogleDrive instance with authenticated GoogleAuth instance.
drive = GoogleDrive(gauth)
# Define file name and type
metadata = {
'title': 'image_test',
'mimeType': 'image/jpeg'
}
# Create file
file = drive.CreateFile(metadata=metadata)
# Buffered I/O implementation using an in-memory bytes buffer.
image_file = io.BytesIO(image_bytes)
# Set the content of the file
file.content = image_file
# Upload the file to google drive
file.Upload()
Upload file to a specific folder
--------------------------------
In order to upload a file into a specific drive folder we need to pass the
``id`` of the folder in the metadata ``param`` from `CreateFile()`_.
Save the image from the previous example into a specific folder:
.. code-block:: python
metadata = {
'parents': [
{"id": id_drive_folder}
],
'title': 'image_test',
'mimeType': 'image/jpeg'
}
# Create file
file = drive.CreateFile(metadata=metadata)
file.Upload()
Download file content
---------------------
Just as you uploaded file content, you can download it using
`GetContentFile(filename)`_ or `GetContentString()`_.
Sample code continues from above:
.. code-block:: python
# Initialize GoogleDriveFile instance with file id.
file6 = drive.CreateFile({'id': file5['id']})
file6.GetContentFile('catlove.png') # Download file as 'catlove.png'.
# Initialize GoogleDriveFile instance with file id.
file7 = drive.CreateFile({'id': file4['id']})
content = file7.GetContentString()
# content: '{"firstname": "Claudio", "lastname": "Afshar"}'
file7.SetContentString(content.replace('lastname', 'familyname'))
file7.Upload()
# Uploaded content: '{"firstname": "Claudio", "familyname": "Afshar"}'
**Advanced users**: Google Drive is `known`_ to add BOM (Byte Order Marks) to
the beginning of some files, such as Google Documents downloaded as text files.
In some cases this confuses parsers and leads to corrupt files.
PyDrive can remove the BOM from the beginning of a file when it
is downloaded. Just set the `remove_bom` parameter in `GetContentString()` or
`GetContentFile()` - see `examples/strip_bom_example.py` in the GitHub
repository for an example.
Abusive files
-------------
Files identified as `abusive`_ (malware, etc.) are only downloadable by the owner.
If you see a
'This file has been identified as malware or spam and cannot be downloaded'
error, set 'acknowledge_abuse=True' parameter in `GetContentFile()`. By using
it you indicate that you acknowledge the risks of downloading potential malware.
.. _`GoogleDriveFile`: /PyDrive2/pydrive2/#pydrive2.files.GoogleDriveFile
.. _`Upload()`: /PyDrive2/pydrive2/#pydrive2.files.GoogleDriveFile.Upload
.. _`GoogleAuth`: /PyDrive2/pydrive2/#pydrive2.auth.GoogleAuth
.. _`CreateFile()`: /PyDrive2/pydrive2/#pydrive2.drive.GoogleDrive.CreateFile
.. _`Files resource`: https://developers.google.com/drive/v2/reference/files#resource-representations
.. _`SetContentFile(filename)`: /PyDrive2/pydrive2/#pydrive2.files.GoogleDriveFile.SetContentFile
.. _`SetContentString(content)`: /PyDrive2/pydrive2/#pydrive2.files.GoogleDriveFile.SetContentString
.. _`GetContentFile(filename)`: /PyDrive2/pydrive2/#pydrive2.files.GoogleDriveFile.GetContentFile
.. _`GetContentString()`: ./PyDrive2/pydrive2/#pydrive2.files.GoogleDriveFile.GetContentString
.. _`official documentation`: https://developers.google.com/drive/v2/reference/files#resource-representations
.. _`known`: https://productforums.google.com/forum/#!topic/docs/BJLimQDGtjQ
.. _`abusive`: https://support.google.com/docs/answer/148505
.. _`query_parameters`: https://developers.google.com/drive/api/guides/search-files#examples
PyDrive2-1.15.0/docs/fsspec.rst 0000664 0000000 0000000 00000006106 14334672560 0016224 0 ustar 00root root 0000000 0000000 fsspec filesystem
=================
*PyDrive2* provides easy way to work with your files through `fsspec`_
compatible `GDriveFileSystem`_.
Installation
------------
.. code-block:: sh
pip install 'pydrive2[fsspec]'
Local webserver
---------------
.. code-block:: python
from pydrive2.fs import GDriveFileSystem
fs = GDriveFileSystem(
"root",
client_id="my_client_id",
client_secret="my_client_secret",
)
By default, credentials will be cached per 'client_id', but if you are using
multiple users you might want to use 'profile' to avoid accidentally using
someone else's cached credentials:
.. code-block:: python
from pydrive2.fs import GDriveFileSystem
fs = GDriveFileSystem(
"root",
client_id="my_client_id",
client_secret="my_client_secret",
profile="myprofile",
)
Writing cached credentials to a file and using it if it already exists (which
avoids interactive auth):
.. code-block:: python
from pydrive2.fs import GDriveFileSystem
fs = GDriveFileSystem(
"root",
client_id="my_client_id",
client_secret="my_client_secret",
client_json_file_path="/path/to/keyfile.json",
)
Using cached credentials from json string (avoids interactive auth):
.. code-block:: python
from pydrive2.fs import GDriveFileSystem
fs = GDriveFileSystem(
"root",
client_id="my_client_id",
client_secret="my_client_secret",
client_json=json_string,
)
Service account
---------------
Using json keyfile path:
.. code-block:: python
from pydrive2.fs import GDriveFileSystem
fs = GDriveFileSystem(
"root",
use_service_account=True,
client_json_file_path="/path/to/keyfile.json",
)
Using json keyfile string:
.. code-block:: python
from pydrive2.fs import GDriveFileSystem
fs = GDriveFileSystem(
"root",
use_service_account=True,
client_json=json_string,
)
Use `client_user_email` if you are using `delegation of authority`_.
Additional parameters
---------------------
:trash_only (bool): Move files to trash instead of deleting.
:acknowledge_abuse (bool): Acknowledge the risk and download files identified as abusive. See `Abusive files`_ for more info.
Using filesystem
----------------
.. code-block:: python
for root, dnames, fnames in fs.walk(""):
for dname in dnames:
print(f"dir: {root}/{dname}")
for fname in fnames:
print(f"file: {root}/{fname}")
Filesystem instance offers a large number of methods for getting information
about and manipulating files, refer to fsspec docs on
`how to use a filesystem`_.
.. _`fsspec`: https://filesystem-spec.readthedocs.io/en/latest/
.. _`GDriveFileSystem`: /PyDrive2/pydrive2/#pydrive2.fs.GDriveFileSystem
.. _`delegation of authority`: https://developers.google.com/admin-sdk/directory/v1/guides/delegation
.. _`Abusive files`: /PyDrive2/filemanagement/index.html#abusive-files
.. _`how to use a filesystem`: https://filesystem-spec.readthedocs.io/en/latest/usage.html#use-a-file-system
PyDrive2-1.15.0/docs/genindex.rst 0000664 0000000 0000000 00000000014 14334672560 0016532 0 ustar 00root root 0000000 0000000 Index
=====
PyDrive2-1.15.0/docs/index.rst 0000664 0000000 0000000 00000002455 14334672560 0016053 0 ustar 00root root 0000000 0000000 .. PyDrive2 documentation master file, created by
sphinx-quickstart on Sun Jun 12 23:01:40 2016.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to PyDrive2's documentation!
====================================
PyDrive2 is a wrapper library of `google-api-python-client`_ that simplifies many common Google Drive API tasks.
Project Info
============
- Package: `https://pypi.python.org/pypi/PyDrive2 `_
- Documentation: `https://docs.iterative.ai/PyDrive2 `_
- Source: `https://github.com/iterative/PyDrive2 `_
- Changelog: `https://github.com/iterative/PyDrive2/releases `_
How to install
==============
You can install PyDrive2 with regular ``pip`` command.
::
$ pip install PyDrive2
To install the current development version from GitHub, use:
::
$ pip install git+https://github.com/iterative/PyDrive2.git#egg=PyDrive2
.. _`google-api-python-client`: https://github.com/google/google-api-python-client
Table of Contents
=================
.. toctree::
:maxdepth: 2
quickstart
oauth
filemanagement
filelist
fsspec
pydrive2
genindex
PyDrive2-1.15.0/docs/oauth.rst 0000664 0000000 0000000 00000023067 14334672560 0016066 0 ustar 00root root 0000000 0000000 OAuth made easy
===============
Authentication in two lines
---------------------------
OAuth2.0 is complex and difficult to start with. To make it simpler,
*PyDrive2* makes all authentication into just two lines.
.. code-block:: python
from pydrive2.auth import GoogleAuth
gauth = GoogleAuth()
# Create local webserver and auto handles authentication.
gauth.LocalWebserverAuth()
# Or use the CommandLineAuth(), which provides you with a link to paste
# into your browser. The site it leads to then provides you with an
# authentication token which you paste into the command line.
# Commented out as it is an alternative to the LocalWebserverAuth() above,
# and someone will just copy-paste the entire thing into their editor.
# gauth.CommandLineAuth()
To make this code work, you need to download the application configurations file
from APIs Console. Take a look at quickstart_ for detailed instructions.
`LocalWebserverAuth()`_ is a built-in method of `GoogleAuth`_ which sets up
local webserver to automatically receive authentication code from user and
authorizes by itself. You can also use `CommandLineAuth()`_ which manually
takes code from user at command line.
.. _quickstart: /PyDrive2/quickstart/#authentication
.. _`LocalWebserverAuth()`: /PyDrive2/pydrive2/#pydrive2.auth.GoogleAuth.LocalWebserverAuth
.. _`GoogleAuth`: /PyDrive2/pydrive2/#pydrive2.auth.GoogleAuth
.. _`CommandLineAuth()`: /PyDrive2/pydrive2/#pydrive.auth.GoogleAuth.CommandLineAuth
Automatic and custom authentication with *settings.yaml*
--------------------------------------------------------
Read this section if you need a custom authentication flow, **such as silent
authentication on a remote machine**. For an example of such a setup have a look
at `Sample settings.yaml`_.
OAuth is complicated and it requires a lot of settings. By default,
when you don't provide any settings, *PyDrive* will automatically set default
values which works for most of the cases. Here are some default settings.
- Read client configuration from file *client_secrets.json*
- OAuth scope: :code:`https://www.googleapis.com/auth/drive`
- Don't save credentials
- Don't retrieve refresh token
However, you might want to customize these settings while maintaining two lines
of clean code. If that is the case, you can make *settings.yaml* file in your
working directory and *PyDrive* will read it to customize authentication
behavior.
These are all the possible fields of a *settings.yaml* file:
.. code-block:: python
client_config_backend: {{str}}
client_config_file: {{str}}
client_config:
client_id: {{str}}
client_secret: {{str}}
auth_uri: {{str}}
token_uri: {{str}}
redirect_uri: {{str}}
revoke_uri: {{str}}
service_config:
client_user_email: {{str}}
client_json_file_path: {{str}}
client_json_dict: {{dict}}
client_json: {{str}}
save_credentials: {{bool}}
save_credentials_backend: {{str}}
save_credentials_file: {{str}}
save_credentials_dict: {{dict}}
save_credentials_key: {{str}}
get_refresh_token: {{bool}}
oauth_scope: {{list of str}}
Fields explained:
:client_config_backend (str): From where to read client configuration(API application settings such as client_id and client_secrets) from. Valid values are 'file', 'settings' and 'service'. **Default**: 'file'. **Required**: No.
:client_config_file (str): When *client_config_backend* is 'file', path to the file containing client configuration. **Default**: 'client_secrets.json'. **Required**: No.
:client_config (dict): Place holding dictionary for client configuration when *client_config_backend* is 'settings'. **Required**: Yes, only if *client_config_backend* is 'settings' and not using *service_config*
:client_config['client_id'] (str): Client ID of the application. **Required**: Yes, only if *client_config_backend* is 'settings'
:client_config['client_secret'] (str): Client secret of the application. **Required**: Yes, only if *client_config_backend* is 'settings'
:client_config['auth_uri'] (str): The authorization server endpoint URI. **Default**: 'https://accounts.google.com/o/oauth2/auth'. **Required**: No.
:client_config['token_uri'] (str): The token server endpoint URI. **Default**: 'https://accounts.google.com/o/oauth2/token'. **Required**: No.
:client_config['redirect_uri'] (str): Redirection endpoint URI. **Default**: 'urn:ietf:wg:oauth:2.0:oob'. **Required**: No.
:client_config['revoke_uri'] (str): Revoke endpoint URI. **Default**: None. **Required**: No.
:service_config (dict): Place holding dictionary for client configuration when *client_config_backend* is 'service' or 'settings' and using service account. **Required**: Yes, only if *client_config_backend* is 'service' or 'settings' and not using *client_config*
:service_config['client_user_email'] (str): User email that authority was delegated_ to. **Required**: No.
:service_config['client_json_file_path'] (str): Path to service account `.json` key file. **Required**: No.
:service_config['client_json_dict'] (dict): Service account `.json` key file loaded into a dictionary. **Required**: No.
:service_config['client_json'] (str): Service account `.json` key file loaded into a string. **Required**: No.
:save_credentials (bool): True if you want to save credentials. **Default**: False. **Required**: No.
:save_credentials_backend (str): Backend to save credentials to. 'file' and 'dictionary' are the only valid values for now. **Default**: 'file'. **Required**: No.
:save_credentials_file (str): Destination of credentials file. **Required**: Yes, only if *save_credentials_backend* is 'file'.
:save_credentials_dict (dict): Dict to use for storing credentials. **Required**: Yes, only if *save_credentials_backend* is 'dictionary'.
:save_credentials_key (str): Key within the *save_credentials_dict* to store the credentials in. **Required**: Yes, only if *save_credentials_backend* is 'dictionary'.
:get_refresh_token (bool): True if you want to retrieve refresh token along with access token. **Default**: False. **Required**: No.
:oauth_scope (list of str): OAuth scope to authenticate. **Default**: ['https://www.googleapis.com/auth/drive']. **Required**: No.
.. _delegated: https://developers.google.com/admin-sdk/directory/v1/guides/delegation
Sample *settings.yaml*
______________________
::
client_config_backend: settings
client_config:
client_id: 9637341109347.apps.googleusercontent.com
client_secret: psDskOoWr1P602PXRTHi
save_credentials: True
save_credentials_backend: file
save_credentials_file: credentials.json
get_refresh_token: True
oauth_scope:
- https://www.googleapis.com/auth/drive.file
- https://www.googleapis.com/auth/drive.install
- https://www.googleapis.com/auth/drive.metadata
Building your own authentication flow
-------------------------------------
You might want to build your own authentication flow. For example, you might
want to integrate your existing website with Drive API. In that case, you can
customize the authentication flow as follows:
1. Get authentication Url from `GetAuthUrl()`_.
2. Ask users to visit the authentication Url and grant access to your application. Retrieve authentication code manually by user or automatically by building your own oauth2callback.
3. Call `Auth(code)`_ with the authentication code you retrieved from step 2.
Your *settings.yaml* will work for your customized authentication flow, too.
Here is a sample code for your customized authentication flow
.. code-block:: python
from pydrive2.auth import GoogleAuth
gauth = GoogleAuth()
auth_url = gauth.GetAuthUrl() # Create authentication url user needs to visit
code = AskUserToVisitLinkAndGiveCode(auth_url) # Your customized authentication flow
gauth.Auth(code) # Authorize and build service from the code
.. _`GetAuthUrl()`: /PyDrive2/pydrive2/#pydrive2.auth.GoogleAuth.GetAuthUrl
.. _`Auth(code)`: /PyDrive2/pydrive2/#pydrive2.auth.GoogleAuth.Auth
Authentication with a service account
--------------------------------------
A `Service account`_ is a special type of Google account intended to represent a
non-human user that needs to authenticate and be authorized to access data in
Google APIs.
Typically, service accounts are used in scenarios such as:
- Running workloads on virtual machines (VMs).
- Running workloads on data centers that call Google APIs.
- Running workloads which are not tied to the lifecycle of a human user.
If we use OAuth client ID we need to do one manual login into the account with
`LocalWebserverAuth()`_. If we use a service account, the login is automatic.
.. code-block:: python
from pydrive2.auth import GoogleAuth
from pydrive2.drive import GoogleDrive
def login_with_service_account():
"""
Google Drive service with a service account.
note: for the service account to work, you need to share the folder or
files with the service account email.
:return: google auth
"""
# Define the settings dict to use a service account
# We also can use all options available for the settings dict like
# oauth_scope,save_credentials,etc.
settings = {
"client_config_backend": "service",
"service_config": {
"client_json_file_path": "service-secrets.json",
}
}
# Create instance of GoogleAuth
gauth = GoogleAuth(settings=settings)
# Authenticate
gauth.ServiceAuth()
return gauth
.. _`Service account`: https://developers.google.com/workspace/guides/create-credentials#service-account PyDrive2-1.15.0/docs/pydrive2.rst 0000664 0000000 0000000 00000001475 14334672560 0016511 0 ustar 00root root 0000000 0000000 pydrive2 package
================
pydrive2.apiattr module
-----------------------
.. automodule:: pydrive2.apiattr
:members:
:undoc-members:
:show-inheritance:
pydrive2.auth module
--------------------
.. automodule:: pydrive2.auth
:members:
:undoc-members:
:show-inheritance:
pydrive2.drive module
---------------------
.. automodule:: pydrive2.drive
:members:
:undoc-members:
:show-inheritance:
pydrive2.files module
---------------------
.. automodule:: pydrive2.files
:members:
:undoc-members:
:show-inheritance:
pydrive2.settings module
------------------------
.. automodule:: pydrive2.settings
:members:
:undoc-members:
:show-inheritance:
pydrive2.fs module
------------------------
.. autoclass:: pydrive2.fs.GDriveFileSystem
:show-inheritance:
PyDrive2-1.15.0/docs/quickstart.rst 0000664 0000000 0000000 00000013727 14334672560 0017142 0 ustar 00root root 0000000 0000000 Quickstart
=============================
Authentication
--------------
Drive API requires OAuth2.0 for authentication. *PyDrive2* makes your life much easier by handling complex authentication steps for you.
1. Go to `APIs Console`_ and make your own project.
2. Search for 'Google Drive API', select the entry, and click 'Enable'.
3. Select 'Credentials' from the left menu, click 'Create Credentials', select 'OAuth client ID'.
4. Now, the product name and consent screen need to be set -> click 'Configure consent screen' and follow the instructions. Once finished:
a. Select 'Application type' to be *Web application*.
b. Enter an appropriate name.
c. Input *http://localhost:8080/* for 'Authorized redirect URIs'.
d. Click 'Create'.
5. Click 'Download JSON' on the right side of Client ID to download **client_secret_<really long ID>.json**.
The downloaded file has all authentication information of your application.
**Rename the file to "client_secrets.json" and place it in your working directory.**
Create *quickstart.py* file and copy and paste the following code.
.. code-block:: python
from pydrive2.auth import GoogleAuth
gauth = GoogleAuth()
gauth.LocalWebserverAuth() # Creates local webserver and auto handles authentication.
Run this code with *python quickstart.py* and you will see a web browser asking you for authentication. Click *Accept* and you are done with authentication. For more details, take a look at documentation: `OAuth made easy`_
.. _`APIs Console`: https://console.developers.google.com/iam-admin/projects
.. _`OAuth made easy`: /PyDrive2/oauth/
Creating and Updating Files
---------------------------
There are many methods to create and update file metadata and contents. With *PyDrive2*, all you have to know is
`Upload()`_ method which makes optimal API call for you. Add the following code to your *quickstart.py* and run it.
.. code-block:: python
from pydrive2.drive import GoogleDrive
drive = GoogleDrive(gauth)
file1 = drive.CreateFile({'title': 'Hello.txt'}) # Create GoogleDriveFile instance with title 'Hello.txt'.
file1.SetContentString('Hello World!') # Set content of the file from given string.
file1.Upload()
This code will create a new file with title *Hello.txt* and its content *Hello World!*. You can see and open this
file from `Google Drive`_ if you want. For more details, take a look at documentation: `File management made easy`_
.. _`Upload()`: /PyDrive2/pydrive2/#pydrive2.files.GoogleDriveFile.Upload
.. _`Google Drive`: https://drive.google.com
.. _`File management made easy`: /PyDrive2/filemanagement/
Listing Files
-------------
*PyDrive2* handles paginations and parses response as list of `GoogleDriveFile`_. Let's get title and id of all the files in the root folder of Google Drive. Again, add the following code to *quickstart.py* and execute it.
.. code-block:: python
# Auto-iterate through all files that matches this query
file_list = drive.ListFile({'q': "'root' in parents and trashed=false"}).GetList()
for file1 in file_list:
print('title: %s, id: %s' % (file1['title'], file1['id']))
Creating a Folder
-----------------
GoogleDrive treats everything as a file and assigns different mimetypes for different file formats. A folder is thus
also a file with a special mimetype. The code below allows you to add a subfolder to an existing folder.
.. code-block:: python
def create_folder(parent_folder_id, subfolder_name):
newFolder = drive.CreateFile({'title': subfolder_name, "parents": [{"kind": "drive#fileLink", "id": \
parent_folder_id}],"mimeType": "application/vnd.google-apps.folder"})
newFolder.Upload()
return newFolder
Return File ID via File Title
-----------------------------
A common task is providing the Google Drive API with a file id.
``get_id_of_title`` demonstrates a simple workflow to return the id of a file handle by searching the file titles in a
given directory. The function takes two arguments, ``title`` and ``parent_directory_id``. ``title`` is a string that
will be compared against file titles included in a directory identified by the ``parent_directory_id``.
.. code-block:: python
def get_id_of_title(title,parent_directory_id):
foldered_list=drive.ListFile({'q': "'"+parent_directory_id+"' in parents and trashed=false"}).GetList()
for file in foldered_list:
if(file['title']==title):
return file['id']
return None
Browse Folders
--------------
This returns a json output of the data in a directory with some important attributes like size, title, parent_id.
.. code-block:: python
browsed=[]
def folder_browser(folder_list,parent_id):
for element in folder_list:
if type(element) is dict:
print (element['title'])
else:
print (element)
print("Enter Name of Folder You Want to Use\nEnter '/' to use current folder\nEnter ':' to create New Folder and
use that" )
inp=input()
if inp=='/':
return parent_id
elif inp==':':
print("Enter Name of Folder You Want to Create")
inp=input()
newfolder=create_folder(parent_id,inp)
if not os.path.exists(HOME_DIRECTORY+ROOT_FOLDER_NAME+os.path.sep+USERNAME):
os.makedirs(HOME_DIRECTORY+ROOT_FOLDER_NAME+os.path.sep+USERNAME)
return newfolder['id']
else:
folder_selected=inp
for element in folder_list:
if type(element) is dict:
if element["title"]==folder_selected:
struc=element["list"]
browsed.append(folder_selected)
print("Inside "+folder_selected)
return folder_browser(struc,element['id'])
Here, ``folder_list`` is the list of folders present in the directory currently being browsed.
You will see title and id of all the files and folders in root folder of your Google Drive. For more details, refer to the documentation: `File listing made easy`_
.. _`GoogleDriveFile`: /PyDrive2/pydrive2/#pydrive2.files.GoogleDriveFile
.. _`File listing made easy`: /PyDrive2/filelist/
PyDrive2-1.15.0/docs/requirements.txt 0000664 0000000 0000000 00000000014 14334672560 0017463 0 ustar 00root root 0000000 0000000 sphinx
furo
PyDrive2-1.15.0/examples/ 0000775 0000000 0000000 00000000000 14334672560 0015072 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/examples/Upload-and-autoconvert-to-Google-Drive-Format-Example/ 0000775 0000000 0000000 00000000000 14334672560 0027145 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/examples/Upload-and-autoconvert-to-Google-Drive-Format-Example/README 0000664 0000000 0000000 00000001134 14334672560 0030024 0 ustar 00root root 0000000 0000000 This script uploads a file to a folder on Google Drive. If the file can be
converted into the Google format, such as a Google Sheet, it will be converted.
To run the script you need to complete the following steps:
1. Update settings.yaml with your:
- client ID,
- client secret, and
- credential storage path
2. Update upload.py with the location you save your settings.yaml file. This
can be an absolute or relative path.
This example is adapted from Evren Yurtesen (https://github.com/yurtesen/)
with his consent.
Originally posted here: https://github.com/prasmussen/gdrive/issues/154
PyDrive2-1.15.0/examples/Upload-and-autoconvert-to-Google-Drive-Format-Example/settings.yaml 0000664 0000000 0000000 00000001121 14334672560 0031664 0 ustar 00root root 0000000 0000000 # Original author: Evren Yurtesen - https://github.com/yurtesen/
client_config_backend: settings
client_config:
client_id:
client_secret:
auth_uri: https://accounts.google.com/o/oauth2/auth
token_uri: https://accounts.google.com/o/oauth2/token
redirect_uri: urn:ietf:wg:oauth:2.0:oob
revoke_uri:
save_credentials: True
save_credentials_backend: file
save_credentials_file:
get_refresh_token: True
oauth_scope:
- https://www.googleapis.com/auth/drive.file
PyDrive2-1.15.0/examples/Upload-and-autoconvert-to-Google-Drive-Format-Example/upload.py 0000664 0000000 0000000 00000003502 14334672560 0031003 0 ustar 00root root 0000000 0000000 # Original author: Evren Yurtesen - https://github.com/yurtesen/
"""
Uploads a file to a specific folder in Google Drive and converts it to a
Google Doc/Sheet/etc. if possible.
usage: upload.py
example usage: upload.py 0B5XXXXY9KddXXXXXXXA2c3ZXXXX /path/to/my/file
"""
import sys
from os import path
from pydrive2.auth import GoogleAuth
from pydrive2.drive import GoogleDrive
from pydrive2.settings import LoadSettingsFile
# Update this value to the correct location.
# e.g. "/usr/local/scripts/pydrive/settings.yaml"
PATH_TO_SETTINGS_FILE = None
assert PATH_TO_SETTINGS_FILE is not None # Fail if path not specified.
gauth = GoogleAuth()
gauth.settings = LoadSettingsFile(filename=PATH_TO_SETTINGS_FILE)
gauth.CommandLineAuth()
drive = GoogleDrive(gauth)
# If provided arguments incorrect, print usage instructions and exit.
if len(sys.argv) < 2:
print("usage: upload.py ")
exit(1) # Exit program as incorrect parameters provided.
parentId = sys.argv[1]
myFilePath = sys.argv[2]
myFileName = path.basename(sys.argv[2])
# Check if file name already exists in folder.
file_list = drive.ListFile(
{
"q": '"{}" in parents and title="{}" and trashed=false'.format(
parentId, myFileName
)
}
).GetList()
# If file is found, update it, otherwise create new file.
if len(file_list) == 1:
myFile = file_list[0]
else:
myFile = drive.CreateFile(
{"parents": [{"kind": "drive#fileLink", "id": parentId}]}
)
# Upload new file content.
myFile.SetContentFile(myFilePath)
myFile["title"] = myFileName
# The `convert` flag indicates to Google Drive whether to convert the
# uploaded file into a Google Drive native format, i.e. Google Sheet for
# CSV or Google Document for DOCX.
myFile.Upload({"convert": True})
PyDrive2-1.15.0/examples/strip_bom_example.py 0000664 0000000 0000000 00000003423 14334672560 0021157 0 ustar 00root root 0000000 0000000 from pydrive2.auth import GoogleAuth
from pydrive2.drive import GoogleDrive
# Authenticate the client.
gauth = GoogleAuth()
gauth.LocalWebserverAuth()
drive = GoogleDrive(gauth)
# Create a file, set content, and upload.
file1 = drive.CreateFile()
original_file_content = "Generic, non-exhaustive\n ASCII test string."
file1.SetContentString(original_file_content)
# {'convert': True} triggers conversion to a Google Drive document.
file1.Upload({"convert": True})
# Download the file.
file2 = drive.CreateFile({"id": file1["id"]})
# Print content before download.
print("Original text:")
print(bytes(original_file_content.encode("unicode-escape")))
print("Number of chars: %d" % len(original_file_content))
print("")
# Original text:
# Generic, non-exhaustive\n ASCII test string.
# Number of chars: 43
# Download document as text file WITH the BOM and print the contents.
content_with_bom = file2.GetContentString(mimetype="text/plain")
print("Content with BOM:")
print(bytes(content_with_bom.encode("unicode-escape")))
print("Number of chars: %d" % len(content_with_bom))
print("")
# Content with BOM:
# \ufeffGeneric, non-exhaustive\r\n ASCII test string.
# Number of chars: 45
# Download document as text file WITHOUT the BOM and print the contents.
content_without_bom = file2.GetContentString(
mimetype="text/plain", remove_bom=True
)
print("Content without BOM:")
print(bytes(content_without_bom.encode("unicode-escape")))
print("Number of chars: %d" % len(content_without_bom))
print("")
# Content without BOM:
# Generic, non-exhaustive\r\n ASCII test string.
# Number of chars: 44
# *NOTE*: When downloading a Google Drive document as text file, line-endings
# are converted to the Windows-style: \r\n.
# Delete the file as necessary.
file1.Delete()
PyDrive2-1.15.0/examples/using_folders.py 0000664 0000000 0000000 00000001430 14334672560 0020305 0 ustar 00root root 0000000 0000000 from pydrive2.auth import GoogleAuth
from pydrive2.drive import GoogleDrive
gauth = GoogleAuth()
gauth.LocalWebserverAuth()
drive = GoogleDrive(gauth)
# Create folder.
folder_metadata = {
"title": "",
# The mimetype defines this new file as a folder, so don't change this.
"mimeType": "application/vnd.google-apps.folder",
}
folder = drive.CreateFile(folder_metadata)
folder.Upload()
# Get folder info and print to screen.
folder_title = folder["title"]
folder_id = folder["id"]
print("title: %s, id: %s" % (folder_title, folder_id))
# Upload file to folder.
f = drive.CreateFile(
{"parents": [{"kind": "drive#fileLink", "id": folder_id}]}
)
# Make sure to add the path to the file to upload below.
f.SetContentFile("")
f.Upload()
PyDrive2-1.15.0/pydrive2/ 0000775 0000000 0000000 00000000000 14334672560 0015020 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/pydrive2/__init__.py 0000664 0000000 0000000 00000000000 14334672560 0017117 0 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/pydrive2/__pyinstaller/ 0000775 0000000 0000000 00000000000 14334672560 0017664 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/pydrive2/__pyinstaller/__init__.py 0000664 0000000 0000000 00000000216 14334672560 0021774 0 ustar 00root root 0000000 0000000 import os
def get_hook_dirs():
return [os.path.dirname(__file__)]
def get_PyInstaller_tests():
return [os.path.dirname(__file__)]
PyDrive2-1.15.0/pydrive2/__pyinstaller/hook-googleapiclient.py 0000664 0000000 0000000 00000000401 14334672560 0024334 0 ustar 00root root 0000000 0000000 from PyInstaller.utils.hooks import ( # pylint: disable=import-error
copy_metadata,
collect_data_files,
)
datas = copy_metadata("google-api-python-client")
datas += collect_data_files(
"googleapiclient", excludes=["*.txt", "**/__pycache__"]
)
PyDrive2-1.15.0/pydrive2/__pyinstaller/test_hook-googleapiclient.py 0000664 0000000 0000000 00000002157 14334672560 0025405 0 ustar 00root root 0000000 0000000 import subprocess
from PyInstaller import __main__ as pyi_main
# NOTE: importlib.resources.contents is available in py3.7+, but due to how
# pyinstaller handles importlib, we need to use the importlib_resources
# backport if there are any resources methods that are not available in a given
# python version, which ends up being py<3.10
_APP_SOURCE = """
import sys
if sys.version_info >= (3, 10):
from importlib.resources import contents
else:
from importlib_resources import contents
import pydrive2.files
cache_files = contents(
"googleapiclient.discovery_cache.documents"
)
assert len(cache_files) > 0
"""
def test_pyi_hook_google_api_client(tmp_path):
app_name = "userapp"
workpath = tmp_path / "build"
distpath = tmp_path / "dist"
app = tmp_path / f"{app_name}.py"
app.write_text(_APP_SOURCE)
pyi_main.run(
[
"--workpath",
str(workpath),
"--distpath",
str(distpath),
"--specpath",
str(tmp_path),
str(app),
],
)
subprocess.run([str(distpath / app_name / app_name)], check=True)
PyDrive2-1.15.0/pydrive2/apiattr.py 0000664 0000000 0000000 00000012320 14334672560 0017034 0 ustar 00root root 0000000 0000000 class ApiAttribute:
"""A data descriptor that sets and returns values."""
def __init__(self, name):
"""Create an instance of ApiAttribute.
:param name: name of this attribute.
:type name: str.
"""
self.name = name
def __get__(self, obj, type=None):
"""Accesses value of this attribute."""
return obj.attr.get(self.name)
def __set__(self, obj, value):
"""Write value of this attribute."""
obj.attr[self.name] = value
if obj.dirty.get(self.name) is not None:
obj.dirty[self.name] = True
def __del__(self, obj=None):
"""Delete value of this attribute."""
if not obj:
return
del obj.attr[self.name]
if obj.dirty.get(self.name) is not None:
del obj.dirty[self.name]
class ApiAttributeMixin:
"""Mixin to initialize required global variables to use ApiAttribute."""
def __init__(self):
self.attr = {}
self.dirty = {}
self.http = None # Any element may make requests and will require this
# field.
class ApiResource(dict):
"""Super class of all api resources.
Inherits and behaves as a python dictionary to handle api resources.
Save clean copy of metadata in self.metadata as a dictionary.
Provides changed metadata elements to efficiently update api resources.
"""
auth = ApiAttribute("auth")
def __init__(self, *args, **kwargs):
"""Create an instance of ApiResource."""
super().__init__()
self.update(*args, **kwargs)
self.metadata = dict(self)
def __getitem__(self, key):
"""Overwritten method of dictionary.
:param key: key of the query.
:type key: str.
:returns: value of the query.
"""
return dict.__getitem__(self, key)
def __setitem__(self, key, val):
"""Overwritten method of dictionary.
:param key: key of the query.
:type key: str.
:param val: value of the query.
"""
dict.__setitem__(self, key, val)
def __repr__(self):
"""Overwritten method of dictionary."""
dict_representation = dict.__repr__(self)
return f"{type(self).__name__}({dict_representation})"
def update(self, *args, **kwargs):
"""Overwritten method of dictionary."""
for k, v in dict(*args, **kwargs).items():
self[k] = v
def UpdateMetadata(self, metadata=None):
"""Update metadata and mark all of them to be clean."""
if metadata:
self.update(metadata)
self.metadata = dict(self)
def GetChanges(self):
"""Returns changed metadata elements to update api resources efficiently.
:returns: dict -- changed metadata elements.
"""
dirty = {}
for key in self:
if self.metadata.get(key) is None:
dirty[key] = self[key]
elif self.metadata[key] != self[key]:
dirty[key] = self[key]
return dirty
class ApiResourceList(ApiAttributeMixin, ApiResource):
"""Abstract class of all api list resources.
Inherits ApiResource and builds iterator to list any API resource.
"""
metadata = ApiAttribute("metadata")
def __init__(self, auth=None, metadata=None):
"""Create an instance of ApiResourceList.
:param auth: authorized GoogleAuth instance.
:type auth: GoogleAuth.
:param metadata: parameter to send to list command.
:type metadata: dict.
"""
ApiAttributeMixin.__init__(self)
ApiResource.__init__(self)
self.auth = auth
self.UpdateMetadata()
if metadata:
self.update(metadata)
def __iter__(self):
"""Returns iterator object.
:returns: ApiResourceList -- self
"""
return self
def __next__(self):
"""Make API call to list resources and return them.
Auto updates 'pageToken' every time it makes API call and
raises StopIteration when it reached the end of iteration.
:returns: list -- list of API resources.
:raises: StopIteration
"""
if "pageToken" in self and self["pageToken"] is None:
raise StopIteration
result = self._GetList()
self["pageToken"] = self.metadata.get("nextPageToken")
return result
def GetList(self):
"""Get list of API resources.
If 'maxResults' is not specified, it will automatically iterate through
every resources available. Otherwise, it will make API call once and
update 'pageToken'.
:returns: list -- list of API resources.
"""
if self.get("maxResults") is None:
self["maxResults"] = 1000
result = []
for x in self:
result.extend(x)
del self["maxResults"]
return result
else:
return next(self)
def _GetList(self):
"""Helper function which actually makes API call.
Should be overwritten.
:raises: NotImplementedError
"""
raise NotImplementedError
def Reset(self):
"""Resets current iteration"""
if "pageToken" in self:
del self["pageToken"]
PyDrive2-1.15.0/pydrive2/auth.py 0000664 0000000 0000000 00000063714 14334672560 0016346 0 ustar 00root root 0000000 0000000 import json
import webbrowser
import httplib2
import oauth2client.clientsecrets as clientsecrets
import threading
from googleapiclient.discovery import build
from functools import wraps
from oauth2client.service_account import ServiceAccountCredentials
from oauth2client.client import FlowExchangeError
from oauth2client.client import AccessTokenRefreshError
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.client import OOB_CALLBACK_URN
from oauth2client.contrib.dictionary_storage import DictionaryStorage
from oauth2client.file import Storage
from oauth2client.tools import ClientRedirectHandler
from oauth2client.tools import ClientRedirectServer
from oauth2client._helpers import scopes_to_string
from .apiattr import ApiAttribute
from .apiattr import ApiAttributeMixin
from .settings import LoadSettingsFile
from .settings import ValidateSettings
from .settings import SettingsError
from .settings import InvalidConfigError
class AuthError(Exception):
"""Base error for authentication/authorization errors."""
class InvalidCredentialsError(IOError):
"""Error trying to read credentials file."""
class AuthenticationRejected(AuthError):
"""User rejected authentication."""
class AuthenticationError(AuthError):
"""General authentication error."""
class RefreshError(AuthError):
"""Access token refresh error."""
def LoadAuth(decoratee):
"""Decorator to check if the auth is valid and loads auth if not."""
@wraps(decoratee)
def _decorated(self, *args, **kwargs):
# Initialize auth if needed.
if self.auth is None:
self.auth = GoogleAuth()
# Re-create access token if it expired.
if self.auth.access_token_expired:
if getattr(self.auth, "auth_method", False) == "service":
self.auth.ServiceAuth()
else:
self.auth.LocalWebserverAuth()
# Initialise service if not built yet.
if self.auth.service is None:
self.auth.Authorize()
# Ensure that a thread-safe HTTP object is provided.
if (
kwargs is not None
and "param" in kwargs
and kwargs["param"] is not None
and "http" in kwargs["param"]
and kwargs["param"]["http"] is not None
):
self.http = kwargs["param"]["http"]
del kwargs["param"]["http"]
else:
# If HTTP object not specified, create or resuse an HTTP
# object from the thread local storage.
if not getattr(self.auth.thread_local, "http", None):
self.auth.thread_local.http = self.auth.Get_Http_Object()
self.http = self.auth.thread_local.http
return decoratee(self, *args, **kwargs)
return _decorated
def CheckServiceAuth(decoratee):
"""Decorator to authorize service account."""
@wraps(decoratee)
def _decorated(self, *args, **kwargs):
self.auth_method = "service"
dirty = False
save_credentials = self.settings.get("save_credentials")
if self.credentials is None and save_credentials:
self.LoadCredentials()
if self.credentials is None:
decoratee(self, *args, **kwargs)
self.Authorize()
dirty = True
elif self.access_token_expired:
self.Refresh()
dirty = True
self.credentials.set_store(self._default_storage)
if dirty and save_credentials:
self.SaveCredentials()
return _decorated
def CheckAuth(decoratee):
"""Decorator to check if it requires OAuth2 flow request."""
@wraps(decoratee)
def _decorated(self, *args, **kwargs):
dirty = False
code = None
save_credentials = self.settings.get("save_credentials")
if self.credentials is None and save_credentials:
self.LoadCredentials()
if self.flow is None:
self.GetFlow()
if self.credentials is None:
code = decoratee(self, *args, **kwargs)
dirty = True
else:
if self.access_token_expired:
if self.credentials.refresh_token is not None:
self.Refresh()
else:
code = decoratee(self, *args, **kwargs)
dirty = True
if code is not None:
self.Auth(code)
self.credentials.set_store(self._default_storage)
if dirty and save_credentials:
self.SaveCredentials()
return _decorated
class GoogleAuth(ApiAttributeMixin):
"""Wrapper class for oauth2client library in google-api-python-client.
Loads all settings and credentials from one 'settings.yaml' file
and performs common OAuth2.0 related functionality such as authentication
and authorization.
"""
DEFAULT_SETTINGS = {
"client_config_backend": "file",
"client_config_file": "client_secrets.json",
"save_credentials": False,
"oauth_scope": ["https://www.googleapis.com/auth/drive"],
}
CLIENT_CONFIGS_LIST = [
"client_id",
"client_secret",
"auth_uri",
"token_uri",
"revoke_uri",
"redirect_uri",
]
SERVICE_CONFIGS_LIST = ["client_user_email"]
settings = ApiAttribute("settings")
client_config = ApiAttribute("client_config")
flow = ApiAttribute("flow")
credentials = ApiAttribute("credentials")
http = ApiAttribute("http")
service = ApiAttribute("service")
auth_method = ApiAttribute("auth_method")
def __init__(
self, settings_file="settings.yaml", http_timeout=None, settings=None
):
"""Create an instance of GoogleAuth.
:param settings_file: path of settings file. 'settings.yaml' by default.
:type settings_file: str.
:param settings: settings dict.
:type settings: dict.
"""
self.http_timeout = http_timeout
ApiAttributeMixin.__init__(self)
self.thread_local = threading.local()
self.client_config = {}
if settings is None and settings_file:
try:
settings = LoadSettingsFile(settings_file)
except SettingsError:
pass
self.settings = settings or self.DEFAULT_SETTINGS
ValidateSettings(self.settings)
storages, default = self._InitializeStoragesFromSettings()
self._storages = storages
self._default_storage = default
@property
def access_token_expired(self):
"""Checks if access token doesn't exist or is expired.
:returns: bool -- True if access token doesn't exist or is expired.
"""
if self.credentials is None:
return True
return self.credentials.access_token_expired
@CheckAuth
def LocalWebserverAuth(
self, host_name="localhost", port_numbers=None, launch_browser=True
):
"""Authenticate and authorize from user by creating local web server and
retrieving authentication code.
This function is not for web server application. It creates local web
server for user from standalone application.
:param host_name: host name of the local web server.
:type host_name: str.
:param port_numbers: list of port numbers to be tried to used.
:type port_numbers: list.
:param launch_browser: should browser be launched automatically
:type launch_browser: bool
:returns: str -- code returned from local web server
:raises: AuthenticationRejected, AuthenticationError
"""
if port_numbers is None:
port_numbers = [
8080,
8090,
] # Mutable objects should not be default
# values, as each call's changes are global.
success = False
port_number = 0
for port in port_numbers:
port_number = port
try:
httpd = ClientRedirectServer(
(host_name, port), ClientRedirectHandler
)
except OSError:
pass
else:
success = True
break
if success:
oauth_callback = f"http://{host_name}:{port_number}/"
else:
print(
"Failed to start a local web server. Please check your firewall"
)
print(
"settings and locally running programs that may be blocking or"
)
print("using configured ports. Default ports are 8080 and 8090.")
raise AuthenticationError()
self.flow.redirect_uri = oauth_callback
authorize_url = self.GetAuthUrl()
if launch_browser:
webbrowser.open(authorize_url, new=1, autoraise=True)
print("Your browser has been opened to visit:")
else:
print("Open your browser to visit:")
print()
print(" " + authorize_url)
print()
httpd.handle_request()
if "error" in httpd.query_params:
print("Authentication request was rejected")
raise AuthenticationRejected("User rejected authentication")
if "code" in httpd.query_params:
return httpd.query_params["code"]
else:
print(
'Failed to find "code" in the query parameters of the redirect.'
)
print("Try command-line authentication")
raise AuthenticationError("No code found in redirect")
@CheckAuth
def CommandLineAuth(self):
"""Authenticate and authorize from user by printing authentication url
retrieving authentication code from command-line.
:returns: str -- code returned from commandline.
"""
self.flow.redirect_uri = OOB_CALLBACK_URN
authorize_url = self.GetAuthUrl()
print("Go to the following link in your browser:")
print()
print(" " + authorize_url)
print()
return input("Enter verification code: ").strip()
@CheckServiceAuth
def ServiceAuth(self):
"""Authenticate and authorize using P12 private key, client id
and client email for a Service account.
:raises: AuthError, InvalidConfigError
"""
if set(self.SERVICE_CONFIGS_LIST) - set(self.client_config):
self.LoadServiceConfigSettings()
scopes = scopes_to_string(self.settings["oauth_scope"])
keyfile_name = self.client_config.get("client_json_file_path")
keyfile_dict = self.client_config.get("client_json_dict")
keyfile_json = self.client_config.get("client_json")
if not keyfile_dict and keyfile_json:
# Compensating for missing ServiceAccountCredentials.from_json_keyfile
keyfile_dict = json.loads(keyfile_json)
if keyfile_dict:
self.credentials = (
ServiceAccountCredentials.from_json_keyfile_dict(
keyfile_dict=keyfile_dict, scopes=scopes
)
)
elif keyfile_name:
self.credentials = (
ServiceAccountCredentials.from_json_keyfile_name(
filename=keyfile_name, scopes=scopes
)
)
else:
service_email = self.client_config["client_service_email"]
file_path = self.client_config["client_pkcs12_file_path"]
self.credentials = ServiceAccountCredentials.from_p12_keyfile(
service_account_email=service_email,
filename=file_path,
scopes=scopes,
)
user_email = self.client_config.get("client_user_email")
if user_email:
self.credentials = self.credentials.create_delegated(
sub=user_email
)
def _InitializeStoragesFromSettings(self):
result = {"file": None, "dictionary": None}
backend = self.settings.get("save_credentials_backend")
save_credentials = self.settings.get("save_credentials")
if backend == "file":
credentials_file = self.settings.get("save_credentials_file")
if credentials_file is None:
raise InvalidConfigError(
"Please specify credentials file to read"
)
result[backend] = Storage(credentials_file)
elif backend == "dictionary":
creds_dict = self.settings.get("save_credentials_dict")
if creds_dict is None:
raise InvalidConfigError("Please specify credentials dict")
creds_key = self.settings.get("save_credentials_key")
if creds_key is None:
raise InvalidConfigError("Please specify credentials key")
result[backend] = DictionaryStorage(creds_dict, creds_key)
elif save_credentials:
raise InvalidConfigError(
"Unknown save_credentials_backend: %s" % backend
)
return result, result.get(backend)
def LoadCredentials(self, backend=None):
"""Loads credentials or create empty credentials if it doesn't exist.
:param backend: target backend to save credential to.
:type backend: str.
:raises: InvalidConfigError
"""
if backend is None:
backend = self.settings.get("save_credentials_backend")
if backend is None:
raise InvalidConfigError("Please specify credential backend")
if backend == "file":
self.LoadCredentialsFile()
elif backend == "dictionary":
self._LoadCredentialsDictionary()
else:
raise InvalidConfigError("Unknown save_credentials_backend")
def LoadCredentialsFile(self, credentials_file=None):
"""Loads credentials or create empty credentials if it doesn't exist.
Loads credentials file from path in settings if not specified.
:param credentials_file: path of credentials file to read.
:type credentials_file: str.
:raises: InvalidConfigError, InvalidCredentialsError
"""
if credentials_file is None:
self._default_storage = self._storages["file"]
if self._default_storage is None:
raise InvalidConfigError(
"Backend `file` is not configured, specify "
"credentials file to read in the settings "
"file or pass an explicit value"
)
else:
self._default_storage = Storage(credentials_file)
try:
self.credentials = self._default_storage.get()
except OSError:
raise InvalidCredentialsError(
"Credentials file cannot be symbolic link"
)
if self.credentials:
self.credentials.set_store(self._default_storage)
def _LoadCredentialsDictionary(self):
self._default_storage = self._storages["dictionary"]
if self._default_storage is None:
raise InvalidConfigError(
"Backend `dictionary` is not configured, specify "
"credentials dict and key to read in the settings file"
)
self.credentials = self._default_storage.get()
if self.credentials:
self.credentials.set_store(self._default_storage)
def SaveCredentials(self, backend=None):
"""Saves credentials according to specified backend.
If you have any specific credentials backend in mind, don't use this
function and use the corresponding function you want.
:param backend: backend to save credentials.
:type backend: str.
:raises: InvalidConfigError
"""
if backend is None:
backend = self.settings.get("save_credentials_backend")
if backend is None:
raise InvalidConfigError("Please specify credential backend")
if backend == "file":
self.SaveCredentialsFile()
elif backend == "dictionary":
self._SaveCredentialsDictionary()
else:
raise InvalidConfigError("Unknown save_credentials_backend")
def SaveCredentialsFile(self, credentials_file=None):
"""Saves credentials to the file in JSON format.
:param credentials_file: destination to save file to.
:type credentials_file: str.
:raises: InvalidConfigError, InvalidCredentialsError
"""
if self.credentials is None:
raise InvalidCredentialsError("No credentials to save")
if credentials_file is None:
storage = self._storages["file"]
if storage is None:
raise InvalidConfigError(
"Backend `file` is not configured, specify "
"credentials file to read in the settings "
"file or pass an explicit value"
)
else:
storage = Storage(credentials_file)
try:
storage.put(self.credentials)
except OSError:
raise InvalidCredentialsError(
"Credentials file cannot be symbolic link"
)
def _SaveCredentialsDictionary(self):
if self.credentials is None:
raise InvalidCredentialsError("No credentials to save")
storage = self._storages["dictionary"]
if storage is None:
raise InvalidConfigError(
"Backend `dictionary` is not configured, specify "
"credentials dict and key to write in the settings file"
)
storage.put(self.credentials)
def LoadClientConfig(self, backend=None):
"""Loads client configuration according to specified backend.
If you have any specific backend to load client configuration from in mind,
don't use this function and use the corresponding function you want.
:param backend: backend to load client configuration from.
:type backend: str.
:raises: InvalidConfigError
"""
if backend is None:
backend = self.settings.get("client_config_backend")
if backend is None:
raise InvalidConfigError(
"Please specify client config backend"
)
if backend == "file":
self.LoadClientConfigFile()
elif backend == "settings":
self.LoadClientConfigSettings()
elif backend == "service":
self.LoadServiceConfigSettings()
else:
raise InvalidConfigError("Unknown client_config_backend")
def LoadClientConfigFile(self, client_config_file=None):
"""Loads client configuration file downloaded from APIs console.
Loads client config file from path in settings if not specified.
:param client_config_file: path of client config file to read.
:type client_config_file: str.
:raises: InvalidConfigError
"""
if client_config_file is None:
client_config_file = self.settings["client_config_file"]
try:
client_type, client_info = clientsecrets.loadfile(
client_config_file
)
except clientsecrets.InvalidClientSecretsError as error:
raise InvalidConfigError("Invalid client secrets file %s" % error)
if client_type not in (
clientsecrets.TYPE_WEB,
clientsecrets.TYPE_INSTALLED,
):
raise InvalidConfigError(
"Unknown client_type of client config file"
)
# General settings.
try:
config_index = [
"client_id",
"client_secret",
"auth_uri",
"token_uri",
]
for config in config_index:
self.client_config[config] = client_info[config]
self.client_config["revoke_uri"] = client_info.get("revoke_uri")
self.client_config["redirect_uri"] = client_info["redirect_uris"][
0
]
except KeyError:
raise InvalidConfigError("Insufficient client config in file")
# Service auth related fields.
service_auth_config = ["client_email"]
try:
for config in service_auth_config:
self.client_config[config] = client_info[config]
except KeyError:
pass # The service auth fields are not present, handling code can go here.
def LoadServiceConfigSettings(self):
"""Loads client configuration from settings.
:raises: InvalidConfigError
"""
configs = [
"client_json_file_path",
"client_json_dict",
"client_json",
"client_pkcs12_file_path",
]
for config in configs:
value = self.settings["service_config"].get(config)
if value:
self.client_config[config] = value
break
else:
raise InvalidConfigError(
f"One of {configs} is required for service authentication"
)
if config == "client_pkcs12_file_path":
self.SERVICE_CONFIGS_LIST.append("client_service_email")
for config in self.SERVICE_CONFIGS_LIST:
try:
self.client_config[config] = self.settings["service_config"][
config
]
except KeyError:
err = "Insufficient service config in settings"
err += f"\n\nMissing: {config} key."
raise InvalidConfigError(err)
def LoadClientConfigSettings(self):
"""Loads client configuration from settings file.
:raises: InvalidConfigError
"""
for config in self.CLIENT_CONFIGS_LIST:
try:
self.client_config[config] = self.settings["client_config"][
config
]
except KeyError:
raise InvalidConfigError(
"Insufficient client config in settings"
)
    def GetFlow(self):
        """Gets Flow object from client configuration.

        Lazily loads the client config first if any required key is
        missing, then builds an ``OAuth2WebServerFlow``.

        :raises: InvalidConfigError
        """
        # Ensure all required client-config keys are present before use.
        if not all(
            config in self.client_config for config in self.CLIENT_CONFIGS_LIST
        ):
            self.LoadClientConfig()
        constructor_kwargs = {
            "redirect_uri": self.client_config["redirect_uri"],
            "auth_uri": self.client_config["auth_uri"],
            "token_uri": self.client_config["token_uri"],
            "access_type": "online",
        }
        # NOTE(review): assumes "revoke_uri" key always exists (it is set
        # to None when absent by the file-loading path) — confirm the
        # settings-loading path also guarantees it.
        if self.client_config["revoke_uri"] is not None:
            constructor_kwargs["revoke_uri"] = self.client_config["revoke_uri"]
        self.flow = OAuth2WebServerFlow(
            self.client_config["client_id"],
            self.client_config["client_secret"],
            scopes_to_string(self.settings["oauth_scope"]),
            **constructor_kwargs,
        )
        # "offline" access is what yields a refresh token from Google.
        if self.settings.get("get_refresh_token"):
            self.flow.params.update(
                {"access_type": "offline", "approval_prompt": "force"}
            )
def Refresh(self):
"""Refreshes the access_token.
:raises: RefreshError
"""
if self.credentials is None:
raise RefreshError("No credential to refresh.")
if (
self.credentials.refresh_token is None
and self.auth_method != "service"
):
raise RefreshError(
"No refresh_token found."
"Please set access_type of OAuth to offline."
)
if self.http is None:
self.http = self._build_http()
try:
self.credentials.refresh(self.http)
except AccessTokenRefreshError as error:
raise RefreshError("Access token refresh failed: %s" % error)
def GetAuthUrl(self):
"""Creates authentication url where user visits to grant access.
:returns: str -- Authentication url.
"""
if self.flow is None:
self.GetFlow()
return self.flow.step1_get_authorize_url()
    def Auth(self, code):
        """Authenticate, authorize, and build service.

        Convenience wrapper: exchanges ``code`` for credentials, then
        authorizes the http client and builds the Drive service.

        :param code: Code for authentication.
        :type code: str.
        :raises: AuthenticationError
        """
        self.Authenticate(code)
        self.Authorize()
def Authenticate(self, code):
"""Authenticates given authentication code back from user.
:param code: Code for authentication.
:type code: str.
:raises: AuthenticationError
"""
if self.flow is None:
self.GetFlow()
try:
self.credentials = self.flow.step2_exchange(code)
except FlowExchangeError as e:
raise AuthenticationError("OAuth2 code exchange failed: %s" % e)
print("Authentication successful.")
def _build_http(self):
http = httplib2.Http(timeout=self.http_timeout)
# 308's are used by several Google APIs (Drive, YouTube)
# for Resumable Uploads rather than Permanent Redirects.
# This asks httplib2 to exclude 308s from the status codes
# it treats as redirects
# See also: https://stackoverflow.com/a/59850170/298182
try:
http.redirect_codes = http.redirect_codes - {308}
except AttributeError:
# http.redirect_codes does not exist in previous versions
# of httplib2, so pass
pass
return http
def Authorize(self):
"""Authorizes and builds service.
:raises: AuthenticationError
"""
if self.access_token_expired:
raise AuthenticationError(
"No valid credentials provided to authorize"
)
if self.http is None:
self.http = self._build_http()
self.http = self.credentials.authorize(self.http)
self.service = build(
"drive", "v2", http=self.http, cache_discovery=False
)
def Get_Http_Object(self):
"""Create and authorize an httplib2.Http object. Necessary for
thread-safety.
:return: The http object to be used in each call.
:rtype: httplib2.Http
"""
http = self._build_http()
http = self.credentials.authorize(http)
return http
PyDrive2-1.15.0/pydrive2/drive.py 0000664 0000000 0000000 00000003226 14334672560 0016506 0 ustar 00root root 0000000 0000000 from .apiattr import ApiAttributeMixin
from .files import GoogleDriveFile
from .files import GoogleDriveFileList
from .auth import LoadAuth
class GoogleDrive(ApiAttributeMixin):
    """Main Google Drive class.

    Thin factory around GoogleDriveFile / GoogleDriveFileList that binds
    them to a single authorized GoogleAuth instance.
    """

    def __init__(self, auth=None):
        """Create an instance of GoogleDrive.

        :param auth: authorized GoogleAuth instance.
        :type auth: pydrive2.auth.GoogleAuth.
        """
        ApiAttributeMixin.__init__(self)
        self.auth = auth

    def CreateFile(self, metadata=None):
        """Build a GoogleDriveFile that shares this instance's auth.

        No upload happens here.

        :param metadata: file resource to initialize GoogleDriveFile with.
        :type metadata: dict.
        :returns: pydrive2.files.GoogleDriveFile -- initialized with auth
            of this instance.
        """
        return GoogleDriveFile(auth=self.auth, metadata=metadata)

    def ListFile(self, param=None):
        """Build a GoogleDriveFileList that shares this instance's auth.

        No Files.List() request is made here.

        :param param: parameter to be sent to Files.List().
        :type param: dict.
        :returns: pydrive2.files.GoogleDriveFileList -- initialized with
            auth of this instance.
        """
        return GoogleDriveFileList(auth=self.auth, param=param)

    @LoadAuth
    def GetAbout(self):
        """Return information about the Google Drive of the auth instance.

        :returns: A dictionary of Google Drive information like user,
            usage, quota etc.
        """
        return self.auth.service.about().get().execute(http=self.http)
PyDrive2-1.15.0/pydrive2/files.py 0000664 0000000 0000000 00000075617 14334672560 0016514 0 ustar 00root root 0000000 0000000 import io
import mimetypes
import json
from googleapiclient import errors
from googleapiclient.http import MediaIoBaseUpload
from googleapiclient.http import MediaIoBaseDownload
from googleapiclient.http import DEFAULT_CHUNK_SIZE
from functools import wraps
from .apiattr import ApiAttribute
from .apiattr import ApiAttributeMixin
from .apiattr import ApiResource
from .apiattr import ApiResourceList
from .auth import LoadAuth
BLOCK_SIZE = 1024
# Usage: MIME_TYPE_TO_BOM[''][''].
MIME_TYPE_TO_BOM = {
"application/vnd.google-apps.document": {"text/plain": "\ufeff".encode()}
}
class FileNotUploadedError(RuntimeError):
    """Error trying to access metadata of a file that is not uploaded
    (no remote file id is available)."""
class ApiRequestError(IOError):
    """Wraps a googleapiclient HttpError and exposes its parsed details.

    ``self.error`` is the "error" object from the JSON response body,
    or an empty dict when the body is absent or not valid JSON.
    """

    def __init__(self, http_error):
        assert isinstance(http_error, errors.HttpError)
        # The response body is usually JSON, but proxies/load balancers
        # can return HTML or truncated bodies. Do not let the exception
        # constructor itself raise -- fall back to an empty error dict.
        try:
            content = json.loads(http_error.content.decode("utf-8"))
        except (ValueError, UnicodeDecodeError):
            content = None
        self.error = (
            content.get("error", {}) if isinstance(content, dict) else {}
        )
        # Initialize args for backward compatibility
        super().__init__(http_error)

    def GetField(self, field):
        """Returns the `field` from the first error.

        :param field: key to look up (e.g. "reason").
        :type field: str
        :returns: str -- the value, or "" when not present.
        """
        # Guard against an explicitly empty "errors" list as well as a
        # missing key (both yield the empty-dict fallback).
        error_items = self.error.get("errors") or [{}]
        return error_items[0].get(field, "")
class FileNotDownloadableError(RuntimeError):
    """Error trying to download a file that is not downloadable
    (metadata has neither downloadUrl nor a matching export link)."""
def LoadMetadata(decoratee):
    """Decorator that fetches file metadata on demand.

    If the file is not yet marked as uploaded, FetchMetadata() is called
    before delegating to the wrapped method.

    :raises: ApiRequestError, FileNotUploadedError
    """

    @wraps(decoratee)
    def _decorated(self, *args, **kwargs):
        if self.uploaded:
            return decoratee(self, *args, **kwargs)
        self.FetchMetadata()
        return decoratee(self, *args, **kwargs)

    return _decorated
class GoogleDriveFileList(ApiResourceList):
    """Google Drive FileList instance.

    Equivalent to Files.list() in Drive APIs.
    """

    def __init__(self, auth=None, param=None):
        """Create an instance of GoogleDriveFileList."""
        super().__init__(auth=auth, metadata=param)

    @LoadAuth
    def _GetList(self):
        """Overwritten method which actually makes API call to list files.

        :returns: list -- list of pydrive2.files.GoogleDriveFile.
        """
        # Teamdrive support
        self["supportsAllDrives"] = True
        self["includeItemsFromAllDrives"] = True
        try:
            self.metadata = (
                self.auth.service.files()
                .list(**dict(self))
                .execute(http=self.http)
            )
        except errors.HttpError as error:
            raise ApiRequestError(error)
        # Wrap every returned item in a GoogleDriveFile marked uploaded.
        return [
            GoogleDriveFile(auth=self.auth, metadata=item, uploaded=True)
            for item in self.metadata["items"]
        ]
class IoBuffer:
    """Lightweight retention of one chunk.

    Acts as the write target for MediaIoBaseDownload while exposing the
    last written chunk through read().
    """

    def __init__(self, encoding):
        # Encoding used to decode the retained chunk; None keeps bytes.
        self.encoding = encoding
        self.chunk = None

    def write(self, chunk):
        """Remember the most recently written chunk."""
        self.chunk = chunk

    def read(self):
        """Return the retained chunk, decoded when an encoding is set."""
        chunk = self.chunk
        if chunk and self.encoding:
            return chunk.decode(self.encoding)
        return chunk
class MediaIoReadable:
    """File-like, iterable wrapper around MediaIoBaseDownload."""

    def __init__(
        self,
        request,
        encoding=None,
        pre_buffer=True,
        remove_prefix=b"",
        chunksize=DEFAULT_CHUNK_SIZE,
    ):
        """File-like wrapper around MediaIoBaseDownload.

        :param request: the googleapiclient media download request.
        :param encoding: decode chunks with this encoding; None yields bytes.
        :param pre_buffer: Whether to read one chunk into an internal buffer
                immediately in order to raise any potential errors.
        :param remove_prefix: Bytes prefix to remove from internal pre_buffer.
        :param chunksize: default read()/iter() chunksize in bytes.
        :raises: ApiRequestError
        """
        self.done = False
        # Chunks are written into this one-slot buffer by the downloader.
        self._fd = IoBuffer(encoding)
        self.downloader = MediaIoBaseDownload(
            self._fd, request, chunksize=chunksize
        )
        # Total size is unknown until the first next_chunk() reports it.
        self.size = None
        self._pre_buffer = False
        if pre_buffer:
            # Fetch the first chunk eagerly so download errors surface
            # from the constructor instead of the first read().
            self.read()
            if remove_prefix:
                # Strip e.g. a BOM from the buffered first chunk in-place.
                chunk = io.BytesIO(self._fd.chunk)
                GoogleDriveFile._RemovePrefix(chunk, remove_prefix)
                self._fd.chunk = chunk.getvalue()
            self._pre_buffer = True

    def read(self):
        """
        :returns: bytes or str -- chunk (or None if done)
        :raises: ApiRequestError
        """
        if self._pre_buffer:
            # Serve the eagerly fetched chunk exactly once.
            self._pre_buffer = False
            return self._fd.read()
        if self.done:
            return None
        try:
            status, self.done = self.downloader.next_chunk()
            self.size = status.total_size
        except errors.HttpError as error:
            raise ApiRequestError(error)
        return self._fd.read()

    def __iter__(self):
        """Yield chunks until the download is exhausted.

        :raises: ApiRequestError
        """
        while True:
            chunk = self.read()
            if chunk is None:
                break
            yield chunk

    def __len__(self):
        # NOTE(review): self.size stays None until next_chunk() has run
        # at least once, in which case len() raises TypeError — confirm
        # callers only use len() after reading.
        return self.size
class GoogleDriveFile(ApiAttributeMixin, ApiResource):
"""Google Drive File instance.
Inherits ApiResource which inherits dict.
Can access and modify metadata like dictionary.
"""
content = ApiAttribute("content")
uploaded = ApiAttribute("uploaded")
metadata = ApiAttribute("metadata")
    def __init__(self, auth=None, metadata=None, uploaded=False):
        """Create an instance of GoogleDriveFile.

        :param auth: authorized GoogleAuth instance.
        :type auth: pydrive2.auth.GoogleAuth
        :param metadata: file resource to initialize GoogleDriveFile with.
        :type metadata: dict.
        :param uploaded: True if this file is confirmed to be uploaded.
        :type uploaded: bool.
        """
        ApiAttributeMixin.__init__(self)
        ApiResource.__init__(self)
        self.metadata = {}
        # Tracks whether the local content differs from the remote copy.
        self.dirty = {"content": False}
        self.auth = auth
        self.uploaded = uploaded
        if uploaded:
            # Known-remote file: populate the metadata cache as well.
            self.UpdateMetadata(metadata)
        elif metadata:
            self.update(metadata)
        # Assume a BOM is present until content is fetched with
        # remove_bom=True (see FetchContent).
        self.has_bom = True
def __getitem__(self, key):
"""Overwrites manner of accessing Files resource.
If this file instance is not uploaded and id is specified,
it will try to look for metadata with Files.get().
:param key: key of dictionary query.
:type key: str.
:returns: value of Files resource
:raises: KeyError, FileNotUploadedError
"""
try:
return dict.__getitem__(self, key)
except KeyError as e:
if self.uploaded:
raise KeyError(e)
if self.get("id"):
self.FetchMetadata()
return dict.__getitem__(self, key)
else:
raise FileNotUploadedError()
def SetContentString(self, content, encoding="utf-8"):
"""Set content of this file to be a string.
Creates io.BytesIO instance of utf-8 encoded string.
Sets mimeType to be 'text/plain' if not specified.
:param encoding: The encoding to use when setting the content of this file.
:type encoding: str
:param content: content of the file in string.
:type content: str
"""
self.content = io.BytesIO(content.encode(encoding))
if self.get("mimeType") is None:
self["mimeType"] = "text/plain"
    def SetContentFile(self, filename):
        """Set content of this file from a file.

        Opens the file specified by this method.
        Will be read, uploaded, and closed by Upload() method.
        Sets metadata 'title' and 'mimeType' automatically if not specified.

        :param filename: name of the file to be uploaded.
        :type filename: str.
        """
        # The handle stays open until it is consumed by Upload().
        self.content = open(filename, "rb")
        if self.get("title") is None:
            # NOTE(review): the path as given (not the basename) becomes
            # the title -- confirm this is intended by callers.
            self["title"] = filename
        if self.get("mimeType") is None:
            # guess_type may yield None for unknown extensions; in that
            # case _BuildMediaBody falls back to octet-stream later.
            self["mimeType"] = mimetypes.guess_type(filename)[0]
    def GetContentString(
        self, mimetype=None, encoding="utf-8", remove_bom=False
    ):
        """Get content of this file as a string.

        :param mimetype: The mimetype of the content string.
        :type mimetype: str
        :param encoding: The encoding to use when decoding the byte string.
        :type encoding: str
        :param remove_bom: Whether to strip a known BOM.
        :type remove_bom: bool
        :returns: str -- utf-8 decoded content of the file
        :raises: ApiRequestError, FileNotUploadedError, FileNotDownloadableError
        """
        # Re-download when there is no cached BytesIO content, or when the
        # cached copy's BOM state does not match what was requested
        # (has_bom == remove_bom means the cache is in the wrong state).
        if (
            self.content is None
            or type(self.content) is not io.BytesIO
            or self.has_bom == remove_bom
        ):
            self.FetchContent(mimetype, remove_bom)
        return self.content.getvalue().decode(encoding)
    @LoadAuth
    def GetContentFile(
        self,
        filename,
        mimetype=None,
        remove_bom=False,
        callback=None,
        chunksize=DEFAULT_CHUNK_SIZE,
        acknowledge_abuse=False,
    ):
        """Save content of this file as a local file.

        :param filename: name of the file to write to.
        :type filename: str
        :param mimetype: mimeType of the file.
        :type mimetype: str
        :param remove_bom: Whether to remove the byte order marking.
        :type remove_bom: bool
        :param callback: passed two arguments: (total transferred, file size).
        :type param: callable
        :param chunksize: chunksize in bytes (standard 100 MB(1024*1024*100))
        :type chunksize: int
        :param acknowledge_abuse: Acknowledging the risk and download file
                                  identified as abusive.
        :type acknowledge_abuse: bool
        :raises: ApiRequestError, FileNotUploadedError
        """
        files = self.auth.service.files()
        file_id = self.metadata.get("id") or self.get("id")
        if not file_id:
            raise FileNotUploadedError()

        # Stream the given request into fd chunk by chunk, reporting
        # progress through `callback` when provided.
        def download(fd, request):
            downloader = MediaIoBaseDownload(
                fd, self._WrapRequest(request), chunksize=chunksize
            )
            done = False
            while done is False:
                status, done = downloader.next_chunk()
                if callback:
                    callback(status.resumable_progress, status.total_size)

        with open(filename, mode="w+b") as fd:
            # Should use files.export_media instead of files.get_media if
            # metadata["mimeType"].startswith("application/vnd.google-apps.").
            # But that would first require a slow call to FetchMetadata().
            # We prefer to try-except for speed.
            try:
                download(
                    fd,
                    files.get_media(
                        fileId=file_id, acknowledgeAbuse=acknowledge_abuse
                    ),
                )
            except errors.HttpError as error:
                exc = ApiRequestError(error)
                # Only a 403/fileNotDownloadable means "Google Docs
                # format, use export instead"; anything else is fatal.
                if (
                    exc.error["code"] != 403
                    or exc.GetField("reason") != "fileNotDownloadable"
                ):
                    raise exc
                mimetype = mimetype or "text/plain"
                fd.seek(0)  # just in case `download()` modified `fd`
                try:
                    download(
                        fd,
                        files.export_media(fileId=file_id, mimeType=mimetype),
                    )
                except errors.HttpError as error:
                    raise ApiRequestError(error)
            if mimetype == "text/plain" and remove_bom:
                fd.seek(0)
                bom = self._GetBOM(mimetype)
                if bom:
                    self._RemovePrefix(fd, bom)
    @LoadAuth
    def GetContentIOBuffer(
        self,
        mimetype=None,
        encoding=None,
        remove_bom=False,
        chunksize=DEFAULT_CHUNK_SIZE,
        acknowledge_abuse=False,
    ):
        """Get a file-like object which has a buffered read() method.

        :param mimetype: mimeType of the file.
        :type mimetype: str
        :param encoding: The encoding to use when decoding the byte string.
        :type encoding: str
        :param remove_bom: Whether to remove the byte order marking.
        :type remove_bom: bool
        :param chunksize: default read()/iter() chunksize.
        :type chunksize: int
        :param acknowledge_abuse: Acknowledging the risk and download file
                                  identified as abusive.
        :type acknowledge_abuse: bool
        :returns: MediaIoReadable -- file-like object.
        :raises: ApiRequestError, FileNotUploadedError
        """
        files = self.auth.service.files()
        file_id = self.metadata.get("id") or self.get("id")
        if not file_id:
            raise FileNotUploadedError()

        # Should use files.export_media instead of files.get_media if
        # metadata["mimeType"].startswith("application/vnd.google-apps.").
        # But that would first require a slow call to FetchMetadata().
        # We prefer to try-except for speed.
        try:
            request = self._WrapRequest(
                files.get_media(
                    fileId=file_id, acknowledgeAbuse=acknowledge_abuse
                )
            )
            # MediaIoReadable pre-buffers the first chunk, so download
            # errors (e.g. 403/fileNotDownloadable) surface right here.
            return MediaIoReadable(
                request, encoding=encoding, chunksize=chunksize
            )
        except ApiRequestError as exc:
            # Only a 403/fileNotDownloadable means "Google Docs format,
            # use export instead"; anything else is fatal.
            if (
                exc.error["code"] != 403
                or exc.GetField("reason") != "fileNotDownloadable"
            ):
                raise exc
            mimetype = mimetype or "text/plain"
            request = self._WrapRequest(
                files.export_media(fileId=file_id, mimeType=mimetype)
            )
            # Only plain-text exports are known to carry a BOM worth
            # stripping (see MIME_TYPE_TO_BOM).
            remove_prefix = (
                self._GetBOM(mimetype)
                if mimetype == "text/plain" and remove_bom
                else b""
            )
            return MediaIoReadable(
                request,
                encoding=encoding,
                remove_prefix=remove_prefix,
                chunksize=chunksize,
            )
@LoadAuth
def FetchMetadata(self, fields=None, fetch_all=False):
"""Download file's metadata from id using Files.get().
:param fields: The fields to include, as one string, each entry separated
by commas, e.g. 'fields,labels'.
:type fields: str
:param fetch_all: Whether to fetch all fields.
:type fetch_all: bool
:raises: ApiRequestError, FileNotUploadedError
"""
file_id = self.metadata.get("id") or self.get("id")
if fetch_all:
fields = "*"
if file_id:
try:
metadata = (
self.auth.service.files()
.get(
fileId=file_id,
fields=fields,
# Teamdrive support
supportsAllDrives=True,
)
.execute(http=self.http)
)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
self.uploaded = True
self.UpdateMetadata(metadata)
else:
raise FileNotUploadedError()
@LoadMetadata
def FetchContent(self, mimetype=None, remove_bom=False):
"""Download file's content from download_url.
:raises: ApiRequestError, FileNotUploadedError, FileNotDownloadableError
"""
download_url = self.metadata.get("downloadUrl")
export_links = self.metadata.get("exportLinks")
if download_url:
self.content = io.BytesIO(self._DownloadFromUrl(download_url))
self.dirty["content"] = False
elif export_links and export_links.get(mimetype):
self.content = io.BytesIO(
self._DownloadFromUrl(export_links.get(mimetype))
)
self.dirty["content"] = False
else:
raise FileNotDownloadableError(
"No downloadLink/exportLinks for mimetype found in metadata"
)
if mimetype == "text/plain" and remove_bom:
self._RemovePrefix(
self.content, MIME_TYPE_TO_BOM[self["mimeType"]][mimetype]
)
self.has_bom = not remove_bom
def Upload(self, param=None):
"""Upload/update file by choosing the most efficient method.
:param param: additional parameter to upload file.
:type param: dict.
:raises: ApiRequestError
"""
if self.uploaded or self.get("id") is not None:
if self.dirty["content"]:
self._FilesUpdate(param=param)
else:
self._FilesPatch(param=param)
else:
self._FilesInsert(param=param)
    def Trash(self, param=None):
        """Move a file to the trash.

        :param param: additional parameter to file.
        :type param: dict.
        :raises: ApiRequestError
        """
        self._FilesTrash(param=param)
    def UnTrash(self, param=None):
        """Move a file out of the trash.

        :param param: Additional parameter to file.
        :type param: dict.
        :raises: ApiRequestError
        """
        self._FilesUnTrash(param=param)
    def Delete(self, param=None):
        """Hard-delete a file (permanent, bypasses the trash).

        :param param: additional parameter to file.
        :type param: dict.
        :raises: ApiRequestError
        """
        self._FilesDelete(param=param)
    @LoadAuth
    def Copy(self, target_folder=None, new_title=None, param=None):
        """Creates a copy of this file. Folders cannot be copied.

        :param target_folder: Folder where the file will be copied.
        :type target_folder: GoogleDriveFile, optional
        :param new_title: Name of the new file.
        :type new_title: str, optional
        :param param: addition parameters to pass.
        :type param: dict, optional
        :raises: ApiRequestError
        :return: the copied file
        :rtype: GoogleDriveFile
        """
        if param is None:
            param = {}
        param["fileId"] = self["id"]
        param["supportsAllDrives"] = True
        param["body"] = {}
        if target_folder:
            param["body"]["parents"] = [{"id": target_folder["id"]}]
        # NOTE(review): title is set even when new_title is None, sending
        # "title": null to the API -- confirm the API ignores null here.
        param["body"]["title"] = new_title
        new_file = None
        try:
            new_file = (
                self.auth.service.files().copy(**param).execute(http=self.http)
            )
        except errors.HttpError as error:
            raise ApiRequestError(error)
        # Positional args: auth, metadata. The copy is returned with
        # uploaded left at its default (False).
        return GoogleDriveFile(self.auth, new_file)
def InsertPermission(self, new_permission, param=None):
"""Insert a new permission. Re-fetches all permissions after call.
:param new_permission: The new permission to insert, please see the
official Google Drive API guide on permissions.insert
for details.
:type new_permission: object
:param param: addition parameters to pass
:type param: dict
:return: The permission object.
:rtype: object
"""
if param is None:
param = {}
param["fileId"] = self.metadata.get("id") or self["id"]
param["body"] = new_permission
# Teamdrive support
param["supportsAllDrives"] = True
try:
permission = (
self.auth.service.permissions()
.insert(**param)
.execute(http=self.http)
)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
self.GetPermissions() # Update permissions field.
return permission
    @LoadAuth
    def GetPermissions(self):
        """Get file's or shared drive's permissions.

        For files in a shared drive, at most 100 results will be returned.
        It doesn't paginate and collect all results.

        :return: A list of the permission objects.
        :rtype: object[]
        """
        file_id = self.metadata.get("id") or self.get("id")
        # We can't do FetchMetada call (which would nicely update
        # local metada cache, etc) here since it doesn't return
        # permissions for the team drive use case.
        permissions = (
            self.auth.service.permissions()
            .list(
                fileId=file_id,
                # Teamdrive support
                supportsAllDrives=True,
            )
            .execute(http=self.http)
        ).get("items")
        # Cache in both the dict view and the metadata cache; when the
        # API returns nothing, leave any existing cache untouched.
        if permissions:
            self["permissions"] = permissions
            self.metadata["permissions"] = permissions
        return permissions
    def DeletePermission(self, permission_id):
        """Deletes the permission specified by the permission_id.

        :param permission_id: The permission id.
        :type permission_id: str
        :return: True if it succeeds.
        :rtype: bool
        """
        return self._DeletePermission(permission_id)
def _WrapRequest(self, request):
"""Replaces request.http with self.http.
Ensures thread safety. Similar to other places where we call
`.execute(http=self.http)` to pass a client from the thread local storage.
"""
if self.http:
request.http = self.http
return request
@LoadAuth
def _FilesInsert(self, param=None):
"""Upload a new file using Files.insert().
:param param: additional parameter to upload file.
:type param: dict.
:raises: ApiRequestError
"""
if param is None:
param = {}
param["body"] = self.GetChanges()
# teamdrive support
param["supportsAllDrives"] = True
try:
if self.dirty["content"]:
param["media_body"] = self._BuildMediaBody()
metadata = (
self.auth.service.files()
.insert(**param)
.execute(http=self.http)
)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
self.uploaded = True
self.dirty["content"] = False
self.UpdateMetadata(metadata)
@LoadAuth
def _FilesUnTrash(self, param=None):
"""Un-delete (Trash) a file using Files.UnTrash().
:param param: additional parameter to file.
:type param: dict.
:raises: ApiRequestError
"""
if param is None:
param = {}
param["fileId"] = self.metadata.get("id") or self["id"]
# Teamdrive support
param["supportsAllDrives"] = True
try:
self.auth.service.files().untrash(**param).execute(http=self.http)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
if self.metadata:
self.metadata["labels"]["trashed"] = False
return True
@LoadAuth
def _FilesTrash(self, param=None):
"""Soft-delete (Trash) a file using Files.Trash().
:param param: additional parameter to file.
:type param: dict.
:raises: ApiRequestError
"""
if param is None:
param = {}
param["fileId"] = self.metadata.get("id") or self["id"]
# Teamdrive support
param["supportsAllDrives"] = True
try:
self.auth.service.files().trash(**param).execute(http=self.http)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
if self.metadata:
self.metadata["labels"]["trashed"] = True
return True
@LoadAuth
def _FilesDelete(self, param=None):
"""Delete a file using Files.Delete()
(WARNING: deleting permanently deletes the file!)
:param param: additional parameter to file.
:type param: dict.
:raises: ApiRequestError
"""
if param is None:
param = {}
param["fileId"] = self.metadata.get("id") or self["id"]
# Teamdrive support
param["supportsAllDrives"] = True
try:
self.auth.service.files().delete(**param).execute(http=self.http)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
return True
@LoadAuth
def _FilesUpdate(self, param=None):
"""Update metadata and/or content using Files.Update().
:param param: additional parameter to upload file.
:type param: dict.
:raises: ApiRequestError, FileNotUploadedError
"""
if param is None:
param = {}
param["body"] = self.GetChanges()
param["fileId"] = self.metadata.get("id") or self.get("id")
# Teamdrive support
param["supportsAllDrives"] = True
try:
if self.dirty["content"]:
param["media_body"] = self._BuildMediaBody()
metadata = (
self.auth.service.files()
.update(**param)
.execute(http=self.http)
)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
self.uploaded = True
self.dirty["content"] = False
self.UpdateMetadata(metadata)
@LoadAuth
def _FilesPatch(self, param=None):
"""Update metadata using Files.Patch().
:param param: additional parameter to upload file.
:type param: dict.
:raises: ApiRequestError, FileNotUploadedError
"""
if param is None:
param = {}
param["body"] = self.GetChanges()
param["fileId"] = self.metadata.get("id") or self.get("id")
# Teamdrive support
param["supportsAllDrives"] = True
try:
metadata = (
self.auth.service.files()
.patch(**param)
.execute(http=self.http)
)
except errors.HttpError as error:
raise ApiRequestError(error)
else:
self.UpdateMetadata(metadata)
def _BuildMediaBody(self):
"""Build MediaIoBaseUpload to get prepared to upload content of the file.
Sets mimeType as 'application/octet-stream' if not specified.
:returns: MediaIoBaseUpload -- instance that will be used to upload content.
"""
if self.get("mimeType") is None:
self["mimeType"] = "application/octet-stream"
return MediaIoBaseUpload(
self.content, self["mimeType"], resumable=True
)
@LoadAuth
def _DownloadFromUrl(self, url):
"""Download file from url using provided credential.
:param url: link of the file to download.
:type url: str.
:returns: str -- content of downloaded file in string.
:raises: ApiRequestError
"""
resp, content = self.http.request(url)
if resp.status != 200:
raise ApiRequestError(errors.HttpError(resp, content, uri=url))
return content
@LoadAuth
def _DeletePermission(self, permission_id):
"""Deletes the permission remotely, and from the file object itself.
:param permission_id: The ID of the permission.
:type permission_id: str
:return: The permission
:rtype: object
"""
file_id = self.metadata.get("id") or self["id"]
try:
self.auth.service.permissions().delete(
fileId=file_id, permissionId=permission_id
).execute()
except errors.HttpError as error:
raise ApiRequestError(error)
else:
if "permissions" in self and "permissions" in self.metadata:
permissions = self["permissions"]
is_not_current_permission = (
lambda per: per["id"] == permission_id
)
permissions = list(
filter(is_not_current_permission, permissions)
)
self["permissions"] = permissions
self.metadata["permissions"] = permissions
return True
@staticmethod
def _GetBOM(mimetype):
"""Based on download mime type (ignores Google Drive mime type)"""
for bom in MIME_TYPE_TO_BOM.values():
if mimetype in bom:
return bom[mimetype]
    @staticmethod
    def _RemovePrefix(file_object, prefix, block_size=BLOCK_SIZE):
        """Deletes passed prefix by shifting content of passed file object
        to the left. Operation is in-place.

        Args:
            file_object (obj): The seekable binary file object to manipulate.
            prefix (bytes): The prefix to remove (only if actually present).
            block_size (int): The size of the blocks which are moved one at a
                time.
        """
        prefix_length = len(prefix)
        # Detect if prefix exists in file.
        content_start = file_object.read(prefix_length)
        if content_start == prefix:
            # Shift content left by prefix length, by copying 1KiB at a time.
            block_to_write = file_object.read(block_size)
            current_block_length = len(block_to_write)
            # Read and write location in separate variables for simplicity.
            read_location = prefix_length + current_block_length
            write_location = 0
            while current_block_length > 0:
                # Write next block.
                file_object.seek(write_location)
                file_object.write(block_to_write)
                # Set write location to the next block.
                write_location += len(block_to_write)
                # Read next block of input.
                file_object.seek(read_location)
                block_to_write = file_object.read(block_size)
                # Update the current block length and read_location.
                current_block_length = len(block_to_write)
                read_location += current_block_length
            # Truncate the file to its, now shorter, length.
            file_object.truncate(read_location - prefix_length)
    @staticmethod
    def _InsertPrefix(file_object, prefix, block_size=BLOCK_SIZE):
        """Inserts the passed prefix in the beginning of the file, operation
        is in-place.

        Works by keeping two blocks in memory while writing, so each block
        can be written over its old position after the shift.

        Args:
            file_object (obj): The seekable binary file object to manipulate.
            prefix (bytes): The prefix to insert.
            block_size (int): Size of the blocks shifted per iteration.
        """
        # Read the first two blocks.
        first_block = file_object.read(block_size)
        second_block = file_object.read(block_size)
        # Pointer to the first byte of the next block to be read.
        read_location = block_size * 2
        # Write BOM.
        file_object.seek(0)
        file_object.write(prefix)
        # {read|write}_location separated for readability.
        write_location = len(prefix)
        # Write and read block alternatingly.
        while len(first_block):
            # Write first block.
            file_object.seek(write_location)
            file_object.write(first_block)
            # Increment write_location.
            write_location += block_size
            # Move second block into first variable.
            first_block = second_block
            # Read in the next block.
            file_object.seek(read_location)
            second_block = file_object.read(block_size)
            # Increment read_location.
            read_location += block_size
PyDrive2-1.15.0/pydrive2/fs/ 0000775 0000000 0000000 00000000000 14334672560 0015430 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/pydrive2/fs/__init__.py 0000664 0000000 0000000 00000000116 14334672560 0017537 0 ustar 00root root 0000000 0000000 from pydrive2.fs.spec import GDriveFileSystem
__all__ = ["GDriveFileSystem"]
PyDrive2-1.15.0/pydrive2/fs/spec.py 0000664 0000000 0000000 00000053122 14334672560 0016737 0 ustar 00root root 0000000 0000000 import appdirs
import errno
import io
import logging
import os
import posixpath
import threading
from collections import defaultdict
from contextlib import contextmanager
from fsspec.spec import AbstractFileSystem
from funcy import cached_property, retry, wrap_prop, wrap_with
from funcy.py3 import cat
from tqdm.utils import CallbackIOWrapper
from pydrive2.drive import GoogleDrive
from pydrive2.fs.utils import IterStream
from pydrive2.auth import GoogleAuth
logger = logging.getLogger(__name__)
FOLDER_MIME_TYPE = "application/vnd.google-apps.folder"
COMMON_SETTINGS = {
"get_refresh_token": True,
"oauth_scope": [
"https://www.googleapis.com/auth/drive",
"https://www.googleapis.com/auth/drive.appdata",
],
}
class GDriveAuthError(Exception):
    """Raised when GDrive authentication fails (see _wrap_errors)."""

    pass
def _gdrive_retry(func):
    """Wrap `func` with retries for transient Google Drive API errors."""

    def should_retry(exc):
        from pydrive2.files import ApiRequestError

        if not isinstance(exc, ApiRequestError):
            return False

        error_code = exc.error.get("code", 0)
        result = False
        # 5xx server errors are always retriable.
        if 500 <= error_code < 600:
            result = True

        # 403 is retriable only for rate limiting, not for other
        # permission problems.
        if error_code == 403:
            result = exc.GetField("reason") in [
                "userRateLimitExceeded",
                "rateLimitExceeded",
            ]
        if result:
            logger.debug(f"Retrying GDrive API call, error: {exc}.")

        return result

    # 16 tries, start at 0.5s, multiply by golden ratio, cap at 20s
    return retry(
        16,
        timeout=lambda a: min(0.5 * 1.618**a, 20),
        filter_errors=should_retry,
    )(func)
@contextmanager
def _wrap_errors():
    """Context manager translating any auth failure into GDriveAuthError."""
    try:
        yield
    except Exception as exc:
        # Handle AuthenticationError, RefreshError and other auth failures
        # It's hard to come up with a narrow exception, since PyDrive throws
        # a lot of different errors - broken credentials file, refresh token
        # expired, flow failed, etc.
        raise GDriveAuthError("Failed to authenticate GDrive") from exc
def _client_auth(
    client_id=None,
    client_secret=None,
    client_json=None,
    client_json_file_path=None,
    profile=None,
):
    """Run the OAuth local-webserver flow and return an authorized GoogleAuth.

    Credentials are cached either in an in-memory dictionary backend (when
    `client_json` is given) or in a per-profile file under the user cache
    directory.

    :raises: GDriveAuthError (via _wrap_errors)
    """
    if client_json:
        # Credentials supplied inline: cache them in-memory only.
        save_settings = {
            "save_credentials_backend": "dictionary",
            "save_credentials_dict": {"creds": client_json},
            "save_credentials_key": "creds",
        }
    else:
        creds_file = client_json_file_path
        if not creds_file:
            # Default cache location: per-client-id, per-profile file.
            cache_dir = os.path.join(
                appdirs.user_cache_dir("pydrive2fs", appauthor=False),
                client_id,
            )
            os.makedirs(cache_dir, exist_ok=True)
            profile = profile or "default"
            creds_file = os.path.join(cache_dir, f"{profile}.json")

        save_settings = {
            "save_credentials_backend": "file",
            "save_credentials_file": creds_file,
        }

    settings = {
        **COMMON_SETTINGS,
        "save_credentials": True,
        **save_settings,
        "client_config_backend": "settings",
        "client_config": {
            "client_id": client_id,
            "client_secret": client_secret,
            "auth_uri": "https://accounts.google.com/o/oauth2/auth",
            "token_uri": "https://oauth2.googleapis.com/token",
            "revoke_uri": "https://oauth2.googleapis.com/revoke",
            "redirect_uri": "",
        },
    }

    auth = GoogleAuth(settings=settings)
    with _wrap_errors():
        # Opens a browser / local webserver for the interactive flow.
        auth.LocalWebserverAuth()
    return auth
def _service_auth(
    client_user_email=None,
    client_json=None,
    client_json_file_path=None,
):
    """Authenticate with a Google service account and return the
    authenticated ``GoogleAuth`` instance.

    :raises: GDriveAuthError (via _wrap_errors) on any auth failure.
    """
    service_config = {
        "client_user_email": client_user_email,
        "client_json": client_json,
        "client_json_file_path": client_json_file_path,
    }
    settings = dict(COMMON_SETTINGS)
    settings["client_config_backend"] = "service"
    settings["service_config"] = service_config

    auth = GoogleAuth(settings=settings)
    with _wrap_errors():
        auth.ServiceAuth()
    return auth
class GDriveFileSystem(AbstractFileSystem):
"""Access to gdrive as an fsspec filesystem"""
def __init__(
    self,
    path,
    google_auth=None,
    client_id=None,
    client_secret=None,
    client_user_email=None,
    client_json=None,
    client_json_file_path=None,
    use_service_account=False,
    profile=None,
    trash_only=True,
    acknowledge_abuse=False,
    **kwargs,
):
    """Create an instance of GDriveFileSystem.

    :param path: gdrive path.
    :type path: str.
    :param google_auth: Authenticated GoogleAuth instance.
    :type google_auth: GoogleAuth.
    :param client_id: Client ID of the application.
    :type client_id: str
    :param client_secret: Client secret of the application.
    :type client_secret: str.
    :param client_user_email: User email that authority was delegated to
        (only for service account).
    :type client_user_email: str.
    :param client_json: JSON keyfile loaded into a string.
    :type client_json: str.
    :param client_json_file_path: Path to JSON keyfile.
    :type client_json_file_path: str.
    :param use_service_account: Use service account.
    :type use_service_account: bool.
    :param profile: Profile name for caching credentials
        (ignored for service account).
    :type profile: str.
    :param trash_only: Move files to trash instead of deleting.
    :type trash_only: bool.
    :param acknowledge_abuse: Acknowledging the risk and download file
        identified as abusive.
    :type acknowledge_abuse: bool
    :raises: GDriveAuthError
    """
    super().__init__(**kwargs)
    self.path = path
    # root is the first path component (e.g. "root", "appDataFolder" or a
    # folder/drive id); base is everything after it.
    self.root, self.base = self.split_path(self.path)

    if not google_auth:
        # Without a ready GoogleAuth we need at least one credential
        # source to run an auth flow ourselves.
        if (
            not client_json
            and not client_json_file_path
            and not (client_id and client_secret)
        ):
            raise ValueError(
                "Specify credentials using one of these methods: "
                "client_id/client_secret or "
                "client_json or "
                "client_json_file_path"
            )

        if use_service_account:
            google_auth = _service_auth(
                client_json=client_json,
                client_json_file_path=client_json_file_path,
                client_user_email=client_user_email,
            )
        else:
            google_auth = _client_auth(
                client_id=client_id,
                client_secret=client_secret,
                client_json=client_json,
                client_json_file_path=client_json_file_path,
                profile=profile,
            )

    self.client = GoogleDrive(google_auth)
    self._trash_only = trash_only
    self._acknowledge_abuse = acknowledge_abuse
def split_path(self, path):
    """Split ``path`` into ``(root, rest)`` on the first slash.

    Double slashes are collapsed and trailing slashes stripped first.
    Note: when the path contains a slash the two components are returned
    as a list (preserved historical behavior); otherwise as a tuple
    ``(root, "")``.
    """
    normalized = path.replace("//", "/").rstrip("/")
    pieces = normalized.split("/", 1)
    if len(pieces) == 1:
        return pieces[0], ""
    return pieces
@wrap_prop(threading.RLock())
@cached_property
def _ids_cache(self):
    """Lazily-built, lock-protected cache mapping paths to Drive item ids.

    Structure:
      - "dirs": path -> list of item ids (a path may map to several ids,
        since Drive allows same-named siblings)
      - "ids": item id -> path
      - "root_id": id of ``self.path`` itself

    Building the cache performs network calls: it resolves the root id
    and lists the root's direct (non-trashed) children.
    """
    cache = {
        "dirs": defaultdict(list),
        "ids": {},
        "root_id": self._get_item_id(
            self.path,
            use_cache=False,
            hint="Confirm the directory exists and you can access it.",
        ),
    }

    self._cache_path_id(self.base, cache["root_id"], cache=cache)

    # Pre-populate with the direct children of the root directory.
    for item in self._gdrive_list(
        "'{}' in parents and trashed=false".format(cache["root_id"])
    ):
        item_path = posixpath.join(self.base, item["title"])
        self._cache_path_id(item_path, item["id"], cache=cache)

    return cache
def _cache_path_id(self, path, *item_ids, cache=None):
    """Record the path<->id associations for every id in ``item_ids``.

    Writes into ``cache`` when given (used while the cache itself is
    being built), otherwise into ``self._ids_cache``.
    """
    target = cache or self._ids_cache
    for one_id in item_ids:
        target["dirs"][path].append(one_id)
        target["ids"][one_id] = path
@cached_property
def _list_params(self):
    """Extra parameters for Drive list calls, computed once.

    If ``self.root`` is a plain item id (not "root"/"appDataFolder") and
    that item lives on a shared drive, list calls are scoped to that
    drive via ``driveId``/``corpora="drive"``; otherwise the default
    corpora is used.
    """
    params = {"corpora": "default"}
    if self.root != "root" and self.root != "appDataFolder":
        # NOTE: this may raise PermissionError from
        # _gdrive_shared_drive_id if the root item cannot be fetched.
        drive_id = self._gdrive_shared_drive_id(self.root)
        if drive_id:
            logger.debug(
                "GDrive remote '{}' is using shared drive id '{}'.".format(
                    self.path, drive_id
                )
            )
            params["driveId"] = drive_id
            params["corpora"] = "drive"
    return params
@_gdrive_retry
def _gdrive_shared_drive_id(self, item_id):
    """Return the id of the shared drive containing ``item_id``, or None.

    ``driveId`` is only populated by the API for items that reside in
    shared drives; for regular "My Drive" items this returns None.
    """
    from pydrive2.files import ApiRequestError

    param = {"id": item_id}
    # it does not create a file on the remote
    item = self.client.CreateFile(param)
    # ID of the shared drive the item resides in.
    # Only populated for items in shared drives.
    try:
        item.FetchMetadata("driveId")
    except ApiRequestError as exc:
        error_code = exc.error.get("code", 0)
        # NOTE(review): a 404 here is surfaced as PermissionError —
        # presumably because the item id is valid but inaccessible to
        # this account; confirm this mapping is intentional.
        if error_code == 404:
            raise PermissionError from exc
        raise

    return item.get("driveId", None)
def _gdrive_list(self, query):
    """Run a Drive list ``query`` and lazily yield matching items.

    Pagination is handled by fetching pages of up to 1000 results; each
    page fetch is wrapped with the retry decorator, and ``cat`` lazily
    flattens the pages into a single iterable of items.
    """
    param = {"q": query, "maxResults": 1000}
    param.update(self._list_params)
    file_list = self.client.ListFile(param)

    # Isolate and decorate fetching of remote drive items in pages.
    get_list = _gdrive_retry(lambda: next(file_list, None))

    # Fetch pages until None is received, lazily flatten the thing.
    return cat(iter(get_list, None))
def _gdrive_list_ids(self, query_ids):
    """List all non-trashed items whose parent is any id in ``query_ids``."""
    parent_clauses = [f"'{qid}' in parents" for qid in query_ids]
    combined = " or ".join(parent_clauses)
    return self._gdrive_list(f"({combined}) and trashed=false")
def _get_remote_item_ids(self, parent_ids, title):
    """Query Drive for ids of non-trashed items named ``title`` directly
    under any of ``parent_ids``.

    Returns None when ``parent_ids`` is empty; otherwise a (possibly
    empty) list of ids.
    """
    if not parent_ids:
        return None
    query = "trashed=false and ({})".format(
        " or ".join(
            f"'{parent_id}' in parents" for parent_id in parent_ids
        )
    )
    # Escape single quotes for the Drive query language.
    query += " and title='{}'".format(title.replace("'", "\\'"))

    # GDrive list API is case insensitive, we need to compare
    # all results and pick the ones with the right title
    return [
        item["id"]
        for item in self._gdrive_list(query)
        if item["title"] == title
    ]
def _get_cached_item_ids(self, path, use_cache):
    """Resolve ``path`` to ids using only local knowledge.

    An empty path means the root itself; otherwise consult the id cache
    when ``use_cache`` is set, falling back to an empty result.
    """
    if not path:
        return [self.root]
    return self._ids_cache["dirs"].get(path, []) if use_cache else []
def _path_to_item_ids(self, path, create=False, use_cache=True):
    """Resolve ``path`` (relative to root) to a list of Drive item ids.

    Resolution order: local cache, then a remote lookup under the
    (recursively resolved) parent ids. When ``create`` is true, missing
    directories are created along the way; otherwise a missing path
    yields an empty list.
    """
    item_ids = self._get_cached_item_ids(path, use_cache)
    if item_ids:
        return item_ids

    # Recurse on the parent, then look up this component remotely.
    parent_path, title = posixpath.split(path)
    parent_ids = self._path_to_item_ids(parent_path, create, use_cache)
    item_ids = self._get_remote_item_ids(parent_ids, title)
    if item_ids:
        return item_ids

    # min(parent_ids) picks a deterministic parent when several ids map
    # to the same path.
    return (
        [self._create_dir(min(parent_ids), title, path)] if create else []
    )
def _get_item_id(self, path, create=False, use_cache=True, hint=None):
    """Resolve a full gdrive ``path`` to a single item id.

    When several ids match, the lexicographically smallest is returned
    for determinism. Raises FileNotFoundError (with ``hint`` as the
    filename field, if given) when the path does not exist and
    ``create`` is false.
    """
    bucket, base = self.split_path(path)
    assert bucket == self.root

    item_ids = self._path_to_item_ids(base, create, use_cache)
    if item_ids:
        return min(item_ids)

    # With create=True the recursion above always yields an id.
    assert not create

    raise FileNotFoundError(
        errno.ENOENT, os.strerror(errno.ENOENT), hint or path
    )
@_gdrive_retry
def _gdrive_create_dir(self, parent_id, title):
    """Create a folder named ``title`` under ``parent_id`` and return
    the created Drive item."""
    new_dir = self.client.CreateFile(
        {
            "title": title,
            "parents": [{"id": parent_id}],
            "mimeType": FOLDER_MIME_TYPE,
        }
    )
    new_dir.Upload()
    return new_dir
@wrap_with(threading.RLock())
def _create_dir(self, parent_id, title, remote_path):
    """Create directory ``remote_path`` (serialized under a lock to avoid
    duplicate creation) and return its id, reusing a cached id if one
    already exists.
    """
    cached = self._ids_cache["dirs"].get(remote_path)
    if cached:
        return cached[0]

    item = self._gdrive_create_dir(parent_id, title)

    # Only direct children of the root are tracked in the id cache.
    if parent_id == self._ids_cache["root_id"]:
        self._cache_path_id(remote_path, item["id"])

    return item["id"]
def exists(self, path):
    """Return True if ``path`` resolves to an item on the remote."""
    try:
        self._get_item_id(path)
        return True
    except FileNotFoundError:
        return False
@_gdrive_retry
def info(self, path):
    """Return fsspec-style metadata for ``path``.

    Directories get ``size`` 0 and a trailing "/" appended to ``name``;
    files additionally report ``checksum`` (md5, may be None).

    :raises: FileNotFoundError if the path does not exist.
    """
    bucket, base = self.split_path(path)
    item_id = self._get_item_id(path)
    gdrive_file = self.client.CreateFile({"id": item_id})
    gdrive_file.FetchMetadata()

    metadata = {"name": posixpath.join(bucket, base.rstrip("/"))}
    if gdrive_file["mimeType"] == FOLDER_MIME_TYPE:
        metadata["type"] = "directory"
        metadata["size"] = 0
        metadata["name"] += "/"
    else:
        metadata["type"] = "file"
        # "fileSize" is not populated for every item (e.g. Google-native
        # documents have no binary size); int(None) would raise
        # TypeError, so report 0 in that case instead of crashing.
        metadata["size"] = int(gdrive_file.get("fileSize") or 0)
        metadata["checksum"] = gdrive_file.get("md5Checksum")
    return metadata
def ls(self, path, detail=False):
    """List the contents of ``path``.

    Returns a list of metadata dicts when ``detail`` is true, else a
    list of names. NOTE(review): returns None (rather than raising) when
    the path cannot be resolved — callers appear to rely on this.
    """
    bucket, base = self.split_path(path)

    cached = base in self._ids_cache["dirs"]
    if cached:
        dir_ids = self._ids_cache["dirs"][base]
    else:
        dir_ids = self._path_to_item_ids(base)

    if not dir_ids:
        return None

    root_path = posixpath.join(bucket, base)
    contents = []
    for item in self._gdrive_list_ids(dir_ids):
        item_path = posixpath.join(root_path, item["title"])
        if item["mimeType"] == FOLDER_MIME_TYPE:
            contents.append(
                {
                    "type": "directory",
                    "name": item_path.rstrip("/") + "/",
                    "size": 0,
                }
            )
        else:
            # NOTE(review): assumes "fileSize" is present; Google-native
            # docs lack it — confirm whether such items can appear here.
            contents.append(
                {
                    "type": "file",
                    "name": item_path,
                    "size": int(item["fileSize"]),
                    "checksum": item.get("md5Checksum"),
                }
            )

    # Cache the resolved ids now that the listing succeeded.
    if not cached:
        self._cache_path_id(root_path, *dir_ids)

    if detail:
        return contents
    else:
        return [content["name"] for content in contents]
def find(self, path, detail=False, **kwargs):
    """Recursively list all files (not directories) under ``path``.

    Performs a breadth-first traversal seeded from the id cache: each
    round lists the children of the current directory set, queuing
    subdirectories for the next round and collecting files. Directories
    outside ``base`` (checked via commonpath) are pruned.
    """
    bucket, base = self.split_path(path)

    seen_paths = set()
    # Stack of {dir_id: dir_path} frontiers; start from everything the
    # id cache already knows about.
    dir_ids = [self._ids_cache["ids"].copy()]
    contents = []
    while dir_ids:
        query_ids = {
            dir_id: dir_name
            for dir_id, dir_name in dir_ids.pop().items()
            # Keep only directories inside the requested subtree.
            if posixpath.commonpath([base, dir_name]) == base
            if dir_id not in seen_paths
        }
        if not query_ids:
            continue

        seen_paths |= query_ids.keys()

        new_query_ids = {}
        dir_ids.append(new_query_ids)
        for item in self._gdrive_list_ids(query_ids):
            parent_id = item["parents"][0]["id"]
            item_path = posixpath.join(query_ids[parent_id], item["title"])
            if item["mimeType"] == FOLDER_MIME_TYPE:
                # Queue subdirectory for the next traversal round.
                new_query_ids[item["id"]] = item_path
                self._cache_path_id(item_path, item["id"])
                continue

            # NOTE(review): assumes "fileSize" exists on every file item
            # — Google-native docs lack it; confirm.
            contents.append(
                {
                    "name": posixpath.join(bucket, item_path),
                    "type": "file",
                    "size": int(item["fileSize"]),
                    "checksum": item.get("md5Checksum"),
                }
            )

    if detail:
        return {content["name"]: content for content in contents}
    else:
        return [content["name"] for content in contents]
def upload_fobj(self, stream, rpath, callback=None, **kwargs):
    """Upload the readable ``stream`` to remote path ``rpath``, creating
    missing parent directories; ``callback`` (if given) receives
    relative progress updates as the stream is read.
    """
    parent_id = self._get_item_id(self._parent(rpath), create=True)
    data = stream
    if callback:
        data = CallbackIOWrapper(callback.relative_update, data, "read")
    title = posixpath.basename(rpath.rstrip("/"))
    return self._gdrive_upload_fobj(title, parent_id, data)
def put_file(self, lpath, rpath, callback=None, **kwargs):
    """Upload local file ``lpath`` to remote path ``rpath``."""
    if callback:
        # Report the total size up-front so relative updates make sense.
        callback.set_size(os.path.getsize(lpath))
    with open(lpath, "rb") as local_fobj:
        self.upload_fobj(local_fobj, rpath, callback=callback)
@_gdrive_retry
def _gdrive_upload_fobj(self, title, parent_id, stream, callback=None):
    """Create a file named ``title`` under ``parent_id`` with the
    contents of ``stream`` and return the Drive item.

    NOTE(review): the ``callback`` parameter is never used here —
    progress wrapping happens in upload_fobj(); kept for interface
    compatibility.
    """
    item = self.client.CreateFile(
        {"title": title, "parents": [{"id": parent_id}]}
    )
    item.content = stream
    item.Upload()
    return item
def cp_file(self, lpath, rpath, **kwargs):
    """In-memory streamed copy"""
    with self.open(lpath) as source:
        # IterStream objects don't support full-length seek() calls,
        # which the upload machinery needs, so stage the data in an
        # in-memory buffer first.
        staged = io.BytesIO(source.read())
        self.upload_fobj(staged, rpath)
@_gdrive_retry
def mv(self, path1, path2, maxdepth=None, **kwargs):
    """Move/rename ``path1`` to ``path2`` in a single metadata update.

    If ``path2`` already exists it is treated as the destination
    directory and the item keeps its name; otherwise ``path2``'s
    basename becomes the new name. NOTE(review): an existing destination
    *file* is handled the same way as a directory — confirm intended.
    """
    if maxdepth is not None:
        raise NotImplementedError("Max depth move is not supported")

    src_name = posixpath.basename(path1)
    src_parent = self._parent(path1)

    if self.exists(path2):
        # Destination exists: move into it, keeping the source name.
        dst_name = src_name
        dst_parent = path2
    else:
        dst_name = posixpath.basename(path2)
        dst_parent = self._parent(path2)

    file1_id = self._get_item_id(path1)
    file1 = self.client.CreateFile({"id": file1_id})

    if src_name != dst_name:
        file1["title"] = dst_name
    if src_parent != dst_parent:
        file2_parent_id = self._get_item_id(dst_parent)
        file1["parents"] = [{"id": file2_parent_id}]

    # TODO need to invalidate the cache for the old path, see #232
    file1.Upload()
def get_file(self, lpath, rpath, callback=None, block_size=None, **kwargs):
    """Download a remote file to a local path.

    NOTE(review): the parameter names are inverted relative to their
    roles — ``lpath`` is the *remote* gdrive path (it is resolved via
    _get_item_id) and ``rpath`` is the *local* destination; positional
    order still matches fsspec's (remote, local) convention.
    """
    item_id = self._get_item_id(lpath)
    return self._gdrive_get_file(
        item_id, rpath, callback=callback, block_size=block_size
    )
@_gdrive_retry
def _gdrive_get_file(self, item_id, rpath, callback=None, block_size=None):
    """Download the Drive item ``item_id`` into local file ``rpath``.

    ``block_size`` (if given) sets the download chunk size;
    ``callback`` (if given) receives absolute byte-progress updates,
    with the total size fetched up-front from the item's metadata.
    """
    param = {"id": item_id}
    # it does not create a file on the remote
    gdrive_file = self.client.CreateFile(param)
    extra_args = {"acknowledge_abuse": self._acknowledge_abuse}
    if block_size:
        extra_args["chunksize"] = block_size

    if callback:

        def cb(value, _):
            # Second positional arg from the downloader is ignored.
            callback.absolute_update(value)

        gdrive_file.FetchMetadata(fields="fileSize")
        callback.set_size(int(gdrive_file.get("fileSize")))
        extra_args["callback"] = cb

    gdrive_file.GetContentFile(rpath, **extra_args)
def _open(self, path, mode, **kwargs):
    """Open ``path`` for binary reading or writing.

    Only "rb" and "wb" are supported: writes go through a buffered
    writer that uploads on flush/close; reads stream the remote content.
    """
    assert mode in {"rb", "wb"}
    if mode == "wb":
        return GDriveBufferedWriter(self, path)
    item_id = self._get_item_id(path)
    return self._gdrive_open_file(item_id)
@_gdrive_retry
def _gdrive_open_file(self, item_id):
    """Return a read-only stream over the contents of ``item_id``."""
    # CreateFile only builds a local handle; nothing is created remotely.
    gdrive_file = self.client.CreateFile({"id": item_id})
    chunk_source = gdrive_file.GetContentIOBuffer(
        acknowledge_abuse=self._acknowledge_abuse
    )
    return IterStream(iter(chunk_source))
def rm_file(self, path):
    """Remove the file at ``path`` (trashed or deleted depending on the
    filesystem's trash_only setting)."""
    self._gdrive_delete_file(self._get_item_id(path))
@_gdrive_retry
def _gdrive_delete_file(self, item_id):
    """Trash or permanently delete the Drive item ``item_id``.

    Uses Trash() when the filesystem was created with trash_only=True
    (the default), otherwise Delete(). A 403 caused by insufficient
    shared-drive permissions is rewritten into a PermissionError with an
    actionable message.
    """
    from pydrive2.files import ApiRequestError

    param = {"id": item_id}
    # it does not create a file on the remote
    item = self.client.CreateFile(param)

    try:
        item.Trash() if self._trash_only else item.Delete()
    except ApiRequestError as exc:
        http_error_code = exc.error.get("code", 0)
        if (
            http_error_code == 403
            and self._list_params["corpora"] == "drive"
            and exc.GetField("location") == "file.permissions"
        ):
            raise PermissionError(
                "Insufficient permissions to {}. You should have {} "
                "access level for the used shared drive. More details "
                "at {}.".format(
                    "move the file into Trash"
                    if self._trash_only
                    else "permanently delete the file",
                    "Manager or Content Manager"
                    if self._trash_only
                    else "Manager",
                    "https://support.google.com/a/answer/7337554",
                )
            ) from exc
        raise
class GDriveBufferedWriter(io.IOBase):
    """Write-only file object that accumulates data in an in-memory
    BytesIO buffer and uploads it to ``path`` on flush()/close().

    NOTE(review): flush() performs the actual upload and marks the
    writer closed, so it is effectively one-shot; calling flush()
    explicitly and then close() skips buffer.close(). Confirm whether
    flush() is meant to be callable mid-stream.
    """

    def __init__(self, fs, path):
        # fs: the owning GDriveFileSystem; path: remote destination.
        self.fs = fs
        self.path = path
        self.buffer = io.BytesIO()
        self._closed = False

    def write(self, *args, **kwargs):
        self.buffer.write(*args, **kwargs)

    def readable(self):
        return False

    def writable(self):
        return not self.readable()

    def flush(self):
        self.buffer.flush()
        try:
            # Upload whatever has been buffered so far.
            self.fs.upload_fobj(self.buffer, self.path)
        finally:
            # Mark closed even if the upload failed, so close() won't
            # try to re-upload.
            self._closed = True

    def close(self):
        if self._closed:
            return None
        self.flush()
        self.buffer.close()
        self._closed = True

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.close()

    @property
    def closed(self):
        return self._closed
PyDrive2-1.15.0/pydrive2/fs/utils.py 0000664 0000000 0000000 00000003067 14334672560 0017150 0 ustar 00root root 0000000 0000000 import io
class IterStream(io.RawIOBase):
    """Read-only file object over an iterator that yields byte chunks.

    Bytes not consumed by a read are kept in ``leftover`` and served
    before the iterator is advanced again.
    """

    def __init__(self, iterator):  # pylint: disable=super-init-not-called
        self.iterator = iterator
        self.leftover = b""

    def readable(self):
        return True

    def writable(self) -> bool:
        return False

    # Python 3 requires only .readinto(); other read methods fall back to
    # it. Since the iterator already yields ready-made byte strings,
    # .read1() is provided as a cheaper path that avoids buffer copies.
    def readinto(self, b):
        capacity = len(b)  # at most this many bytes may be returned
        try:
            data = self.leftover or next(self.iterator)
        except StopIteration:
            return 0  # EOF
        served, self.leftover = data[:capacity], data[capacity:]
        b[: len(served)] = served
        return len(served)

    readinto1 = readinto

    def read1(self, n=-1):
        try:
            data = self.leftover or next(self.iterator)
        except StopIteration:
            return b""
        # May return an arbitrary number of bytes (up to one chunk).
        if n <= 0:
            self.leftover = b""
            return data
        served, self.leftover = data[:n], data[n:]
        return served

    def peek(self, n):
        # Pull chunks until at least n bytes are buffered (or EOF).
        while len(self.leftover) < n:
            try:
                chunk = next(self.iterator)
            except StopIteration:
                break
            self.leftover += chunk
        return self.leftover[:n]
PyDrive2-1.15.0/pydrive2/settings.py 0000664 0000000 0000000 00000014202 14334672560 0017231 0 ustar 00root root 0000000 0000000 from yaml import load
from yaml import YAMLError
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
SETTINGS_FILE = "settings.yaml"
SETTINGS_STRUCT = {
"client_config_backend": {
"type": str,
"required": True,
"default": "file",
"dependency": [
{"value": "file", "attribute": ["client_config_file"]},
{"value": "settings", "attribute": ["client_config"]},
{"value": "service", "attribute": ["service_config"]},
],
},
"save_credentials": {
"type": bool,
"required": True,
"default": False,
"dependency": [
{"value": True, "attribute": ["save_credentials_backend"]}
],
},
"get_refresh_token": {"type": bool, "required": False, "default": False},
"client_config_file": {
"type": str,
"required": False,
"default": "client_secrets.json",
},
"save_credentials_backend": {
"type": str,
"required": False,
"dependency": [
{"value": "file", "attribute": ["save_credentials_file"]},
{"value": "dictionary", "attribute": ["save_credentials_dict"]},
{"value": "dictionary", "attribute": ["save_credentials_key"]},
],
},
"client_config": {
"type": dict,
"required": False,
"struct": {
"client_id": {"type": str, "required": True},
"client_secret": {"type": str, "required": True},
"auth_uri": {
"type": str,
"required": True,
"default": "https://accounts.google.com/o/oauth2/auth",
},
"token_uri": {
"type": str,
"required": True,
"default": "https://accounts.google.com/o/oauth2/token",
},
"redirect_uri": {
"type": str,
"required": True,
"default": "urn:ietf:wg:oauth:2.0:oob",
},
"revoke_uri": {"type": str, "required": True, "default": None},
},
},
"service_config": {
"type": dict,
"required": False,
"struct": {
"client_user_email": {
"type": str,
"required": True,
"default": None,
},
"client_service_email": {"type": str, "required": False},
"client_pkcs12_file_path": {"type": str, "required": False},
"client_json_file_path": {"type": str, "required": False},
"client_json_dict": {
"type": dict,
"required": False,
"struct": {},
},
"client_json": {"type": str, "required": False},
},
},
"oauth_scope": {
"type": list,
"required": True,
"struct": str,
"default": ["https://www.googleapis.com/auth/drive"],
},
"save_credentials_file": {"type": str, "required": False},
"save_credentials_dict": {"type": dict, "required": False, "struct": {}},
"save_credentials_key": {"type": str, "required": False},
}
class SettingsError(IOError):
    """Error while loading/saving settings"""

    # Subclasses IOError so existing callers that catch IOError keep
    # working.
class InvalidConfigError(IOError):
    """Error trying to read client configuration."""

    # Raised by the settings validators below when a required key is
    # missing or a value has the wrong type.
def LoadSettingsFile(filename=SETTINGS_FILE):
    """Loads settings file in yaml format given file name.

    :param filename: path for settings file. 'settings.yaml' by default.
    :type filename: str.
    :raises: SettingsError
    """
    try:
        with open(filename) as stream:
            # NOTE(review): this uses the full (C)Loader, which can
            # construct arbitrary Python objects from tagged YAML.
            # Settings files are user-authored, but SafeLoader /
            # yaml.safe_load would be the safer default — confirm
            # before changing.
            data = load(stream, Loader=Loader)
    except (YAMLError, OSError) as e:
        raise SettingsError(e)
    return data
def ValidateSettings(data):
    """Validates if current settings is valid.

    :param data: dictionary containing all settings.
    :type data: dict.
    :raises: InvalidConfigError
    """
    # Delegates to the recursive struct validator with the top-level
    # schema; may mutate ``data`` by filling in defaults.
    _ValidateSettingsStruct(data, SETTINGS_STRUCT)
def _ValidateSettingsStruct(data, struct):
    """Validates if provided data fits provided structure.

    Only keys marked required are validated here; optional keys are
    validated on demand through each element's "dependency" entries.

    :param data: dictionary containing settings.
    :type data: dict.
    :param struct: dictionary containing structure information of settings.
    :type struct: dict.
    :raises: InvalidConfigError
    """
    required = (key for key, spec in struct.items() if spec["required"])
    for key in required:
        _ValidateSettingsElement(data, struct, key)
def _ValidateSettingsElement(data, struct, key):
    """Validates if provided element of settings data fits provided structure.

    May mutate ``data`` by inserting the schema default when the key is
    absent. Recurses into nested dict schemas, checks homogeneous list
    element types, and pulls in dependent keys declared under
    "dependency" when this element's value matches.

    :param data: dictionary containing settings.
    :type data: dict.
    :param struct: dictionary containing structure information of settings.
    :type struct: dict.
    :param key: key of the settings element to validate.
    :type key: str.
    :raises: InvalidConfigError
    """
    # Check if data exists. If not, check if default value exists.
    value = data.get(key)
    data_type = struct[key]["type"]
    if value is None:
        try:
            default = struct[key]["default"]
        except KeyError:
            raise InvalidConfigError("Missing required setting %s" % key)
        else:
            data[key] = default
    # If data exists, Check type of the data
    elif type(value) is not data_type:
        raise InvalidConfigError(f"Setting {key} should be type {data_type}")
    # If type of this data is dict, check if structure of the data is valid.
    if data_type is dict:
        _ValidateSettingsStruct(data[key], struct[key]["struct"])
    # If type of this data is list, check if all values in the list is valid.
    elif data_type is list:
        for element in data[key]:
            if type(element) is not struct[key]["struct"]:
                raise InvalidConfigError(
                    "Setting %s should be list of %s"
                    % (key, struct[key]["struct"])
                )
    # Check dependency of this attribute.
    dependencies = struct[key].get("dependency")
    if dependencies:
        for dependency in dependencies:
            # NOTE: ``value`` is the value before default substitution,
            # so dependencies trigger only on explicitly-set values.
            if value == dependency["value"]:
                for reqkey in dependency["attribute"]:
                    _ValidateSettingsElement(data, struct, reqkey)
PyDrive2-1.15.0/pydrive2/test/ 0000775 0000000 0000000 00000000000 14334672560 0015777 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/pydrive2/test/.gitignore 0000664 0000000 0000000 00000000072 14334672560 0017766 0 ustar 00root root 0000000 0000000 credentials/*
settings/local/*
client_secrets.json
*.p12
PyDrive2-1.15.0/pydrive2/test/README.rst 0000664 0000000 0000000 00000005400 14334672560 0017465 0 ustar 00root root 0000000 0000000 Run tests locally
-----------------
1. Copy settings files to the :code:`pydrive2/test/settings/local` directory:
::
cd pydrive2/test/settings && cp *.yaml local
2. Setup a Google service account for your Google Cloud Project:
- Sign into the `Google API Console
`_
- Select or `Create a new
`_
project.
- `Enable the Drive API
`_ from the **APIs &
Services** **Dashboard** (left sidebar), click on **+ ENABLE APIS AND
SERVICES**. Find and select the "Google Drive API" in the API Library, and
click on the **ENABLE** button.
- Go back to **IAM & Admin** in the left
sidebar, and select **Service Accounts**. Click **+ CREATE SERVICE
ACCOUNT**, on the next screen, enter **Service account name** e.g. "PyDrive
tests", and click **Create**. Select **Continue** at the next **Service
account permissions** page, click at **+ CREATE KEY**, select **JSON** and
**Create**. Save generated :code:`.json` key file at your local disk.
- Copy downloaded :code:`json` file to :code:`/tmp/pydrive2/credentials.json`
directory.
3. Optional. If you would like to use your own an OAuth client ID follow the steps:
- Under `Google API Console `_ select
**APIs & Services** from the left sidebar, and select **OAuth consent screen**.
Chose a **User Type** and click **CREATE**. On the next screen, enter an
**Application name** e.g. "PyDrive tests", and click the **Save** (scroll to
bottom).
- From the left sidebar, select **Credentials**, and click the
**Create credentials** dropdown to select **OAuth client ID**. Chose **Other**
and click **Create** to proceed with a default client name. At **Credentials**
screen find a list of your **OAuth 2.0 Client IDs**, click download icon in
front of your OAuth client id created previously. You should be prompted to
download :code:`client_secret_xxx_.json` file.
- Copy downloaded :code:`.json` file into :code:`pydrive2/test` directory
and rename to :code:`client_secrets.json`.
- Replace {{ }} sections
in :code:`pydrive2/test/settings/local/test_oauth_test_02.yaml` with the
relevant values of :code:`client_id` and :code:`client_secret` from your
**client_secrets.json** file.
4. Setup virtual environment (recommended optional step):
::
virtualenv -p python .env
source .env/bin/activate
5. Install :code:`tests` deps from the root directory of the project:
::
pip install -e .[tests,fsspec]
6. Run tests:
::
py.test -v -s
PyDrive2-1.15.0/pydrive2/test/__init__.py 0000664 0000000 0000000 00000000000 14334672560 0020076 0 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/pydrive2/test/client_secrets.json 0000664 0000000 0000000 00000000604 14334672560 0021700 0 ustar 00root root 0000000 0000000 {"web":{"client_id":"47794215776-et2ir6ngpul4m4pn95tnfrtvuuahrvpt.apps.googleusercontent.com","project_id":"dvc-pydrive","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"FWSfDKs2i_0z_KQEoHAfqU2G","redirect_uris":["http://localhost:8080/"]}} PyDrive2-1.15.0/pydrive2/test/credentials/ 0000775 0000000 0000000 00000000000 14334672560 0020274 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/pydrive2/test/credentials/.keepme 0000664 0000000 0000000 00000000000 14334672560 0021531 0 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/pydrive2/test/settings/ 0000775 0000000 0000000 00000000000 14334672560 0017637 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/pydrive2/test/settings/default.yaml 0000664 0000000 0000000 00000000405 14334672560 0022146 0 ustar 00root root 0000000 0000000 client_config_backend: service
service_config:
client_json_file_path: /tmp/pydrive2/credentials.json
save_credentials: True
save_credentials_backend: file
save_credentials_file: credentials/default.dat
oauth_scope:
- https://www.googleapis.com/auth/drive
PyDrive2-1.15.0/pydrive2/test/settings/local/ 0000775 0000000 0000000 00000000000 14334672560 0020731 5 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/pydrive2/test/settings/local/.keepme 0000664 0000000 0000000 00000000000 14334672560 0022166 0 ustar 00root root 0000000 0000000 PyDrive2-1.15.0/pydrive2/test/settings/test_oauth_default.yaml 0000664 0000000 0000000 00000000334 14334672560 0024406 0 ustar 00root root 0000000 0000000 client_config_backend: file
client_config_file: client_secrets.json
save_credentials: True
save_credentials_backend: file
save_credentials_file: credentials/1.dat
oauth_scope:
- https://www.googleapis.com/auth/drive
PyDrive2-1.15.0/pydrive2/test/settings/test_oauth_test_01.yaml 0000664 0000000 0000000 00000000334 14334672560 0024241 0 ustar 00root root 0000000 0000000 client_config_backend: file
client_config_file: client_secrets.json
save_credentials: True
save_credentials_backend: file
save_credentials_file: credentials/1.dat
oauth_scope:
- https://www.googleapis.com/auth/drive
PyDrive2-1.15.0/pydrive2/test/settings/test_oauth_test_02.yaml 0000664 0000000 0000000 00000000506 14334672560 0024243 0 ustar 00root root 0000000 0000000 client_config_backend: settings
client_config:
client_id: 47794215776-cd9ssb6a4vv5otkq6n0iadpgc4efgjb1.apps.googleusercontent.com
client_secret: i2gerGA7uBjZbR08HqSOSt9Z
save_credentials: True
save_credentials_backend: file
save_credentials_file: credentials/2.dat
oauth_scope:
- https://www.googleapis.com/auth/drive
PyDrive2-1.15.0/pydrive2/test/settings/test_oauth_test_03.yaml 0000664 0000000 0000000 00000000225 14334672560 0024242 0 ustar 00root root 0000000 0000000 client_config_backend: file
client_config_file: client_secrets.json
save_credentials: False
oauth_scope:
- https://www.googleapis.com/auth/drive
PyDrive2-1.15.0/pydrive2/test/settings/test_oauth_test_04.yaml 0000664 0000000 0000000 00000000334 14334672560 0024244 0 ustar 00root root 0000000 0000000 client_config_backend: file
client_config_file: client_secrets.json
save_credentials: True
save_credentials_backend: file
save_credentials_file: credentials/4.dat
oauth_scope:
- https://www.googleapis.com/auth/drive
PyDrive2-1.15.0/pydrive2/test/settings/test_oauth_test_05.yaml 0000664 0000000 0000000 00000000104 14334672560 0024240 0 ustar 00root root 0000000 0000000 client_config_backend: file
client_config_file: client_secrets.json
PyDrive2-1.15.0/pydrive2/test/settings/test_oauth_test_06.yaml 0000664 0000000 0000000 00000000450 14334672560 0024245 0 ustar 00root root 0000000 0000000 client_config_backend: service
service_config:
client_service_email: your-service-account-email
client_pkcs12_file_path: your-file-path.p12
save_credentials: True
save_credentials_backend: file
save_credentials_file: credentials/6.dat
oauth_scope:
- https://www.googleapis.com/auth/drive
PyDrive2-1.15.0/pydrive2/test/settings/test_oauth_test_07.yaml 0000664 0000000 0000000 00000000377 14334672560 0024256 0 ustar 00root root 0000000 0000000 client_config_backend: service
service_config:
client_json_file_path: /tmp/pydrive2/credentials.json
save_credentials: True
save_credentials_backend: file
save_credentials_file: credentials/7.dat
oauth_scope:
- https://www.googleapis.com/auth/drive
PyDrive2-1.15.0/pydrive2/test/settings/test_oauth_test_08.yaml 0000664 0000000 0000000 00000000270 14334672560 0024247 0 ustar 00root root 0000000 0000000 client_config_backend: service
service_config:
client_json_file_path: /tmp/pydrive2/credentials.json
save_credentials: False
oauth_scope:
- https://www.googleapis.com/auth/drive
PyDrive2-1.15.0/pydrive2/test/settings/test_oauth_test_09.yaml 0000664 0000000 0000000 00000000377 14334672560 0024260 0 ustar 00root root 0000000 0000000 client_config_backend: service
service_config:
client_json_file_path: /tmp/pydrive2/credentials.json
save_credentials: True
save_credentials_backend: file
save_credentials_file: credentials/9.dat
oauth_scope:
- https://www.googleapis.com/auth/drive
PyDrive2-1.15.0/pydrive2/test/test_apiattr.py 0000664 0000000 0000000 00000002160 14334672560 0021053 0 ustar 00root root 0000000 0000000 import unittest
from pydrive2.auth import GoogleAuth
from pydrive2.drive import GoogleDrive
from pydrive2.test.test_util import (
pydrive_retry,
setup_credentials,
settings_file_path,
)
class ApiAttributeTest(unittest.TestCase):
    """Test ApiAttr functions.

    These tests hit the real Google Drive API: setUp authenticates with
    a service account and uploads a throwaway file, tearDown deletes it.
    """

    @classmethod
    def setup_class(cls):
        setup_credentials()

    def test_UpdateMetadataNotInfinitelyNesting(self):
        # Verify 'metadata' field present.
        self.assertTrue(self.file1.metadata is not None)
        pydrive_retry(self.file1.UpdateMetadata)

        # Verify 'metadata' field still present.
        self.assertTrue(self.file1.metadata is not None)
        # Ensure no 'metadata' field in 'metadata' (i.e. nested).
        self.assertTrue("metadata" not in self.file1.metadata)

    def setUp(self):
        # Fresh authenticated drive and an uploaded empty file per test.
        ga = GoogleAuth(settings_file_path("default.yaml"))
        ga.ServiceAuth()

        self.drive = GoogleDrive(ga)
        self.file1 = self.drive.CreateFile()
        pydrive_retry(self.file1.Upload)

    def tearDown(self):
        pydrive_retry(self.file1.Delete)
if __name__ == "__main__":
unittest.main()
PyDrive2-1.15.0/pydrive2/test/test_drive.py 0000664 0000000 0000000 00000001401 14334672560 0020515 0 ustar 00root root 0000000 0000000 import unittest
from pydrive2.auth import GoogleAuth
from pydrive2.drive import GoogleDrive
from pydrive2.test.test_util import (
pydrive_retry,
setup_credentials,
settings_file_path,
)
class GoogleDriveTest(unittest.TestCase):
    """Tests basic operations on meta-data information of the linked Google
    Drive account.

    Requires network access and valid service-account credentials
    (set up once for the whole class).
    """

    @classmethod
    def setup_class(cls):
        setup_credentials()
        cls.ga = GoogleAuth(settings_file_path("default.yaml"))
        cls.ga.ServiceAuth()

    def test_01_About_Request(self):
        drive = GoogleDrive(self.ga)
        about_object = pydrive_retry(drive.GetAbout)
        self.assertTrue(about_object is not None, "About object not loading.")
if __name__ == "__main__":
unittest.main()
PyDrive2-1.15.0/pydrive2/test/test_file.py 0000664 0000000 0000000 00000103621 14334672560 0020332 0 ustar 00root root 0000000 0000000 import filecmp
import os
import unittest
import pytest
import sys
from io import BytesIO
from tempfile import mkdtemp
from time import time
import timeout_decorator
from concurrent.futures import ThreadPoolExecutor, as_completed
from googleapiclient import errors
from pydrive2.auth import GoogleAuth
from pydrive2.drive import GoogleDrive
from pydrive2.files import ApiRequestError, GoogleDriveFile
from pydrive2.test import test_util
from pydrive2.test.test_util import (
pydrive_retry,
setup_credentials,
create_file,
delete_dir,
delete_file,
settings_file_path,
)
class GoogleDriveFileTest(unittest.TestCase):
"""Tests basic file operations of files.GoogleDriveFile.
Upload and download of contents and metadata, and thread-safety checks.
Equivalent to Files.insert, Files.update, Files.patch in Google Drive API.
"""
@classmethod
def setup_class(cls):
setup_credentials()
cls.tmpdir = mkdtemp()
cls.ga = GoogleAuth(
settings_file_path("default.yaml", os.path.join(cls.tmpdir, ""))
)
cls.ga.ServiceAuth()
@classmethod
def tearDownClass(cls):
delete_dir(cls.tmpdir)
@classmethod
def getTempFile(cls, prefix="", content=""):
filename = os.path.join(cls.tmpdir, prefix + str(time()))
if content:
create_file(filename, content)
return filename
def test_Files_Insert(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile("firsttestfile")
file1["title"] = filename
pydrive_retry(file1.Upload) # Files.insert
self.assertEqual(file1.metadata["title"], filename)
file2 = drive.CreateFile({"id": file1["id"]}) # Download file from id.
self.assertEqual(file2["title"], filename)
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_Insert_Unicode(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile("첫번째 파일")
file1["title"] = filename
pydrive_retry(file1.Upload) # Files.insert
self.assertEqual(file1.metadata["title"], filename)
file2 = drive.CreateFile({"id": file1["id"]}) # Download file from id.
self.assertEqual(file2["title"], filename)
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_Insert_Content_String(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile("secondtestfile")
content = "hello world!"
file1["title"] = filename
file1.SetContentString(content)
pydrive_retry(file1.Upload) # Files.insert
self.assertEqual(file1.GetContentString(), content)
pydrive_retry(
file1.FetchContent
) # Force download and double check content
self.assertEqual(file1.metadata["title"], filename)
self.assertEqual(file1.GetContentString(), content)
file2 = drive.CreateFile({"id": file1["id"]}) # Download file from id.
pydrive_retry(file2.FetchContent)
self.assertEqual(file2.GetContentString(), content)
self.assertEqual(file2.metadata["title"], filename)
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_Insert_Content_Unicode_String(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile("두번째 파일")
content = "안녕 세상아!"
file1["title"] = filename
file1.SetContentString(content)
pydrive_retry(file1.Upload) # Files.insert
self.assertEqual(file1.GetContentString(), content)
self.assertEqual(file1.metadata["title"], filename)
pydrive_retry(
file1.FetchContent
) # Force download and double check content.
self.assertEqual(file1.GetContentString(), content)
file2 = drive.CreateFile({"id": file1["id"]}) # Download file from id.
pydrive_retry(file2.FetchContent)
self.assertEqual(file2.GetContentString(), content)
self.assertEqual(file2.metadata["title"], filename)
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_Insert_Content_File(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile("filecontent")
file1["title"] = filename
contentFile = self.getTempFile("actual_content", "some string")
file1.SetContentFile(contentFile)
pydrive_retry(file1.Upload) # Files.insert
self.assertEqual(file1.metadata["title"], filename)
pydrive_retry(
file1.FetchContent
) # Force download and double check content.
fileOut = self.getTempFile()
pydrive_retry(file1.GetContentFile, fileOut)
self.assertEqual(filecmp.cmp(contentFile, fileOut), True)
file2 = drive.CreateFile({"id": file1["id"]}) # Download file from id.
fileOut = self.getTempFile()
pydrive_retry(file2.GetContentFile, fileOut)
self.assertEqual(filecmp.cmp(contentFile, fileOut), True)
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_Patch(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile("prepatchtestfile")
newfilename = self.getTempFile("patchtestfile")
file1["title"] = filename
pydrive_retry(file1.Upload) # Files.insert
self.assertEqual(file1.metadata["title"], filename)
file1["title"] = newfilename
pydrive_retry(file1.Upload) # Files.patch
self.assertEqual(file1.metadata["title"], newfilename)
file2 = drive.CreateFile({"id": file1["id"]}) # Download file from id.
pydrive_retry(file2.FetchMetadata)
self.assertEqual(file2.metadata["title"], newfilename)
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_Patch_By_Id(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile("prepatchtestfile")
newfilename = self.getTempFile("patchtestfile")
file1["title"] = filename
pydrive_retry(file1.Upload) # Files.insert
file2 = drive.CreateFile(
{"id": file1["id"]}
) # Patch file no download.
file2["title"] = newfilename
pydrive_retry(file2.Upload) # Files.patch
file3 = drive.CreateFile({"id": file1["id"]}) # Download file from id.
pydrive_retry(file3.FetchMetadata)
self.assertEqual(file3.metadata["title"], newfilename)
def test_Files_Patch_Skipping_Content(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile("prepatchtestfile")
newfilename = self.getTempFile("patchtestfile")
content = "hello world!"
file1["title"] = filename
file1.SetContentString(content)
pydrive_retry(file1.Upload) # Files.insert
self.assertEqual(file1.metadata["title"], filename)
file1["title"] = newfilename
pydrive_retry(file1.Upload) # Files.patch
self.assertEqual(file1.metadata["title"], newfilename)
self.assertEqual(file1.GetContentString(), content)
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_Update_String(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile("preupdatetestfile")
newfilename = self.getTempFile("updatetestfile")
content = "hello world!"
newcontent = "hello new world!"
file1["title"] = filename
file1.SetContentString(content)
pydrive_retry(file1.Upload) # Files.insert
self.assertEqual(file1.metadata["title"], filename)
self.assertEqual(file1.GetContentString(), content)
pydrive_retry(
file1.FetchContent
) # Force download and double check content.
self.assertEqual(file1.GetContentString(), content)
file1["title"] = newfilename
file1.SetContentString(newcontent)
pydrive_retry(file1.Upload) # Files.update
self.assertEqual(file1.metadata["title"], newfilename)
self.assertEqual(file1.GetContentString(), newcontent)
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_Update_File(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile("preupdatetestfile")
newfilename = self.getTempFile("updatetestfile")
contentFile = self.getTempFile("actual_content", "some string")
contentFile2 = self.getTempFile("actual_content_2", "some string")
file1["title"] = filename
file1.SetContentFile(contentFile)
pydrive_retry(file1.Upload) # Files.insert
self.assertEqual(file1.metadata["title"], filename)
pydrive_retry(
file1.FetchContent
) # Force download and double check content.
fileOut = self.getTempFile()
pydrive_retry(file1.GetContentFile, fileOut)
self.assertEqual(filecmp.cmp(contentFile, fileOut), True)
file1["title"] = newfilename
file1.SetContentFile(contentFile2)
pydrive_retry(file1.Upload) # Files.update
self.assertEqual(file1.metadata["title"], newfilename)
fileOut = self.getTempFile()
pydrive_retry(file1.GetContentFile, fileOut)
self.assertEqual(filecmp.cmp(contentFile2, fileOut), True)
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_Update_By_Id(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile("preupdatetestfile")
newfilename = self.getTempFile("updatetestfile")
content = "hello world!"
newcontent = "hello new world!"
file1["title"] = filename
file1.SetContentString(content)
pydrive_retry(file1.Upload) # Files.insert
file2 = drive.CreateFile({"id": file1["id"]})
file2["title"] = newfilename
file2.SetContentString(newcontent)
pydrive_retry(file2.Upload) # Files.update
file3 = drive.CreateFile({"id": file1["id"]})
pydrive_retry(file3.FetchContent)
self.assertEqual(file3.GetContentString(), newcontent)
self.assertEqual(file3["title"], newfilename)
def test_Files_Download_Service(self):
"""
Tests that a fresh GoogleDrive object can correctly authenticate
and download from a file ID.
"""
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile("prepatchtestfile")
content = "hello world!"
file1["title"] = filename
file1.SetContentString(content)
pydrive_retry(file1.Upload) # Files.insert
self.assertEqual(file1.metadata["title"], filename)
fileOut1 = self.getTempFile()
pydrive_retry(file1.GetContentFile, fileOut1)
# fresh download-only instance
auth = GoogleAuth(
settings_file_path("default.yaml", os.path.join(self.tmpdir, ""))
)
auth.ServiceAuth()
drive2 = GoogleDrive(auth)
file2 = drive2.CreateFile({"id": file1["id"]})
fileOut2 = self.getTempFile()
pydrive_retry(file2.GetContentFile, fileOut2)
self.assertEqual(filecmp.cmp(fileOut1, fileOut2), True)
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_Get_Content_Buffer(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
filename = self.getTempFile()
content = "hello world!\ngoodbye, cruel world!"
file1["title"] = filename
file1.SetContentString(content)
pydrive_retry(file1.Upload) # Files.insert
buffer1 = pydrive_retry(file1.GetContentIOBuffer)
self.assertEqual(file1.metadata["title"], filename)
self.assertEqual(len(buffer1), len(content))
self.assertEqual(b"".join(iter(buffer1)).decode("ascii"), content)
buffer2 = pydrive_retry(file1.GetContentIOBuffer, encoding="ascii")
self.assertEqual(len(buffer2), len(content))
self.assertEqual("".join(iter(buffer2)), content)
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Upload_Download_Empty_File(self):
filename = os.path.join(self.tmpdir, str(time()))
create_file(filename, "")
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
file1.SetContentFile(filename)
pydrive_retry(file1.Upload)
fileOut1 = self.getTempFile()
pydrive_retry(file1.GetContentFile, fileOut1)
self.assertEqual(os.path.getsize(fileOut1), 0)
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Upload_Download_Empty_String(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
file1.SetContentString("")
pydrive_retry(file1.Upload)
self.assertEqual(pydrive_retry(file1.GetContentString), "")
# Force download and double check content
pydrive_retry(file1.FetchContent)
self.assertEqual(file1.GetContentString(), "")
# Download file from id
file2 = drive.CreateFile({"id": file1["id"]})
pydrive_retry(file2.FetchContent)
self.assertEqual(file2.GetContentString(), "")
self.DeleteUploadedFiles(drive, [file1["id"]])
# Tests for Trash/UnTrash/Delete.
# ===============================
def test_Files_Trash_File(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
pydrive_retry(file1.Upload)
self.assertFalse(file1.metadata["labels"]["trashed"])
# Download to verify non-trashed state on GDrive.
file2 = drive.CreateFile({"id": file1["id"]})
pydrive_retry(file2.FetchMetadata)
self.assertFalse(file2.metadata["labels"]["trashed"])
pydrive_retry(file1.Trash)
self.assertTrue(file1.metadata["labels"]["trashed"])
pydrive_retry(file2.FetchMetadata)
self.assertTrue(file2.metadata["labels"]["trashed"])
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_Trash_File_Just_ID(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
pydrive_retry(file1.Upload)
self.assertFalse(file1.metadata["labels"]["trashed"])
# Trash file by ID.
file2 = drive.CreateFile({"id": file1["id"]})
pydrive_retry(file2.Trash)
# Verify trashed by downloading metadata.
pydrive_retry(file1.FetchMetadata)
self.assertTrue(file1.metadata["labels"]["trashed"])
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_UnTrash_File(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
pydrive_retry(file1.Upload)
pydrive_retry(file1.Trash)
self.assertTrue(file1.metadata["labels"]["trashed"])
# Verify that file is trashed by downloading metadata.
file2 = drive.CreateFile({"id": file1["id"]})
pydrive_retry(file2.FetchMetadata)
self.assertTrue(file2.metadata["labels"]["trashed"])
# Un-trash the file, and assert local metadata is updated correctly.
pydrive_retry(file1.UnTrash)
self.assertFalse(file1.metadata["labels"]["trashed"])
# Re-fetch the metadata, and assert file un-trashed on GDrive.
pydrive_retry(file2.FetchMetadata)
self.assertFalse(file2.metadata["labels"]["trashed"])
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_UnTrash_File_Just_ID(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
pydrive_retry(file1.Upload)
pydrive_retry(file1.Trash)
self.assertTrue(file1.metadata["labels"]["trashed"])
file2 = drive.CreateFile({"id": file1["id"]})
pydrive_retry(file2.UnTrash) # UnTrash without fetching metadata.
pydrive_retry(file1.FetchMetadata)
self.assertFalse(file1.metadata["labels"]["trashed"])
self.DeleteUploadedFiles(drive, [file1["id"]])
def test_Files_Delete_File(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
pydrive_retry(file1.Upload)
file2 = drive.CreateFile({"id": file1["id"]})
pydrive_retry(file1.Delete)
try:
pydrive_retry(file2.FetchMetadata)
self.fail("File not deleted correctly.")
except ApiRequestError:
pass
def test_Files_Delete_File_Just_ID(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
pydrive_retry(file1.Upload)
file2 = drive.CreateFile({"id": file1["id"]})
pydrive_retry(file2.Delete)
try:
pydrive_retry(file1.FetchMetadata)
self.fail("File not deleted correctly.")
except ApiRequestError:
pass
# Tests for Permissions.
# ======================
def test_Files_FetchMetadata_Fields(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
pydrive_retry(file1.Upload)
self.assertFalse("permissions" in file1)
pydrive_retry(file1.FetchMetadata, "permissions")
self.assertTrue("permissions" in file1)
pydrive_retry(file1.Delete)
def test_Files_FetchAllMetadata_Fields(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
pydrive_retry(file1.Upload)
pydrive_retry(file1.FetchMetadata, fetch_all=True)
self.assertTrue("hasThumbnail" in file1)
self.assertTrue("thumbnailVersion" in file1)
self.assertTrue("permissions" in file1)
pydrive_retry(file1.Delete)
def test_Files_Insert_Permission(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
pydrive_retry(file1.Upload)
# Verify only one permission before inserting permission.
permissions = pydrive_retry(file1.GetPermissions)
self.assertEqual(len(permissions), 1)
self.assertEqual(len(file1["permissions"]), 1)
# Insert the permission.
permission = pydrive_retry(
file1.InsertPermission,
{"type": "anyone", "value": "anyone", "role": "reader"},
)
self.assertTrue(permission)
self.assertEqual(len(file1["permissions"]), 2)
self.assertEqual(file1["permissions"][0]["type"], "anyone")
permissions = pydrive_retry(file1.GetPermissions)
self.assertEqual(len(file1["permissions"]), 2)
self.assertEqual(file1["permissions"][0]["type"], "anyone")
self.assertEqual(permissions[0]["type"], "anyone")
# Verify remote changes made.
file2 = drive.CreateFile({"id": file1["id"]})
permissions = pydrive_retry(file2.GetPermissions)
self.assertEqual(len(permissions), 2)
self.assertEqual(permissions[0]["type"], "anyone")
pydrive_retry(file1.Delete)
def test_Files_Get_Permissions(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
pydrive_retry(file1.Upload)
self.assertFalse("permissions" in file1)
permissions = pydrive_retry(file1.GetPermissions)
self.assertTrue(permissions is not None)
self.assertTrue("permissions" in file1)
pydrive_retry(file1.Delete)
def test_Files_Delete_Permission(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
pydrive_retry(file1.Upload)
pydrive_retry(
file1.InsertPermission,
{"type": "anyone", "value": "anyone", "role": "reader"},
)
permissions = pydrive_retry(file1.GetPermissions)
self.assertEqual(len(permissions), 2)
self.assertEqual(len(file1["permissions"]), 2)
pydrive_retry(file1.DeletePermission, permissions[0]["id"])
self.assertEqual(len(file1["permissions"]), 1)
# Verify remote changes made.
file2 = drive.CreateFile({"id": file1["id"]})
permissions = pydrive_retry(file2.GetPermissions)
self.assertEqual(len(permissions), 1)
pydrive_retry(file1.Delete)
def test_Files_Delete_Permission_Invalid(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
pydrive_retry(file1.Upload)
try:
pydrive_retry(file1.DeletePermission, "invalid id")
self.fail("Deleting invalid permission not raising exception.")
except ApiRequestError:
pass
pydrive_retry(file1.Delete)
def test_ApiRequestError_HttpError_Propagation(self):
file = GoogleDrive(self.ga).CreateFile()
pydrive_retry(file.Upload)
try:
pydrive_retry(file.DeletePermission, "invalid id")
self.fail("Deleting invalid permission not raising exception.")
except ApiRequestError as exc:
self.assertTrue(
exc.args and isinstance(exc.args[0], errors.HttpError)
)
self.assertTrue(exc.error is not None)
# Validating for HttpError 404 "Permission not found: invalid id"
self.assertTrue(exc.error["code"] == 404)
finally:
pydrive_retry(file.Delete)
def test_GFile_Conversion_Lossless_String(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
# Upload a string, and convert into Google Doc format.
test_string = "Generic, non-exhaustive ASCII test string."
file1.SetContentString(test_string)
pydrive_retry(file1.Upload, {"convert": True})
# Download string as plain text.
downloaded_string = file1.GetContentString(mimetype="text/plain")
self.assertEqual(
test_string, downloaded_string, "Strings do not match"
)
# Download content into file and ensure that file content matches original
# content string.
downloaded_file_name = "_tmp_downloaded_file_name.txt"
pydrive_retry(
file1.GetContentFile,
downloaded_file_name,
mimetype="text/plain",
remove_bom=True,
)
downloaded_string = open(downloaded_file_name).read()
self.assertEqual(
test_string, downloaded_string, "Strings do not match"
)
# Delete temp file.
delete_file(downloaded_file_name)
# Tests for GDrive conversion.
# ============================
def setup_gfile_conversion_test(self):
drive = GoogleDrive(self.ga)
file1 = drive.CreateFile()
# Create a file to upload.
file_name = "_tmp_source_file.txt"
downloaded_file_name = "_tmp_downloaded_file_name.txt"
original_file_content = "Generic, non-exhaustive\n ASCII test string."
source_file = open(file_name, mode="w+")
source_file.write(original_file_content)
source_file.close()
original_file_content = test_util.StripNewlines(original_file_content)
return file1, file_name, original_file_content, downloaded_file_name
def cleanup_gfile_conversion_test(
self, file1, file_name, downloaded_file_name
):
# Delete temporary files.
os.path.exists(file_name) and os.remove(file_name)
os.path.exists(downloaded_file_name) and os.remove(
downloaded_file_name
)
pydrive_retry(file1.Delete) # Delete uploaded file.
def test_GFile_Conversion_Remove_BOM(self):
(
file1,
file_name,
original_file_content,
downloaded_file_name,
) = self.setup_gfile_conversion_test()
try:
# Upload source_file and convert into Google Doc format.
file1.SetContentFile(file_name)
pydrive_retry(file1.Upload, {"convert": True})
# Download as string.
downloaded_content_no_bom = file1.GetContentString(
mimetype="text/plain", remove_bom=True
)
downloaded_content_no_bom = test_util.StripNewlines(
downloaded_content_no_bom
)
self.assertEqual(original_file_content, downloaded_content_no_bom)
# Download as file.
pydrive_retry(
file1.GetContentFile, downloaded_file_name, remove_bom=True
)
downloaded_content = open(downloaded_file_name).read()
downloaded_content = test_util.StripNewlines(downloaded_content)
self.assertEqual(original_file_content, downloaded_content)
finally:
self.cleanup_gfile_conversion_test(
file1, file_name, downloaded_file_name
)
def test_Gfile_Conversion_Add_Remove_BOM(self):
"""Tests whether you can switch between the BOM appended and removed
version on the fly."""
(
file1,
file_name,
original_file_content,
downloaded_file_name,
) = self.setup_gfile_conversion_test()
try:
file1.SetContentFile(file_name)
pydrive_retry(file1.Upload, {"convert": True})
content_bom = file1.GetContentString(mimetype="text/plain")
content_no_bom = file1.GetContentString(
mimetype="text/plain", remove_bom=True
)
content_bom_2 = file1.GetContentString(mimetype="text/plain")
self.assertEqual(content_bom, content_bom_2)
self.assertNotEqual(content_bom, content_no_bom)
self.assertTrue(len(content_bom) > len(content_no_bom))
buffer_bom = pydrive_retry(
file1.GetContentIOBuffer,
mimetype="text/plain",
encoding="utf-8",
)
buffer_bom = "".join(iter(buffer_bom))
self.assertEqual(content_bom, buffer_bom)
buffer_no_bom = pydrive_retry(
file1.GetContentIOBuffer,
mimetype="text/plain",
remove_bom=True,
encoding="utf-8",
)
buffer_no_bom = "".join(iter(buffer_no_bom))
self.assertEqual(content_no_bom, buffer_no_bom)
finally:
self.cleanup_gfile_conversion_test(
file1, file_name, downloaded_file_name
)
def test_InsertPrefix(self):
# Create BytesIO.
file_obj = BytesIO(b"abc")
original_length = len(file_obj.getvalue())
char_to_insert = "\ufeff".encode()
# Insert the prefix.
GoogleDriveFile._InsertPrefix(file_obj, char_to_insert)
modified_length = len(file_obj.getvalue())
self.assertGreater(modified_length, original_length)
self.assertEqual(file_obj.getvalue(), "\ufeffabc".encode())
    def test_InsertPrefixLarge(self):
        # Same as test_InsertPrefix but with a 2400-byte payload, to cover
        # the chunked path of _InsertPrefix.
        # Create BytesIO.
        test_content = "abc" * 800
        file_obj = BytesIO(test_content.encode("utf-8"))
        original_length = len(file_obj.getvalue())
        char_to_insert = "\ufeff".encode()
        # Insert the prefix.
        GoogleDriveFile._InsertPrefix(file_obj, char_to_insert)
        modified_length = len(file_obj.getvalue())
        self.assertGreater(modified_length, original_length)
        expected_content = "\ufeff" + test_content
        self.assertEqual(file_obj.getvalue(), expected_content.encode("utf8"))
    def test_RemovePrefix(self):
        # Stripping a BOM shrinks the buffer and leaves the payload intact.
        # Create BytesIO.
        file_obj = BytesIO("\ufeffabc".encode())
        original_length = len(file_obj.getvalue())
        char_to_remove = "\ufeff".encode()
        # Remove the prefix.
        GoogleDriveFile._RemovePrefix(file_obj, char_to_remove)
        modified_length = len(file_obj.getvalue())
        self.assertLess(modified_length, original_length)
        self.assertEqual(file_obj.getvalue(), b"abc")
    def test_RemovePrefixLarge(self):
        # Same as test_RemovePrefix but with a 2400-byte payload, to cover
        # the chunked path of _RemovePrefix.
        # Create BytesIO.
        test_content = "\ufeff" + "abc" * 800
        file_obj = BytesIO(test_content.encode("utf8"))
        original_length = len(file_obj.getvalue())
        char_to_remove = "\ufeff".encode()
        # Remove the prefix.
        GoogleDriveFile._RemovePrefix(file_obj, char_to_remove)
        modified_length = len(file_obj.getvalue())
        self.assertLess(modified_length, original_length)
        self.assertEqual(file_obj.getvalue(), test_content[1:].encode("utf8"))
# Setup for concurrent upload testing.
# =====================================
FILE_UPLOAD_COUNT = 10
def _parallel_uploader(self, num_of_uploads, num_of_workers):
"""
:returns: list[str] of file IDs
"""
drive = GoogleDrive(self.ga)
thread_pool = ThreadPoolExecutor(max_workers=num_of_workers)
first_file = self.getTempFile("first_file", "some string")
second_file = self.getTempFile("second_file", "another string")
# Create list of gdrive_files.
upload_files = []
remote_name = test_util.CreateRandomFileName()
for i in range(num_of_uploads):
file_name = first_file if i % 2 == 0 else second_file
up_file = drive.CreateFile()
up_file["title"] = remote_name
up_file.SetContentFile(file_name)
upload_files.append(up_file)
# Ensure there are no files with the random file name.
files = pydrive_retry(
lambda: drive.ListFile(
param={"q": "title = '%s' and trashed = false" % remote_name}
).GetList()
)
self.assertTrue(len(files) == 0)
# Submit upload jobs to ThreadPoolExecutor.
futures = []
for up_file in upload_files:
futures.append(thread_pool.submit(pydrive_retry, up_file.Upload))
# Ensure that all threads a) return, and b) encountered no exceptions.
for future in as_completed(futures):
self.assertIsNone(future.exception())
thread_pool.shutdown()
# Ensure all files were uploaded.
files = pydrive_retry(
lambda: drive.ListFile(
param={"q": "title = '%s' and trashed = false" % remote_name}
).GetList()
)
self.assertTrue(len(files) == self.FILE_UPLOAD_COUNT)
return [fi["id"] for fi in upload_files]
def _parallel_downloader(self, file_ids, num_of_workers):
drive = GoogleDrive(self.ga)
thread_pool = ThreadPoolExecutor(max_workers=num_of_workers)
# Create list of gdrive_files.
download_files = []
for file_id in file_ids:
file1 = drive.CreateFile({"id": file_id})
file1["title"] = self.getTempFile()
download_files.append(file1)
# Ensure files don't exist yet.
for file_obj in download_files:
self.assertTrue(not delete_file(file_obj["title"]))
# Submit upload jobs to ThreadPoolExecutor.
futures = []
for file_obj in download_files:
futures.append(
thread_pool.submit(
pydrive_retry, file_obj.GetContentFile, file_obj["title"]
)
)
# Ensure that all threads a) return, and b) encountered no exceptions.
for future in as_completed(futures):
self.assertIsNone(future.exception())
thread_pool.shutdown()
# Ensure all files were downloaded.
for file_obj in download_files:
self.assertTrue(delete_file(file_obj["title"]))
# Remove uploaded files.
self.DeleteUploadedFiles(drive, file_ids)
@pytest.mark.skipif(
sys.platform == "win32",
reason="timeout_decorator doesn't support Windows",
)
@timeout_decorator.timeout(320)
def test_Parallel_Insert_File_Passed_HTTP(self):
files = self._parallel_uploader(self.FILE_UPLOAD_COUNT, 10)
self._parallel_downloader(files, 10)
# Tests for Copy file.
# ====================
def test_CopyFileSameFolder(self):
drive = GoogleDrive(self.ga)
content = "hello world!"
# create a temp file and set it's content to a known string
file1 = drive.CreateFile()
filename = self.getTempFile("copytestfile", content=content)
file1["title"] = filename
file1.SetContentFile(filename)
pydrive_retry(file1.Upload)
# copy the file
file2 = pydrive_retry(file1.Copy, new_title="copytestfile_copy")
self.assertIsNotNone(file2)
pydrive_retry(file2.FetchContent)
# assert that the content of the copied file is the same as the original (file1)
self.assertEqual(file2.GetContentString(), content)
self.DeleteUploadedFiles(drive, [file1["id"], file2["id"]])
def test_CopyFileDifferentFolder(self):
drive = GoogleDrive(self.ga)
content = "hello world!"
# create a temp file and set it's content to a known string
file1 = drive.CreateFile()
filename = self.getTempFile("copytestfile", content=content)
file1["title"] = filename
file1.SetContentFile(filename)
pydrive_retry(file1.Upload)
# create a temp directory
temp_dir = pydrive_retry(
drive.CreateFile,
{
"title": "temp_dir",
"mimeType": "application/vnd.google-apps.folder",
"parents": [{"id": file1["parents"][0]["id"]}],
},
)
pydrive_retry(temp_dir.Upload)
# copy the file into the new folder
file2 = pydrive_retry(
file1.Copy, target_folder=temp_dir, new_title="copytestfile_copy"
)
self.assertIsNotNone(file2)
pydrive_retry(file2.FetchContent)
# assert that the content of the copied file is the same as the original (file1)
self.assertEqual(file2.GetContentString(), content)
files = pydrive_retry(
drive.ListFile, {"q": f"'{temp_dir['id']}' in parents"}
).GetList()
self.assertIn("copytestfile_copy", [f["title"] for f in files])
self.DeleteUploadedFiles(
drive, [file1["id"], file2["id"], temp_dir["id"]]
)
# Helper functions.
# =================
def DeleteUploadedFiles(self, drive, ids):
for element in ids:
tmp_file = drive.CreateFile({"id": element})
pydrive_retry(tmp_file.Delete)
if __name__ == "__main__":
unittest.main()
PyDrive2-1.15.0/pydrive2/test/test_filelist.py 0000664 0000000 0000000 00000007352 14334672560 0021232 0 ustar 00root root 0000000 0000000 import os
import unittest
from pydrive2.auth import GoogleAuth
from pydrive2.drive import GoogleDrive
from pydrive2.test import test_util
from pydrive2.test.test_util import (
pydrive_retry,
pydrive_list_item,
setup_credentials,
settings_file_path,
)
class GoogleDriveFileListTest(unittest.TestCase):
"""Tests operations of files.GoogleDriveFileList class.
Equivalent to Files.list in Google Drive API.
"""
    @classmethod
    def setup_class(cls):
        # Authenticate once per class with the service-account settings
        # and share one GoogleDrive handle across tests.
        setup_credentials()
        cls.ga = GoogleAuth(settings_file_path("default.yaml"))
        cls.ga.ServiceAuth()
        cls.drive = GoogleDrive(cls.ga)
def test_01_Files_List_GetList(self):
drive = GoogleDrive(self.ga)
query = f"title = '{self.title}' and trashed = false"
for file1 in pydrive_list_item(drive, query):
found = False
for file2 in pydrive_list_item(drive, query):
if file1["id"] == file2["id"]:
found = True
self.assertEqual(found, True)
def test_02_Files_List_ForLoop(self):
drive = GoogleDrive(self.ga)
query = f"title = '{self.title}' and trashed = false"
files = []
for x in pydrive_list_item(
drive, query, 2
): # Build iterator to access files simply with for loop
files.append(x)
for file1 in self.file_list:
found = False
for file2 in files:
if file1["id"] == file2["id"]:
found = True
self.assertEqual(found, True)
    def test_03_Files_List_GetList_Iterate(self):
        # Page through results two at a time by calling GetList repeatedly;
        # the iterator signals exhaustion by raising StopIteration.
        drive = GoogleDrive(self.ga)
        flist = drive.ListFile(
            {
                "q": "title = '%s' and trashed = false" % self.title,
                "maxResults": 2,
            }
        )
        files = []
        while True:
            try:
                x = pydrive_retry(flist.GetList)
                # Each page must honor maxResults.
                self.assertTrue(len(x) <= 2)
                files.extend(x)
            except StopIteration:
                break
        # Every uploaded file must appear somewhere in the paged results.
        for file1 in self.file_list:
            found = False
            for file2 in files:
                if file1["id"] == file2["id"]:
                    found = True
            self.assertEqual(found, True)
def test_File_List_Folders(self):
drive = GoogleDrive(self.ga)
folder1 = drive.CreateFile(
{
"mimeType": "application/vnd.google-apps.folder",
"title": self.title,
}
)
pydrive_retry(folder1.Upload)
self.file_list.append(folder1)
query = f"title = '{self.title}' and trashed = false"
count = 0
for file1 in pydrive_list_item(drive, query):
self.assertFileInFileList(file1)
count += 1
self.assertTrue(count == 11)
# setUp and tearDown methods.
# ===========================
    def setUp(self):
        # Upload 10 files sharing one random title; the tests then query
        # Drive by that title and compare against self.file_list.
        title = test_util.CreateRandomFileName()
        file_list = []
        for x in range(0, 10):
            file1 = self.drive.CreateFile()
            file1["title"] = title
            pydrive_retry(file1.Upload)
            file_list.append(file1)
        self.title = title
        self.file_list = file_list
    def tearDown(self):
        # Deleting uploaded files. Also covers extra entries a test
        # appended to self.file_list (e.g. the folder in
        # test_File_List_Folders).
        for file1 in self.file_list:
            pydrive_retry(file1.Delete)
def assertFileInFileList(self, file_object):
found = False
for file1 in self.file_list:
if file_object["id"] == file1["id"]:
found = True
self.assertEqual(found, True)
    def DeleteOldFile(self, file_name):
        # Best-effort local cleanup; a missing file is not an error.
        try:
            os.remove(file_name)
        except OSError:
            pass
if __name__ == "__main__":
unittest.main()
PyDrive2-1.15.0/pydrive2/test/test_fs.py 0000664 0000000 0000000 00000015601 14334672560 0020023 0 ustar 00root root 0000000 0000000 import os
import posixpath
import secrets
import uuid
from concurrent import futures
import pytest
import fsspec
from pydrive2.auth import GoogleAuth
from pydrive2.fs import GDriveFileSystem
from pydrive2.test.test_util import settings_file_path, setup_credentials
from pydrive2.test.test_util import GDRIVE_USER_CREDENTIALS_DATA
TEST_GDRIVE_REPO_BUCKET = "root"
@pytest.fixture(scope="module")
def base_remote_dir():
    # One random directory under the bucket root per test module.
    return f"{TEST_GDRIVE_REPO_BUCKET}/{uuid.uuid4()}"
@pytest.fixture
def remote_dir(base_remote_dir):
    # Fresh random subdirectory path for each test.
    return f"{base_remote_dir}/{uuid.uuid4()}"
@pytest.fixture
def fs(tmpdir, base_remote_dir):
    """Authenticated GDriveFileSystem rooted at base_remote_dir, with the
    shared base directory pre-created under the bucket root."""
    setup_credentials()
    auth = GoogleAuth(settings_file_path("default.yaml", tmpdir / ""))
    auth.ServiceAuth()
    # Only the path below the bucket is needed here; drop the unused
    # bucket component (was bound to an unused local).
    _, base = base_remote_dir.split("/", 1)
    fs = GDriveFileSystem(base_remote_dir, auth)
    fs._gdrive_create_dir("root", base)
    return fs
@pytest.mark.manual
def test_fs_oauth(base_remote_dir):
    # Interactive OAuth flow; excluded from CI via the "manual" marker.
    # NOTE(review): client_id/client_secret are committed in plain text —
    # acceptable only if this is a disposable test-only OAuth app; confirm.
    GDriveFileSystem(
        base_remote_dir,
        client_id="47794215776-cd9ssb6a4vv5otkq6n0iadpgc4efgjb1.apps.googleusercontent.com",  # noqa: E501
        client_secret="i2gerGA7uBjZbR08HqSOSt9Z",
    )
def test_fs_service_json_file(base_remote_dir):
    # Construct the filesystem from a service-account JSON key file path.
    creds = "credentials/fs.dat"
    setup_credentials(creds)
    GDriveFileSystem(
        base_remote_dir,
        use_service_account=True,
        client_json_file_path=creds,
    )
def test_fs_service_json(base_remote_dir):
    # Construct the filesystem from service-account JSON passed inline
    # via the environment.
    creds = os.environ[GDRIVE_USER_CREDENTIALS_DATA]
    GDriveFileSystem(
        base_remote_dir,
        use_service_account=True,
        client_json=creds,
    )
def test_info(fs, tmpdir, remote_dir):
    # File entries report type/name/size; two empty files share a checksum.
    fs.touch(remote_dir + "/info/a.txt")
    fs.touch(remote_dir + "/info/b.txt")
    details = fs.info(remote_dir + "/info/a.txt")
    assert details["type"] == "file"
    assert details["name"] == remote_dir + "/info/a.txt"
    assert details["size"] == 0
    assert (
        details["checksum"] == fs.info(remote_dir + "/info/b.txt")["checksum"]
    )
    # Directory info is normalized with a trailing slash and carries no
    # checksum, whether or not the queried path ends with "/".
    details = fs.info(remote_dir + "/info")
    assert details["type"] == "directory"
    assert details["name"] == remote_dir + "/info/"
    assert "checksum" not in details
    details = fs.info(remote_dir + "/info/")
    assert details["type"] == "directory"
    assert details["name"] == remote_dir + "/info/"
def test_move(fs, remote_dir):
    """Moving a file preserves all of its metadata except the name."""
    src = remote_dir + "/a.txt"
    dst = remote_dir + "/b.txt"
    fs.touch(src)
    info_before = fs.info(src)
    fs.move(src, dst)
    info_after = fs.info(dst)
    assert not fs.exists(src)
    assert fs.exists(dst)
    # Names differ by construction; everything else must be identical.
    info_before.pop("name")
    info_after.pop("name")
    assert info_before == info_after
def test_rm(fs, remote_dir):
    """fs.rm removes single files and, with recursive=True, whole trees."""
    fs.touch(remote_dir + "/a.txt")
    fs.rm(remote_dir + "/a.txt")
    assert not fs.exists(remote_dir + "/a.txt")
    # Build a small nested tree and remove it recursively.
    fs.mkdir(remote_dir + "/dir")
    fs.touch(remote_dir + "/dir/a")
    fs.touch(remote_dir + "/dir/b")
    fs.mkdir(remote_dir + "/dir/c/")
    fs.touch(remote_dir + "/dir/c/a")
    fs.rm(remote_dir + "/dir", recursive=True)
    assert not fs.exists(remote_dir + "/dir/c/a")
def test_ls(fs, remote_dir):
    """fs.ls lists exactly a directory's files, plain and with detail.

    Paths are joined with an explicit "/" separator; the original
    concatenated ``remote_dir + "dir/"`` without one, which created a
    sibling named ``<uuid>dir`` instead of a ``dir`` subdirectory and
    was inconsistent with every other test in this module.
    """
    fs.mkdir(remote_dir + "/dir/")
    files = set()
    for no in range(8):
        file = remote_dir + f"/dir/test_{no}"
        fs.touch(file)
        files.add(file)

    assert set(fs.ls(remote_dir + "/dir/")) == files

    dirs = fs.ls(remote_dir + "/dir/", detail=True)
    expected = [fs.info(file) for file in files]

    def by_name(details):
        return details["name"]

    # Both listings are order-independent; sort before comparing.
    dirs.sort(key=by_name)
    expected.sort(key=by_name)
    assert dirs == expected
def test_find(fs, remote_dir):
    """fs.find returns every file in a tree, plain and with detail."""
    fs.mkdir(remote_dir + "/dir")
    files = [
        "a",
        "b",
        "c/a",
        "c/b",
        "c/d/a",
        "c/d/b",
        "c/d/c",
        "c/d/f/a",
        "c/d/f/b",
    ]
    files = [remote_dir + "/dir/" + file for file in files]
    # Create every intermediate directory before touching the files.
    dirnames = {posixpath.dirname(file) for file in files}
    for dirname in dirnames:
        fs.mkdir(dirname)
    for file in files:
        fs.touch(file)
    assert set(fs.find(remote_dir)) == set(files)
    # detail=True returns a mapping of name -> info dict.
    find_results = fs.find(remote_dir, detail=True)
    info_results = [fs.info(file) for file in files]
    info_results = {content["name"]: content for content in info_results}
    assert find_results == info_results
def test_exceptions(fs, tmpdir, remote_dir):
    """Missing remote files raise FileNotFoundError from open/copy/get_file."""
    with pytest.raises(FileNotFoundError):
        with fs.open(remote_dir + "/a.txt"):
            ...
    with pytest.raises(FileNotFoundError):
        fs.copy(remote_dir + "/u.txt", remote_dir + "/y.txt")
    with pytest.raises(FileNotFoundError):
        fs.get_file(remote_dir + "/c.txt", tmpdir / "c.txt")
def test_open_rw(fs, remote_dir):
    """Bytes written through fs.open can be read back unchanged."""
    payload = b"dvc.org"
    path = remote_dir + "/a.txt"
    with fs.open(path, "wb") as stream:
        stream.write(payload)
    with fs.open(path) as stream:
        assert stream.read() == payload
def test_concurrent_operations(fs, remote_dir):
    """64 threaded writes followed by threaded reads round-trip correctly.

    Each worker writes a file whose content equals its random name, so a
    successful read returns the name and the two sets must match.
    """
    def create_random_file():
        name = secrets.token_hex(16)
        with fs.open(remote_dir + "/" + name, "w") as stream:
            stream.write(name)
        return name

    def read_random_file(name):
        with fs.open(remote_dir + "/" + name, "r") as stream:
            return stream.read()

    with futures.ThreadPoolExecutor() as executor:
        write_futures, _ = futures.wait(
            [executor.submit(create_random_file) for _ in range(64)],
            return_when=futures.ALL_COMPLETED,
        )
        write_names = {future.result() for future in write_futures}

        read_futures, _ = futures.wait(
            [executor.submit(read_random_file, name) for name in write_names],
            return_when=futures.ALL_COMPLETED,
        )
        read_names = {future.result() for future in read_futures}

        assert write_names == read_names
def test_put_file(fs, tmpdir, remote_dir):
    """put_file uploads a local file whose bytes can be read back remotely."""
    local_path = tmpdir / "a.txt"
    with open(local_path, "wb") as out:
        out.write(b"data")
    fs.put_file(local_path, remote_dir + "/a.txt")
    with fs.open(remote_dir + "/a.txt") as stream:
        assert stream.read() == b"data"
def test_get_file(fs, tmpdir, remote_dir):
    """get_file downloads a remote file to a local destination intact."""
    src_file = tmpdir / "a.txt"
    dest_file = tmpdir / "b.txt"
    with open(src_file, "wb") as file:
        file.write(b"data")
    fs.put_file(src_file, remote_dir + "/a.txt")
    fs.get_file(remote_dir + "/a.txt", dest_file)
    assert dest_file.read() == "data"
def test_get_file_callback(fs, tmpdir, remote_dir):
    """get_file reports download progress through an fsspec Callback."""
    src_file = tmpdir / "a.txt"
    dest_file = tmpdir / "b.txt"
    with open(src_file, "wb") as file:
        file.write(b"data" * 10)
    fs.put_file(src_file, remote_dir + "/a.txt")

    callback = fsspec.Callback()
    fs.get_file(
        remote_dir + "/a.txt", dest_file, callback=callback, block_size=10
    )
    assert dest_file.read() == "data" * 10

    # 40 bytes total: size is set up front, value accumulates to the same.
    assert callback.size == 40
    assert callback.value == 40
PyDrive2-1.15.0/pydrive2/test/test_oauth.py 0000664 0000000 0000000 00000014657 14334672560 0020545 0 ustar 00root root 0000000 0000000 import json
import os
import time
import pytest
from pydrive2.auth import GoogleAuth
from pydrive2.test.test_util import (
setup_credentials,
delete_file,
settings_file_path,
GDRIVE_USER_CREDENTIALS_DATA,
)
from oauth2client.file import Storage
def setup_module(module):
    """Pytest hook: materialize service credentials before any test runs."""
    setup_credentials()
@pytest.mark.manual
def test_01_LocalWebserverAuthWithClientConfigFromFile():
    """Local-webserver OAuth with client config loaded from a file."""
    # Delete old credentials file
    delete_file("credentials/1.dat")
    # Test if authentication works with config read from file
    ga = GoogleAuth(settings_file_path("test_oauth_test_01.yaml"))
    ga.LocalWebserverAuth()
    assert not ga.access_token_expired
    # Test if correct credentials file is created
    CheckCredentialsFile("credentials/1.dat")
    time.sleep(1)  # NOTE(review): presumably paces API calls between tests
@pytest.mark.manual
def test_02_LocalWebserverAuthWithClientConfigFromSettings():
    """Local-webserver OAuth with client config embedded in settings."""
    # Delete old credentials file
    delete_file("credentials/2.dat")
    # Test if authentication works with config read from settings
    ga = GoogleAuth(settings_file_path("test_oauth_test_02.yaml"))
    ga.LocalWebserverAuth()
    assert not ga.access_token_expired
    # Test if correct credentials file is created
    CheckCredentialsFile("credentials/2.dat")
    time.sleep(1)
@pytest.mark.manual
def test_03_LocalWebServerAuthWithNoCredentialsSaving():
    """Local-webserver OAuth works when credential saving is disabled."""
    # Delete old credentials file
    delete_file("credentials/3.dat")
    ga = GoogleAuth(settings_file_path("test_oauth_test_03.yaml"))
    assert not ga.settings["save_credentials"]
    ga.LocalWebserverAuth()
    assert not ga.access_token_expired
    time.sleep(1)
@pytest.mark.manual
def test_04_CommandLineAuthWithClientConfigFromFile():
    """Command-line OAuth flow with client config loaded from a file."""
    # Delete old credentials file
    delete_file("credentials/4.dat")
    # Test if authentication works with config read from file
    ga = GoogleAuth(settings_file_path("test_oauth_test_04.yaml"))
    ga.CommandLineAuth()
    assert not ga.access_token_expired
    # Test if correct credentials file is created
    CheckCredentialsFile("credentials/4.dat")
    time.sleep(1)
@pytest.mark.manual
def test_05_ConfigFromSettingsWithoutOauthScope():
    """Authentication succeeds with no explicit oauth_scope in settings."""
    # Test if authentication works without oauth_scope
    ga = GoogleAuth(settings_file_path("test_oauth_test_05.yaml"))
    ga.LocalWebserverAuth()
    assert not ga.access_token_expired
    time.sleep(1)
@pytest.mark.skip(reason="P12 authentication is deprecated")
def test_06_ServiceAuthFromSavedCredentialsP12File():
    """Service auth from a saved P12 key (kept for history; P12 deprecated)."""
    setup_credentials("credentials/6.dat")
    ga = GoogleAuth(settings_file_path("test_oauth_test_06.yaml"))
    ga.ServiceAuth()
    assert not ga.access_token_expired
    time.sleep(1)
def test_07_ServiceAuthFromSavedCredentialsJsonFile():
    """Service auth saves a credentials file, then reuses it on re-auth."""
    # Have an initial auth so that credentials/7.dat gets saved
    ga = GoogleAuth(settings_file_path("test_oauth_test_07.yaml"))
    credentials_file = ga.settings["save_credentials_file"]
    # Delete old credentials file
    delete_file(credentials_file)
    assert not os.path.exists(credentials_file)
    ga.ServiceAuth()
    assert os.path.exists(credentials_file)
    # Secondary auth should be made only using the previously saved
    # login info
    ga = GoogleAuth(settings_file_path("test_oauth_test_07.yaml"))
    ga.ServiceAuth()
    assert not ga.access_token_expired
    time.sleep(1)
def test_08_ServiceAuthFromJsonFileNoCredentialsSaving():
    """Service auth from a JSON file works with credential saving disabled."""
    # Test that no credentials are saved and API is still functional
    # We are testing that there are no exceptions at least
    ga = GoogleAuth(settings_file_path("test_oauth_test_08.yaml"))
    assert not ga.settings["save_credentials"]
    ga.ServiceAuth()
    time.sleep(1)
def test_09_SaveLoadCredentialsUsesDefaultStorage(mocker):
    """Load/Save reuse the default Storage (no extra Storage is built).

    Regression test: the spy on ``Storage.__init__`` must record zero
    calls across ServiceAuth + LoadCredentials + SaveCredentials.
    """
    # Test fix for https://github.com/iterative/PyDrive2/issues/163
    # Make sure that Load and Save credentials by default reuse the
    # same Storage (since it defined lock which make it TS)
    ga = GoogleAuth(settings_file_path("test_oauth_test_09.yaml"))
    credentials_file = ga.settings["save_credentials_file"]
    # Delete old credentials file
    delete_file(credentials_file)
    assert not os.path.exists(credentials_file)
    spy = mocker.spy(Storage, "__init__")
    ga.ServiceAuth()
    ga.LoadCredentials()
    ga.SaveCredentials()
    assert spy.call_count == 0
def test_10_ServiceAuthFromSavedCredentialsDictionary():
    """Service auth persists credentials into a dict backend and reuses them."""
    creds_dict = {}
    settings = {
        "client_config_backend": "service",
        "service_config": {
            "client_json_file_path": "/tmp/pydrive2/credentials.json",
        },
        "oauth_scope": ["https://www.googleapis.com/auth/drive"],
        "save_credentials": True,
        "save_credentials_backend": "dictionary",
        "save_credentials_dict": creds_dict,
        "save_credentials_key": "creds",
    }
    ga = GoogleAuth(settings=settings)
    ga.ServiceAuth()
    assert not ga.access_token_expired
    # First auth must have populated the dictionary backend.
    assert creds_dict
    first_creds_dict = creds_dict.copy()
    # Secondary auth should be made only using the previously saved
    # login info
    ga = GoogleAuth(settings=settings)
    ga.ServiceAuth()
    assert not ga.access_token_expired
    assert creds_dict == first_creds_dict
    time.sleep(1)
def test_11_ServiceAuthFromJsonNoCredentialsSaving():
    """Service auth from a raw JSON string with no credential saving."""
    client_json = os.environ[GDRIVE_USER_CREDENTIALS_DATA]
    settings = {
        "client_config_backend": "service",
        "service_config": {
            "client_json": client_json,
        },
        "oauth_scope": ["https://www.googleapis.com/auth/drive"],
    }
    # Test that no credentials are saved and API is still functional
    # We are testing that there are no exceptions at least
    ga = GoogleAuth(settings=settings)
    assert not ga.settings["save_credentials"]
    ga.ServiceAuth()
    time.sleep(1)
def test_12_ServiceAuthFromJsonDictNoCredentialsSaving():
    """Service auth from a parsed JSON dict with no credential saving."""
    client_json_dict = json.loads(os.environ[GDRIVE_USER_CREDENTIALS_DATA])
    settings = {
        "client_config_backend": "service",
        "service_config": {
            "client_json_dict": client_json_dict,
        },
        "oauth_scope": ["https://www.googleapis.com/auth/drive"],
    }
    # Test that no credentials are saved and API is still functional
    # We are testing that there are no exceptions at least
    ga = GoogleAuth(settings=settings)
    assert not ga.settings["save_credentials"]
    ga.ServiceAuth()
    time.sleep(1)
def CheckCredentialsFile(credentials, no_file=False):
    """Load ``credentials`` into a fresh GoogleAuth and assert its state.

    With ``no_file=True`` the token is expected to be absent/expired;
    otherwise loading must yield a non-expired access token.
    """
    ga = GoogleAuth(settings_file_path("test_oauth_default.yaml"))
    ga.LoadCredentialsFile(credentials)
    assert ga.access_token_expired == no_file
PyDrive2-1.15.0/pydrive2/test/test_util.py 0000664 0000000 0000000 00000005102 14334672560 0020363 0 ustar 00root root 0000000 0000000 import random
import re
import os
import posixpath
from funcy import retry
from funcy.py3 import cat
from pydrive2.files import ApiRequestError
from shutil import copyfile, rmtree
newline_pattern = re.compile(r"[\r\n]")
GDRIVE_USER_CREDENTIALS_DATA = "GDRIVE_USER_CREDENTIALS_DATA"
DEFAULT_USER_CREDENTIALS_FILE = "/tmp/pydrive2/credentials.json"
TESTS_ROOTDIR = os.path.dirname(__file__)
SETTINGS_PATH = posixpath.join(TESTS_ROOTDIR, "settings/")
LOCAL_PATH = posixpath.join(TESTS_ROOTDIR, "settings/local/")
def setup_credentials(credentials_path=DEFAULT_USER_CREDENTIALS_FILE):
    """Write service credentials from the environment to ``credentials_path``.

    Side effect: chdir to the tests root, which the relative paths used
    throughout the test suite rely on. The environment variable is read
    once (the original read it twice — once for the check and once for
    the write — racing against concurrent environment changes), and the
    parent directory is created unconditionally via ``exist_ok=True``
    (the prior ``os.path.exists`` guard was redundant).
    """
    os.chdir(TESTS_ROOTDIR)
    credentials_data = os.getenv(GDRIVE_USER_CREDENTIALS_DATA)
    if credentials_data:
        os.makedirs(os.path.dirname(credentials_path), exist_ok=True)
        with open(credentials_path, "w") as credentials_file:
            credentials_file.write(credentials_data)
def settings_file_path(settings_file, wkdir=LOCAL_PATH):
    """Copy a settings template into ``wkdir`` (once) and return its path.

    The copy is skipped if a local copy already exists, so tests can
    mutate their local settings without touching the template.
    """
    template_path = SETTINGS_PATH + settings_file
    local_path = wkdir + settings_file
    assert os.path.exists(template_path)
    if not os.path.exists(wkdir):
        os.makedirs(wkdir, exist_ok=True)
    if not os.path.exists(local_path):
        copyfile(template_path, local_path)
    return local_path
class PyDriveRetriableError(Exception):
    """Transient Google API failure that warrants a retry."""

    pass
# 15 tries, start at 0.5s, multiply by golden ratio, cap at 20s
@retry(15, PyDriveRetriableError, timeout=lambda a: min(0.5 * 1.618**a, 20))
def pydrive_retry(call, *args, **kwargs):
    """Invoke ``call`` and retry (via funcy.retry) on transient API errors.

    HTTP 403/500/502/503/504 from the Drive API are translated into
    PyDriveRetriableError so the decorator retries them; any other
    ApiRequestError propagates unchanged.
    """
    try:
        result = call(*args, **kwargs)
    except ApiRequestError as exception:
        if exception.error["code"] in [403, 500, 502, 503, 504]:
            raise PyDriveRetriableError("Google API request failed")
        raise
    return result
def pydrive_list_item(drive, query, max_results=1000):
    """Return a lazy iterable of all Drive items matching ``query``.

    Pages are fetched through ``pydrive_retry`` and flattened; iteration
    stops when the paginator yields None.
    """
    param = {"q": query, "maxResults": max_results}
    file_list = drive.ListFile(param)

    # Named function instead of the flake8-suppressed lambda assignment
    # (E731) the original used.
    def get_list():
        return pydrive_retry(next, file_list, None)

    # Fetch pages until None is received, lazily flatten the thing
    return cat(iter(get_list, None))
def CreateRandomFileName():
    """Return 32 random lowercase hex characters (128 random bits)."""
    return f"{random.getrandbits(128):032x}"
def StripNewlines(string):
    """Return ``string`` with every CR and LF character removed."""
    return string.replace("\r", "").replace("\n", "")
def create_file(path, content):
    """Write ``content`` to ``path``, creating or truncating the file.

    Uses an explicit UTF-8 encoding so the bytes written do not depend
    on the platform's locale-determined default encoding.
    """
    with open(path, "w", encoding="utf-8") as f:
        f.write(content)
def delete_file(path):
    """Remove ``path`` if it exists; return True when a file was removed."""
    if not os.path.exists(path):
        return False
    os.remove(path)
    return True
def delete_dir(path):
    """Recursively remove ``path``; a missing directory is silently ignored."""
    rmtree(path, ignore_errors=True)
PyDrive2-1.15.0/pyproject.toml 0000664 0000000 0000000 00000000433 14334672560 0016170 0 ustar 00root root 0000000 0000000 [build-system]
requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2"]
[tool.setuptools_scm]
[tool.black]
line-length = 79
include = '\.pyi?$'
exclude = '''
/(
\.eggs
| \.git
| \.hg
| \.mypy_cache
| \.tox
| \.venv
| _build
| buck-out
| build
| dist
)/
'''
PyDrive2-1.15.0/pytest.ini 0000664 0000000 0000000 00000000164 14334672560 0015306 0 ustar 00root root 0000000 0000000 [pytest]
markers =
manual: mark tests to be runnable only in local environment and require user manual actions.
PyDrive2-1.15.0/setup.py 0000664 0000000 0000000 00000003640 14334672560 0014771 0 ustar 00root root 0000000 0000000 from setuptools import setup
# Extra dependencies to run tests
# Test-only requirements; installed via the "tests" extra below.
# The pinned black (previously appended after construction) is folded
# into the literal so the list is defined in one place.
tests_requirements = [
    "pytest>=4.6.0",
    "timeout-decorator",
    "funcy>=1.14",
    "flake8",
    "flake8-docstrings",
    "pytest-mock",
    "pyinstaller",
    "importlib_resources; python_version < '3.10'",
    "black==22.10.0",
]
# Package metadata and distribution configuration; the version comes
# from setuptools_scm (see pyproject.toml), so none is declared here.
setup(
    name="PyDrive2",
    author="JunYoung Gwak",
    author_email="jgwak@dreamylab.com",
    maintainer="DVC team",
    maintainer_email="support@dvc.org",
    packages=[
        "pydrive2",
        "pydrive2.test",
        "pydrive2.fs",
        "pydrive2.__pyinstaller",
    ],
    url="https://github.com/iterative/PyDrive2",
    project_urls={
        "Documentation": "https://docs.iterative.ai/PyDrive2",
        "Changelog": "https://github.com/iterative/PyDrive2/releases",
    },
    license="Apache License 2.0",
    description="Google Drive API made easy. Maintained fork of PyDrive.",
    # NOTE(review): file handle is left to the GC; harmless in a one-shot
    # setup script.
    long_description=open("README.rst").read(),
    long_description_content_type="text/x-rst",
    install_requires=[
        "google-api-python-client >= 1.12.5",
        "oauth2client >= 4.0.0",
        "PyYAML >= 3.0",
        "pyOpenSSL >= 19.1.0",
    ],
    extras_require={
        # Optional fsspec-compatible filesystem layer (pydrive2.fs).
        "fsspec": [
            "fsspec >= 2021.07.0",
            "tqdm >= 4.0.0",
            "funcy >= 1.14",
            "appdirs >= 1.4.3",
        ],
        "tests": tests_requirements,
    },
    python_requires=">=3.7",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
    ],
    entry_points={
        "pyinstaller40": [
            "hook-dirs = pydrive2.__pyinstaller:get_hook_dirs",
            "tests = pydrive2.__pyinstaller:get_PyInstaller_tests",
        ]
    },
)
PyDrive2-1.15.0/tox.ini 0000664 0000000 0000000 00000000717 14334672560 0014574 0 ustar 00root root 0000000 0000000 [tox]
# NOTE: py36 is below setup.py's python_requires >= 3.7 — consider dropping it
envlist = py36, py37, py38, py39
[testenv]
changedir = {toxinidir}/pydrive2/test
deps =
pytest
httplib2
PyYAML
timeout_decorator
futures
git+https://github.com/google/google-api-python-client.git
commands =
py.test -v -s
[flake8]
ignore =
E203, # Whitespace before ':'
E266, # Too many leading '#' for block comment
W503, # Line break occurred before a binary operator
max-line-length = 89
select = B,C,E,F,W,T4,B9