pax_global_header00006660000000000000000000000064140014305320014502gustar00rootroot0000000000000052 comment=52a73a6ab8d0a52fdd66abb1810d5650cc4aa8ac fabric-2.6.0/000077500000000000000000000000001400143053200127355ustar00rootroot00000000000000fabric-2.6.0/.coveragerc000066400000000000000000000000471400143053200150570ustar00rootroot00000000000000[run] branch = True include = fabric/* fabric-2.6.0/.gitignore000066400000000000000000000002201400143053200147170ustar00rootroot00000000000000*~ *.pyc *.pyo *.pyt *.pytc *.egg .DS_Store .*.swp *.egg-info .coverage sites/*/_build dist build/ tags TAGS .tox tox.ini .idea/ htmlcov .cache fabric-2.6.0/.travis.yml000066400000000000000000000062301400143053200150470ustar00rootroot00000000000000language: python sudo: required dist: trusty cache: directories: - $HOME/.cache/pip python: - "2.7" - "3.4" - "3.5" - "3.6" - "pypy" - "pypy3" matrix: # pypy3 (as of 2.4.0) has a wacky arity issue in its source loader. Allow it # to fail until we can test on, and require, PyPy3.3+. See # pyinvoke/invoke#358. # NOTE: both pypy flavors are weirdly unstable on Travis nowadays, even # pre-test-run. allow_failures: - python: pypy - python: pypy3 # Disabled per https://github.com/travis-ci/travis-ci/issues/1696 # fast_finish: true install: # TODO: real test matrix with at least some cells combining different invoke # and/or paramiko versions, released versions, etc # Invoke from master for parity - "pip install -e git+https://github.com/pyinvoke/invoke#egg=invoke" # And invocations, ditto - "pip install -e git+https://github.com/pyinvoke/invocations#egg=invocations" # Paramiko ditto - "pip install -e git+https://github.com/paramiko/paramiko#egg=paramiko" # Self - pip install -e . # Limit setuptools as some newer versions have Issues(tm). This needs doing # as its own step; trying to do it via requirements.txt isn't always # sufficient. 
- pip install "setuptools<34" # Dev requirements # TODO: follow invoke and split it up a bit so we're not pulling down # conflicting or unused-by-travis deps? - pip install -r dev-requirements.txt # Sanity test of the Invoke layer, if that's busted everything is - inv --list # Sanity test of Fabric itself - fab --version before_script: # Create 'sudouser' w/ sudo password & perms on Travis' homedir - inv travis.make-sudouser # Allow us to SSH passwordless to localhost - inv travis.make-sshable script: # Fast syntax check failures for more rapid feedback to submitters # (Travis-oriented metatask that version checks Python, installs, runs.) - inv travis.blacken # I have this in my git pre-push hook, but contributors probably don't - flake8 # Execute full test suite + coverage, as the new sudo-capable user - inv travis.sudo-coverage # Execute integration tests too. TODO: merge under coverage...somehow # NOTE: this also runs as the sudo-capable user, even if it's not necessarily # doing any sudo'ing itself - the sudo-capable user is also the ssh-able # user... - inv travis.sudo-run "inv integration" # Websites build OK? (Not on PyPy3, Sphinx is all "who the hell are you?" =/ - "if [[ $TRAVIS_PYTHON_VERSION != 'pypy3' ]]; then inv sites www.doctest docs.doctest; fi" # Did we break setup.py? - inv travis.test-installation --package=fabric --sanity="fab --version" # Test distribution builds. 
- inv travis.test-packaging --package=fabric --sanity="fab --version" # Again, but as 'fabric2' - rm -rf tmp - pip uninstall -y fabric - "PACKAGE_AS_FABRIC2=yes inv travis.test-packaging --package=fabric2 --sanity=\"fab2 --version\"" - inv sanity-test-from-v1 #after_success: # Upload coverage data to codecov #- codecov notifications: irc: channels: "irc.freenode.org#fabric" template: - "%{repository_name}@%{branch}: %{message} (%{build_url})" on_success: change on_failure: change email: false fabric-2.6.0/LICENSE000066400000000000000000000024421400143053200137440ustar00rootroot00000000000000Copyright (c) 2020 Jeff Forcier. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
fabric-2.6.0/MANIFEST.in000066400000000000000000000004261400143053200144750ustar00rootroot00000000000000include LICENSE include README.rst include tasks.py recursive-include sites * recursive-exclude sites/*/_build * include dev-requirements.txt recursive-include tests * recursive-exclude tests *.pyc *.pyo recursive-include integration * recursive-exclude integration *.pyc *.pyo fabric-2.6.0/README.rst000066400000000000000000000012131400143053200144210ustar00rootroot00000000000000Welcome to Fabric! ================== Fabric is a high level Python (2.7, 3.4+) library designed to execute shell commands remotely over SSH, yielding useful Python objects in return. It builds on top of `Invoke `_ (subprocess command execution and command-line features) and `Paramiko `_ (SSH protocol implementation), extending their APIs to complement one another and provide additional functionality. For a high level introduction, including example code, please see `our main project website `_; or for detailed API docs, see `the versioned API website `_. fabric-2.6.0/codecov.yml000066400000000000000000000001331400143053200150770ustar00rootroot00000000000000# No codecov comments at all, please - just the github 'checks' is sufficient comment: off fabric-2.6.0/dev-requirements.txt000066400000000000000000000011361400143053200167760ustar00rootroot00000000000000# Invoke implicitly required by self/pip install -e . # Invocations for common project tasks invocations>=1.0,<2.0 # Invoke from git, temporarily? -e git+https://github.com/pyinvoke/invoke#egg=invoke # pytest-relaxed for test organization, display etc tweaks pytest-relaxed>=1.0.1,<1.1 pytest==3.2.5 # pytest-cov for coverage pytest-cov==2.5.1 six==1.10.0 # Mock for test mocking mock==2.0.0 # Linting! flake8==3.6.0 # Coverage! 
coverage==5.3.1 codecov==2.1.11 # Documentation tools sphinx>=1.4,<1.7 alabaster==0.7.12 releases>=1.5,<2.0 # Release tools semantic_version>=2.4,<2.5 wheel==0.24 twine==1.11.0 fabric-2.6.0/fabric/000077500000000000000000000000001400143053200141635ustar00rootroot00000000000000fabric-2.6.0/fabric/__init__.py000066400000000000000000000004221400143053200162720ustar00rootroot00000000000000# flake8: noqa from ._version import __version_info__, __version__ from .connection import Config, Connection from .runners import Remote, Result from .group import Group, SerialGroup, ThreadingGroup, GroupResult from .tasks import task, Task from .executor import Executor fabric-2.6.0/fabric/__main__.py000066400000000000000000000002131400143053200162510ustar00rootroot00000000000000""" This code provides the ability to run fabric package as a script Usage: python -m fabric """ from .main import program program.run() fabric-2.6.0/fabric/_version.py000066400000000000000000000001201400143053200163520ustar00rootroot00000000000000__version_info__ = (2, 6, 0) __version__ = ".".join(map(str, __version_info__)) fabric-2.6.0/fabric/config.py000066400000000000000000000331031400143053200160020ustar00rootroot00000000000000import copy import errno import os from invoke.config import Config as InvokeConfig, merge_dicts from paramiko.config import SSHConfig from .runners import Remote from .util import get_local_user, debug class Config(InvokeConfig): """ An `invoke.config.Config` subclass with extra Fabric-related behavior. This class behaves like `invoke.config.Config` in every way, with the following exceptions: - its `global_defaults` staticmethod has been extended to add/modify some default settings (see its documentation, below, for details); - it triggers loading of Fabric-specific env vars (e.g. ``FABRIC_RUN_HIDE=true`` instead of ``INVOKE_RUN_HIDE=true``) and filenames (e.g. ``/etc/fabric.yaml`` instead of ``/etc/invoke.yaml``). 
- it extends the API to account for loading ``ssh_config`` files (which are stored as additional attributes and have no direct relation to the regular config data/hierarchy.) - it adds a new optional constructor, `from_v1`, which :ref:`generates configuration data from Fabric 1 `. Intended for use with `.Connection`, as using vanilla `invoke.config.Config` objects would require users to manually define ``port``, ``user`` and so forth. .. seealso:: :doc:`/concepts/configuration`, :ref:`ssh-config` .. versionadded:: 2.0 """ prefix = "fabric" @classmethod def from_v1(cls, env, **kwargs): """ Alternate constructor which uses Fabric 1's ``env`` dict for settings. All keyword arguments besides ``env`` are passed unmolested into the primary constructor, with the exception of ``overrides``, which is used internally & will end up resembling the data from ``env`` with the user-supplied overrides on top. .. warning:: Because your own config overrides will win over data from ``env``, make sure you only set values you *intend* to change from your v1 environment! For details on exactly which ``env`` vars are imported and what they become in the new API, please see :ref:`v1-env-var-imports`. :param env: An explicit Fabric 1 ``env`` dict (technically, any ``fabric.utils._AttributeDict`` instance should work) to pull configuration from. .. versionadded:: 2.4 """ # TODO: automagic import, if we can find a way to test that # Use overrides level (and preserve whatever the user may have given) # TODO: we really do want arbitrary number of config levels, don't we? # TODO: most of these need more care re: only filling in when they # differ from the v1 default. As-is these won't overwrite runtime # overrides (due to .setdefault) but they may still be filling in empty # values to stomp on lower level config levels... data = kwargs.pop("overrides", {}) # TODO: just use a dataproxy or defaultdict?? 
for subdict in ("connect_kwargs", "run", "sudo", "timeouts"): data.setdefault(subdict, {}) # PTY use data["run"].setdefault("pty", env.always_use_pty) # Gateway data.setdefault("gateway", env.gateway) # Agent forwarding data.setdefault("forward_agent", env.forward_agent) # Key filename(s) if env.key_filename is not None: data["connect_kwargs"].setdefault("key_filename", env.key_filename) # Load keys from agent? data["connect_kwargs"].setdefault("allow_agent", not env.no_agent) data.setdefault("ssh_config_path", env.ssh_config_path) # Sudo password data["sudo"].setdefault("password", env.sudo_password) # Vanilla password (may be used for regular and/or sudo, depending) passwd = env.password data["connect_kwargs"].setdefault("password", passwd) if not data["sudo"]["password"]: data["sudo"]["password"] = passwd data["sudo"].setdefault("prompt", env.sudo_prompt) data["timeouts"].setdefault("connect", env.timeout) data.setdefault("load_ssh_configs", env.use_ssh_config) data["run"].setdefault("warn", env.warn_only) # Put overrides back for real constructor and go kwargs["overrides"] = data return cls(**kwargs) def __init__(self, *args, **kwargs): """ Creates a new Fabric-specific config object. For most API details, see `invoke.config.Config.__init__`. Parameters new to this subclass are listed below. :param ssh_config: Custom/explicit `paramiko.config.SSHConfig` object. If given, prevents loading of any SSH config files. Default: ``None``. :param str runtime_ssh_path: Runtime SSH config path to load. Prevents loading of system/user files if given. Default: ``None``. :param str system_ssh_path: Location of the system-level SSH config file. Default: ``/etc/ssh/ssh_config``. :param str user_ssh_path: Location of the user-level SSH config file. Default: ``~/.ssh/config``. :param bool lazy: Has the same meaning as the parent class' ``lazy``, but additionally controls whether SSH config file loading is deferred (requires manually calling `load_ssh_config` sometime.) 
For example, one may need to wait for user input before calling `set_runtime_ssh_path`, which will inform exactly what `load_ssh_config` does. """ # Tease out our own kwargs. # TODO: consider moving more stuff out of __init__ and into methods so # there's less of this sort of splat-args + pop thing? Eh. ssh_config = kwargs.pop("ssh_config", None) lazy = kwargs.get("lazy", False) self.set_runtime_ssh_path(kwargs.pop("runtime_ssh_path", None)) system_path = kwargs.pop("system_ssh_path", "/etc/ssh/ssh_config") self._set(_system_ssh_path=system_path) self._set(_user_ssh_path=kwargs.pop("user_ssh_path", "~/.ssh/config")) # Record whether we were given an explicit object (so other steps know # whether to bother loading from disk or not) # This needs doing before super __init__ as that calls our post_init explicit = ssh_config is not None self._set(_given_explicit_object=explicit) # Arrive at some non-None SSHConfig object (upon which to run .parse() # later, in _load_ssh_file()) if ssh_config is None: ssh_config = SSHConfig() self._set(base_ssh_config=ssh_config) # Now that our own attributes have been prepared & kwargs yanked, we # can fall up into parent __init__() super(Config, self).__init__(*args, **kwargs) # And finally perform convenience non-lazy bits if needed if not lazy: self.load_ssh_config() def set_runtime_ssh_path(self, path): """ Configure a runtime-level SSH config file path. If set, this will cause `load_ssh_config` to skip system and user files, as OpenSSH does. .. versionadded:: 2.0 """ self._set(_runtime_ssh_path=path) def load_ssh_config(self): """ Load SSH config file(s) from disk. Also (beforehand) ensures that Invoke-level config re: runtime SSH config file paths, is accounted for. .. versionadded:: 2.0 """ # Update the runtime SSH config path (assumes enough regular config # levels have been loaded that anyone wanting to transmit this info # from a 'vanilla' Invoke config, has gotten it set.) 
if self.ssh_config_path: self._runtime_ssh_path = self.ssh_config_path # Load files from disk if we weren't given an explicit SSHConfig in # __init__ if not self._given_explicit_object: self._load_ssh_files() def clone(self, *args, **kwargs): # TODO: clone() at this point kinda-sorta feels like it's retreading # __reduce__ and the related (un)pickling stuff... # Get cloned obj. # NOTE: Because we also extend .init_kwargs, the actual core SSHConfig # data is passed in at init time (ensuring no files get loaded a 2nd, # etc time) and will already be present, so we don't need to set # .base_ssh_config ourselves. Similarly, there's no need to worry about # how the SSH config paths may be inaccurate until below; nothing will # be referencing them. new = super(Config, self).clone(*args, **kwargs) # Copy over our custom attributes, so that the clone still resembles us # re: recording where the data originally came from (in case anything # re-runs ._load_ssh_files(), for example). for attr in ( "_runtime_ssh_path", "_system_ssh_path", "_user_ssh_path", ): setattr(new, attr, getattr(self, attr)) # Load SSH configs, in case they weren't prior to now (e.g. a vanilla # Invoke clone(into), instead of a us-to-us clone.) self.load_ssh_config() # All done return new def _clone_init_kwargs(self, *args, **kw): # Parent kwargs kwargs = super(Config, self)._clone_init_kwargs(*args, **kw) # Transmit our internal SSHConfig via explicit-obj kwarg, thus # bypassing any file loading. (Our extension of clone() above copies # over other attributes as well so that the end result looks consistent # with reality.) new_config = SSHConfig() # TODO: as with other spots, this implies SSHConfig needs a cleaner # public API re: creating and updating its core data. new_config._config = copy.deepcopy(self.base_ssh_config._config) return dict(kwargs, ssh_config=new_config) def _load_ssh_files(self): """ Trigger loading of configured SSH config file paths. 
Expects that ``base_ssh_config`` has already been set to an `~paramiko.config.SSHConfig` object. :returns: ``None``. """ # TODO: does this want to more closely ape the behavior of # InvokeConfig.load_files? re: having a _found attribute for each that # determines whether to load or skip if self._runtime_ssh_path is not None: path = self._runtime_ssh_path # Manually blow up like open() (_load_ssh_file normally doesn't) if not os.path.exists(path): msg = "No such file or directory: {!r}".format(path) raise IOError(errno.ENOENT, msg) self._load_ssh_file(os.path.expanduser(path)) elif self.load_ssh_configs: for path in (self._user_ssh_path, self._system_ssh_path): self._load_ssh_file(os.path.expanduser(path)) def _load_ssh_file(self, path): """ Attempt to open and parse an SSH config file at ``path``. Does nothing if ``path`` is not a path to a valid file. :returns: ``None``. """ if os.path.isfile(path): old_rules = len(self.base_ssh_config._config) with open(path) as fd: self.base_ssh_config.parse(fd) new_rules = len(self.base_ssh_config._config) msg = "Loaded {} new ssh_config rules from {!r}" debug(msg.format(new_rules - old_rules, path)) else: debug("File not found, skipping") @staticmethod def global_defaults(): """ Default configuration values and behavior toggles. Fabric only extends this method in order to make minor adjustments and additions to Invoke's `~invoke.config.Config.global_defaults`; see its documentation for the base values, such as the config subtrees controlling behavior of ``run`` or how ``tasks`` behave. For Fabric-specific modifications and additions to the Invoke-level defaults, see our own config docs at :ref:`default-values`. .. versionadded:: 2.0 """ # TODO: hrm should the run-related things actually be derived from the # runner_class? E.g. Local defines local stuff, Remote defines remote # stuff? Doesn't help with the final config tree tho... 
# TODO: as to that, this is a core problem, Fabric wants split # local/remote stuff, eg replace_env wants to be False for local and # True remotely; shell wants to differ depending on target (and either # way, does not want to use local interrogation for remote) # TODO: is it worth moving all of our 'new' settings to a discrete # namespace for cleanliness' sake? e.g. ssh.port, ssh.user etc. # It wouldn't actually simplify this code any, but it would make it # easier for users to determine what came from which library/repo. defaults = InvokeConfig.global_defaults() ours = { # New settings "connect_kwargs": {}, "forward_agent": False, "gateway": None, # TODO 3.0: change to True and update all docs accordingly. "inline_ssh_env": False, "load_ssh_configs": True, "port": 22, "run": {"replace_env": True}, "runners": {"remote": Remote}, "ssh_config_path": None, "tasks": {"collection_name": "fabfile"}, # TODO: this becomes an override/extend once Invoke grows execution # timeouts (which should be timeouts.execute) "timeouts": {"connect": None}, "user": get_local_user(), } merge_dicts(defaults, ours) return defaults fabric-2.6.0/fabric/connection.py000066400000000000000000001174241400143053200167050ustar00rootroot00000000000000from contextlib import contextmanager from threading import Event try: from invoke.vendor.six import StringIO from invoke.vendor.decorator import decorator from invoke.vendor.six import string_types except ImportError: from six import StringIO from decorator import decorator from six import string_types import socket from invoke import Context from invoke.exceptions import ThreadException from paramiko.agent import AgentRequestHandler from paramiko.client import SSHClient, AutoAddPolicy from paramiko.config import SSHConfig from paramiko.proxy import ProxyCommand from .config import Config from .exceptions import InvalidV1Env from .transfer import Transfer from .tunnels import TunnelManager, Tunnel @decorator def opens(method, self, *args, **kwargs): 
self.open() return method(self, *args, **kwargs) def derive_shorthand(host_string): user_hostport = host_string.rsplit("@", 1) hostport = user_hostport.pop() user = user_hostport[0] if user_hostport and user_hostport[0] else None # IPv6: can't reliably tell where addr ends and port begins, so don't # try (and don't bother adding special syntax either, user should avoid # this situation by using port=). if hostport.count(":") > 1: host = hostport port = None # IPv4: can split on ':' reliably. else: host_port = hostport.rsplit(":", 1) host = host_port.pop(0) or None port = host_port[0] if host_port and host_port[0] else None if port is not None: port = int(port) return {"user": user, "host": host, "port": port} class Connection(Context): """ A connection to an SSH daemon, with methods for commands and file transfer. **Basics** This class inherits from Invoke's `~invoke.context.Context`, as it is a context within which commands, tasks etc can operate. It also encapsulates a Paramiko `~paramiko.client.SSHClient` instance, performing useful high level operations with that `~paramiko.client.SSHClient` and `~paramiko.channel.Channel` instances generated from it. .. _connect_kwargs: .. note:: Many SSH specific options -- such as specifying private keys and passphrases, timeouts, disabling SSH agents, etc -- are handled directly by Paramiko and should be specified via the :ref:`connect_kwargs argument ` of the constructor. **Lifecycle** `.Connection` has a basic "`create <__init__>`, `connect/open `, `do work `, `disconnect/close `" lifecycle: - `Instantiation <__init__>` imprints the object with its connection parameters (but does **not** actually initiate the network connection). - An alternate constructor exists for users :ref:`upgrading piecemeal from Fabric 1 `: `from_v1` - Methods like `run`, `get` etc automatically trigger a call to `open` if the connection is not active; users may of course call `open` manually if desired. 
- Connections do not always need to be explicitly closed; much of the time, Paramiko's garbage collection hooks or Python's own shutdown sequence will take care of things. **However**, should you encounter edge cases (for example, sessions hanging on exit) it's helpful to explicitly close connections when you're done with them. This can be accomplished by manually calling `close`, or by using the object as a contextmanager:: with Connection('host') as c: c.run('command') c.put('file') .. note:: This class rebinds `invoke.context.Context.run` to `.local` so both remote and local command execution can coexist. **Configuration** Most `.Connection` parameters honor :doc:`Invoke-style configuration ` as well as any applicable :ref:`SSH config file directives `. For example, to end up with a connection to ``admin@myhost``, one could: - Use any built-in config mechanism, such as ``/etc/fabric.yml``, ``~/.fabric.json``, collection-driven configuration, env vars, etc, stating ``user: admin`` (or ``{"user": "admin"}``, depending on config format.) Then ``Connection('myhost')`` would implicitly have a ``user`` of ``admin``. - Use an SSH config file containing ``User admin`` within any applicable ``Host`` header (``Host myhost``, ``Host *``, etc.) Again, ``Connection('myhost')`` will default to an ``admin`` user. - Leverage host-parameter shorthand (described in `.Config.__init__`), i.e. ``Connection('admin@myhost')``. - Give the parameter directly: ``Connection('myhost', user='admin')``. The same applies to agent forwarding, gateways, and so forth. .. versionadded:: 2.0 """ # NOTE: these are initialized here to hint to invoke.Config.__setattr__ # that they should be treated as real attributes instead of config proxies. # (Additionally, we're doing this instead of using invoke.Config._set() so # we can take advantage of Sphinx's attribute-doc-comment static analysis.) 
# Once an instance is created, these values will usually be non-None # because they default to the default config values. host = None original_host = None user = None port = None ssh_config = None gateway = None forward_agent = None connect_timeout = None connect_kwargs = None client = None transport = None _sftp = None _agent_handler = None @classmethod def from_v1(cls, env, **kwargs): """ Alternate constructor which uses Fabric 1's ``env`` dict for settings. All keyword arguments besides ``env`` are passed unmolested into the primary constructor. .. warning:: Because your own config overrides will win over data from ``env``, make sure you only set values you *intend* to change from your v1 environment! For details on exactly which ``env`` vars are imported and what they become in the new API, please see :ref:`v1-env-var-imports`. :param env: An explicit Fabric 1 ``env`` dict (technically, any ``fabric.utils._AttributeDict`` instance should work) to pull configuration from. .. versionadded:: 2.4 """ # TODO: import fabric.state.env (need good way to test it first...) # TODO: how to handle somebody accidentally calling this in a process # where 'fabric' is fabric 2, and there's no fabric 1? Probably just a # re-raise of ImportError?? # Our only requirement is a non-empty host_string if not env.host_string: raise InvalidV1Env( "Supplied v1 env has an empty `host_string` value! Please make sure you're calling Connection.from_v1 within a connected Fabric 1 session." # noqa ) # TODO: detect collisions with kwargs & except instead of overwriting? # (More Zen of Python compliant, but also, effort, and also, makes it # harder for users to intentionally overwrite!) connect_kwargs = kwargs.setdefault("connect_kwargs", {}) kwargs.setdefault("host", env.host_string) shorthand = derive_shorthand(env.host_string) # TODO: don't we need to do the below skipping for user too? 
kwargs.setdefault("user", env.user) # Skip port if host string seemed to have it; otherwise we hit our own # ambiguity clause in __init__. v1 would also have been doing this # anyways (host string wins over other settings). if not shorthand["port"]: # Run port through int(); v1 inexplicably has a string default... kwargs.setdefault("port", int(env.port)) # key_filename defaults to None in v1, but in v2, we expect it to be # either unset, or set to a list. Thus, we only pull it over if it is # not None. if env.key_filename is not None: connect_kwargs.setdefault("key_filename", env.key_filename) # Obtain config values, if not given, from its own from_v1 # NOTE: not using setdefault as we truly only want to call # Config.from_v1 when necessary. if "config" not in kwargs: kwargs["config"] = Config.from_v1(env) return cls(**kwargs) # TODO: should "reopening" an existing Connection object that has been # closed, be allowed? (See e.g. how v1 detects closed/semi-closed # connections & nukes them before creating a new client to the same host.) # TODO: push some of this into paramiko.client.Client? e.g. expand what # Client.exec_command does, it already allows configuring a subset of what # we do / will eventually do / did in 1.x. It's silly to have to do # .get_transport().open_session(). def __init__( self, host, user=None, port=None, config=None, gateway=None, forward_agent=None, connect_timeout=None, connect_kwargs=None, inline_ssh_env=None, ): """ Set up a new object representing a server connection. :param str host: the hostname (or IP address) of this connection. May include shorthand for the ``user`` and/or ``port`` parameters, of the form ``user@host``, ``host:port``, or ``user@host:port``. .. note:: Due to ambiguity, IPv6 host addresses are incompatible with the ``host:port`` shorthand (though ``user@host`` will still work OK). 
In other words, the presence of >1 ``:`` character will prevent any attempt to derive a shorthand port number; use the explicit ``port`` parameter instead. .. note:: If ``host`` matches a ``Host`` clause in loaded SSH config data, and that ``Host`` clause contains a ``Hostname`` directive, the resulting `.Connection` object will behave as if ``host`` is equal to that ``Hostname`` value. In all cases, the original value of ``host`` is preserved as the ``original_host`` attribute. Thus, given SSH config like so:: Host myalias Hostname realhostname a call like ``Connection(host='myalias')`` will result in an object whose ``host`` attribute is ``realhostname``, and whose ``original_host`` attribute is ``myalias``. :param str user: the login user for the remote connection. Defaults to ``config.user``. :param int port: the remote port. Defaults to ``config.port``. :param config: configuration settings to use when executing methods on this `.Connection` (e.g. default SSH port and so forth). Should be a `.Config` or an `invoke.config.Config` (which will be turned into a `.Config`). Default is an anonymous `.Config` object. :param gateway: An object to use as a proxy or gateway for this connection. This parameter accepts one of the following: - another `.Connection` (for a ``ProxyJump`` style gateway); - a shell command string (for a ``ProxyCommand`` style style gateway). Default: ``None``, meaning no gatewaying will occur (unless otherwise configured; if one wants to override a configured gateway at runtime, specify ``gateway=False``.) .. seealso:: :ref:`ssh-gateways` :param bool forward_agent: Whether to enable SSH agent forwarding. Default: ``config.forward_agent``. :param int connect_timeout: Connection timeout, in seconds. Default: ``config.timeouts.connect``. :param dict connect_kwargs: .. _connect_kwargs-arg: Keyword arguments handed verbatim to `SSHClient.connect ` (when `.open` is called). 
`.Connection` tries not to grow additional settings/kwargs of its own unless it is adding value of some kind; thus, ``connect_kwargs`` is currently the right place to hand in paramiko connection parameters such as ``pkey`` or ``key_filename``. For example:: c = Connection( host="hostname", user="admin", connect_kwargs={ "key_filename": "/home/myuser/.ssh/private.key", }, ) Default: ``config.connect_kwargs``. :param bool inline_ssh_env: Whether to send environment variables "inline" as prefixes in front of command strings (``export VARNAME=value && mycommand here``), instead of trying to submit them through the SSH protocol itself (which is the default behavior). This is necessary if the remote server has a restricted ``AcceptEnv`` setting (which is the common default). The default value is the value of the ``inline_ssh_env`` :ref:`configuration value ` (which itself defaults to ``False``). .. warning:: This functionality does **not** currently perform any shell escaping on your behalf! Be careful when using nontrivial values, and note that you can put in your own quoting, backslashing etc if desired. Consider using a different approach (such as actual remote shell scripts) if you run into too many issues here. .. note:: When serializing into prefixed ``FOO=bar`` format, we apply the builtin `sorted` function to the env dictionary's keys, to remove what would otherwise be ambiguous/arbitrary ordering. .. note:: This setting has no bearing on *local* shell commands; it only affects remote commands, and thus, methods like `.run` and `.sudo`. :raises ValueError: if user or port values are given via both ``host`` shorthand *and* their own arguments. (We `refuse the temptation to guess`_). .. _refuse the temptation to guess: http://zen-of-python.info/ in-the-face-of-ambiguity-refuse-the-temptation-to-guess.html#12 .. versionchanged:: 2.3 Added the ``inline_ssh_env`` parameter. """ # NOTE: parent __init__ sets self._config; for now we simply overwrite # that below. 
If it's somehow problematic we would want to break parent # __init__ up in a manner that is more cleanly overrideable. super(Connection, self).__init__(config=config) #: The .Config object referenced when handling default values (for e.g. #: user or port, when not explicitly given) or deciding how to behave. if config is None: config = Config() # Handle 'vanilla' Invoke config objects, which need cloning 'into' one # of our own Configs (which grants the new defaults, etc, while not # squashing them if the Invoke-level config already accounted for them) elif not isinstance(config, Config): config = config.clone(into=Config) self._set(_config=config) # TODO: when/how to run load_files, merge, load_shell_env, etc? # TODO: i.e. what is the lib use case here (and honestly in invoke too) shorthand = self.derive_shorthand(host) host = shorthand["host"] err = "You supplied the {} via both shorthand and kwarg! Please pick one." # noqa if shorthand["user"] is not None: if user is not None: raise ValueError(err.format("user")) user = shorthand["user"] if shorthand["port"] is not None: if port is not None: raise ValueError(err.format("port")) port = shorthand["port"] # NOTE: we load SSH config data as early as possible as it has # potential to affect nearly every other attribute. #: The per-host SSH config data, if any. (See :ref:`ssh-config`.) self.ssh_config = self.config.base_ssh_config.lookup(host) self.original_host = host #: The hostname of the target server. self.host = host if "hostname" in self.ssh_config: # TODO: log that this occurred? self.host = self.ssh_config["hostname"] #: The username this connection will use to connect to the remote end. self.user = user or self.ssh_config.get("user", self.config.user) # TODO: is it _ever_ possible to give an empty user value (e.g. # user='')? E.g. do some SSH server specs allow for that? #: The network port to connect on. 
self.port = port or int(self.ssh_config.get("port", self.config.port)) # Gateway/proxy/bastion/jump setting: non-None values - string, # Connection, even eg False - get set directly; None triggers seek in # config/ssh_config #: The gateway `.Connection` or ``ProxyCommand`` string to be used, #: if any. self.gateway = gateway if gateway is not None else self.get_gateway() # NOTE: we use string above, vs ProxyCommand obj, to avoid spinning up # the ProxyCommand subprocess at init time, vs open() time. # TODO: make paramiko.proxy.ProxyCommand lazy instead? if forward_agent is None: # Default to config... forward_agent = self.config.forward_agent # But if ssh_config is present, it wins if "forwardagent" in self.ssh_config: # TODO: SSHConfig really, seriously needs some love here, god map_ = {"yes": True, "no": False} forward_agent = map_[self.ssh_config["forwardagent"]] #: Whether agent forwarding is enabled. self.forward_agent = forward_agent if connect_timeout is None: connect_timeout = self.ssh_config.get( "connecttimeout", self.config.timeouts.connect ) if connect_timeout is not None: connect_timeout = int(connect_timeout) #: Connection timeout self.connect_timeout = connect_timeout #: Keyword arguments given to `paramiko.client.SSHClient.connect` when #: `open` is called. self.connect_kwargs = self.resolve_connect_kwargs(connect_kwargs) #: The `paramiko.client.SSHClient` instance this connection wraps. client = SSHClient() client.set_missing_host_key_policy(AutoAddPolicy()) self.client = client #: A convenience handle onto the return value of #: ``self.client.get_transport()``. self.transport = None if inline_ssh_env is None: inline_ssh_env = self.config.inline_ssh_env #: Whether to construct remote command lines with env vars prefixed #: inline. self.inline_ssh_env = inline_ssh_env def resolve_connect_kwargs(self, connect_kwargs): # Grab connect_kwargs from config if not explicitly given. 
if connect_kwargs is None: # TODO: is it better to pre-empt conflicts w/ manually-handled # connect() kwargs (hostname, username, etc) here or in open()? # We're doing open() for now in case e.g. someone manually modifies # .connect_kwargs attributewise, but otherwise it feels better to # do it early instead of late. connect_kwargs = self.config.connect_kwargs # Special case: key_filename gets merged instead of overridden. # TODO: probably want some sorta smart merging generally, special cases # are bad. elif "key_filename" in self.config.connect_kwargs: kwarg_val = connect_kwargs.get("key_filename", []) conf_val = self.config.connect_kwargs["key_filename"] # Config value comes before kwarg value (because it may contain # CLI flag value.) connect_kwargs["key_filename"] = conf_val + kwarg_val # SSH config identityfile values come last in the key_filename # 'hierarchy'. if "identityfile" in self.ssh_config: connect_kwargs.setdefault("key_filename", []) connect_kwargs["key_filename"].extend( self.ssh_config["identityfile"] ) return connect_kwargs def get_gateway(self): # SSH config wins over Invoke-style config if "proxyjump" in self.ssh_config: # Reverse hop1,hop2,hop3 style ProxyJump directive so we start # with the final (itself non-gatewayed) hop and work up to # the front (actual, supplied as our own gateway) hop hops = reversed(self.ssh_config["proxyjump"].split(",")) prev_gw = None for hop in hops: # Short-circuit if we appear to be our own proxy, which would # be a RecursionError. Implies SSH config wildcards. # TODO: in an ideal world we'd check user/port too in case they # differ, but...seriously? They can file a PR with those extra # half dozen test cases in play, E_NOTIME if self.derive_shorthand(hop)["host"] == self.host: return None # Happily, ProxyJump uses identical format to our host # shorthand... 
kwargs = dict(config=self.config.clone()) if prev_gw is not None: kwargs["gateway"] = prev_gw cxn = Connection(hop, **kwargs) prev_gw = cxn return prev_gw elif "proxycommand" in self.ssh_config: # Just a string, which we interpret as a proxy command.. return self.ssh_config["proxycommand"] # Fallback: config value (may be None). return self.config.gateway def __repr__(self): # Host comes first as it's the most common differentiator by far bits = [("host", self.host)] # TODO: maybe always show user regardless? Explicit is good... if self.user != self.config.user: bits.append(("user", self.user)) # TODO: harder to make case for 'always show port'; maybe if it's # non-22 (even if config has overridden the local default)? if self.port != self.config.port: bits.append(("port", self.port)) # NOTE: sometimes self.gateway may be eg False if someone wants to # explicitly override a configured non-None value (as otherwise it's # impossible for __init__ to tell if a None means "nothing given" or # "seriously please no gatewaying". So, this must always be a vanilla # truth test and not eg "is not None". if self.gateway: # Displaying type because gw params would probs be too verbose val = "proxyjump" if isinstance(self.gateway, string_types): val = "proxycommand" bits.append(("gw", val)) return "".format( " ".join("{}={}".format(*x) for x in bits) ) def _identity(self): # TODO: consider including gateway and maybe even other init kwargs? # Whether two cxns w/ same user/host/port but different # gateway/keys/etc, should be considered "the same", is unclear. return (self.host, self.user, self.port) def __eq__(self, other): if not isinstance(other, Connection): return False return self._identity() == other._identity() def __lt__(self, other): return self._identity() < other._identity() def __hash__(self): # NOTE: this departs from Context/DataProxy, which is not usefully # hashable. 
return hash(self._identity()) def derive_shorthand(self, host_string): # NOTE: used to be defined inline; preserving API call for both # backwards compatibility and because it seems plausible we may want to # modify behavior later, using eg config or other attributes. return derive_shorthand(host_string) @property def is_connected(self): """ Whether or not this connection is actually open. .. versionadded:: 2.0 """ return self.transport.active if self.transport else False def open(self): """ Initiate an SSH connection to the host/port this object is bound to. This may include activating the configured gateway connection, if one is set. Also saves a handle to the now-set Transport object for easier access. Various connect-time settings (and/or their corresponding :ref:`SSH config options `) are utilized here in the call to `SSHClient.connect `. (For details, see :doc:`the configuration docs `.) .. versionadded:: 2.0 """ # Short-circuit if self.is_connected: return err = "Refusing to be ambiguous: connect() kwarg '{}' was given both via regular arg and via connect_kwargs!" # noqa # These may not be given, period for key in """ hostname port username """.split(): if key in self.connect_kwargs: raise ValueError(err.format(key)) # These may be given one way or the other, but not both if ( "timeout" in self.connect_kwargs and self.connect_timeout is not None ): raise ValueError(err.format("timeout")) # No conflicts -> merge 'em together kwargs = dict( self.connect_kwargs, username=self.user, hostname=self.host, port=self.port, ) if self.gateway: kwargs["sock"] = self.open_gateway() if self.connect_timeout: kwargs["timeout"] = self.connect_timeout # Strip out empty defaults for less noisy debugging if "key_filename" in kwargs and not kwargs["key_filename"]: del kwargs["key_filename"] # Actually connect! self.client.connect(**kwargs) self.transport = self.client.get_transport() def open_gateway(self): """ Obtain a socket-like object from `gateway`. 
:returns: A ``direct-tcpip`` `paramiko.channel.Channel`, if `gateway` was a `.Connection`; or a `~paramiko.proxy.ProxyCommand`, if `gateway` was a string. .. versionadded:: 2.0 """ # ProxyCommand is faster to set up, so do it first. if isinstance(self.gateway, string_types): # Leverage a dummy SSHConfig to ensure %h/%p/etc are parsed. # TODO: use real SSH config once loading one properly is # implemented. ssh_conf = SSHConfig() dummy = "Host {}\n ProxyCommand {}" ssh_conf.parse(StringIO(dummy.format(self.host, self.gateway))) return ProxyCommand(ssh_conf.lookup(self.host)["proxycommand"]) # Handle inner-Connection gateway type here. # TODO: logging self.gateway.open() # TODO: expose the opened channel itself as an attribute? (another # possible argument for separating the two gateway types...) e.g. if # someone wanted to piggyback on it for other same-interpreter socket # needs... # TODO: and the inverse? allow users to supply their own socket/like # object they got via $WHEREEVER? # TODO: how best to expose timeout param? reuse general connection # timeout from config? return self.gateway.transport.open_channel( kind="direct-tcpip", dest_addr=(self.host, int(self.port)), # NOTE: src_addr needs to be 'empty but not None' values to # correctly encode into a network message. Theoretically Paramiko # could auto-interpret None sometime & save us the trouble. src_addr=("", 0), ) def close(self): """ Terminate the network connection to the remote end, if open. If no connection is open, this method does nothing. .. 
versionadded:: 2.0 """ if self.is_connected: self.client.close() if self.forward_agent and self._agent_handler is not None: self._agent_handler.close() def __enter__(self): return self def __exit__(self, *exc): self.close() @opens def create_session(self): channel = self.transport.open_session() if self.forward_agent: self._agent_handler = AgentRequestHandler(channel) return channel def _remote_runner(self): return self.config.runners.remote(self, inline_env=self.inline_ssh_env) @opens def run(self, command, **kwargs): """ Execute a shell command on the remote end of this connection. This method wraps an SSH-capable implementation of `invoke.runners.Runner.run`; see its documentation for details. .. warning:: There are a few spots where Fabric departs from Invoke's default settings/behaviors; they are documented under `.Config.global_defaults`. .. versionadded:: 2.0 """ return self._run(self._remote_runner(), command, **kwargs) @opens def sudo(self, command, **kwargs): """ Execute a shell command, via ``sudo``, on the remote end. This method is identical to `invoke.context.Context.sudo` in every way, except in that -- like `run` -- it honors per-host/per-connection configuration overrides in addition to the generic/global ones. Thus, for example, per-host sudo passwords may be configured. .. versionadded:: 2.0 """ return self._sudo(self._remote_runner(), command, **kwargs) def local(self, *args, **kwargs): """ Execute a shell command on the local system. This method is effectively a wrapper of `invoke.run`; see its docs for details and call signature. .. versionadded:: 2.0 """ # Superclass run() uses runners.local, so we can literally just call it # straight. return super(Connection, self).run(*args, **kwargs) @opens def sftp(self): """ Return a `~paramiko.sftp_client.SFTPClient` object. 
If called more than one time, memoizes the first result; thus, any given `.Connection` instance will only ever have a single SFTP client, and state (such as that managed by `~paramiko.sftp_client.SFTPClient.chdir`) will be preserved. .. versionadded:: 2.0 """ if self._sftp is None: self._sftp = self.client.open_sftp() return self._sftp def get(self, *args, **kwargs): """ Get a remote file to the local filesystem or file-like object. Simply a wrapper for `.Transfer.get`. Please see its documentation for all details. .. versionadded:: 2.0 """ return Transfer(self).get(*args, **kwargs) def put(self, *args, **kwargs): """ Put a remote file (or file-like object) to the remote filesystem. Simply a wrapper for `.Transfer.put`. Please see its documentation for all details. .. versionadded:: 2.0 """ return Transfer(self).put(*args, **kwargs) # TODO: yield the socket for advanced users? Other advanced use cases # (perhaps factor out socket creation itself)? # TODO: probably push some of this down into Paramiko @contextmanager @opens def forward_local( self, local_port, remote_port=None, remote_host="localhost", local_host="localhost", ): """ Open a tunnel connecting ``local_port`` to the server's environment. For example, say you want to connect to a remote PostgreSQL database which is locked down and only accessible via the system it's running on. You have SSH access to this server, so you can temporarily make port 5432 on your local system act like port 5432 on the server:: import psycopg2 from fabric import Connection with Connection('my-db-server').forward_local(5432): db = psycopg2.connect( host='localhost', port=5432, database='mydb' ) # Do things with 'db' here This method is analogous to using the ``-L`` option of OpenSSH's ``ssh`` program. :param int local_port: The local port number on which to listen. :param int remote_port: The remote port number. Defaults to the same value as ``local_port``. :param str local_host: The local hostname/interface on which to listen. 
Default: ``localhost``. :param str remote_host: The remote hostname serving the forwarded remote port. Default: ``localhost`` (i.e., the host this `.Connection` is connected to.) :returns: Nothing; this method is only useful as a context manager affecting local operating system state. .. versionadded:: 2.0 """ if not remote_port: remote_port = local_port # TunnelManager does all of the work, sitting in the background (so we # can yield) and spawning threads every time somebody connects to our # local port. finished = Event() manager = TunnelManager( local_port=local_port, local_host=local_host, remote_port=remote_port, remote_host=remote_host, # TODO: not a huge fan of handing in our transport, but...? transport=self.transport, finished=finished, ) manager.start() # Return control to caller now that things ought to be operational try: yield # Teardown once user exits block finally: # Signal to manager that it should close all open tunnels finished.set() # Then wait for it to do so manager.join() # Raise threading errors from within the manager, which would be # one of: # - an inner ThreadException, which was created by the manager on # behalf of its Tunnels; this gets directly raised. # - some other exception, which would thus have occurred in the # manager itself; we wrap this in a new ThreadException. # NOTE: in these cases, some of the metadata tracking in # ExceptionHandlingThread/ExceptionWrapper/ThreadException (which # is useful when dealing with multiple nearly-identical sibling IO # threads) is superfluous, but it doesn't feel worth breaking # things up further; we just ignore it for now. wrapper = manager.exception() if wrapper is not None: if wrapper.type is ThreadException: raise wrapper.value else: raise ThreadException([wrapper]) # TODO: cancel port forward on transport? Does that even make sense # here (where we used direct-tcpip) vs the opposite method (which # is what uses forward-tcpip)? 
# TODO: probably push some of this down into Paramiko @contextmanager @opens def forward_remote( self, remote_port, local_port=None, remote_host="127.0.0.1", local_host="localhost", ): """ Open a tunnel connecting ``remote_port`` to the local environment. For example, say you're running a daemon in development mode on your workstation at port 8080, and want to funnel traffic to it from a production or staging environment. In most situations this isn't possible as your office/home network probably blocks inbound traffic. But you have SSH access to this server, so you can temporarily make port 8080 on that server act like port 8080 on your workstation:: from fabric import Connection c = Connection('my-remote-server') with c.forward_remote(8080): c.run("remote-data-writer --port 8080") # Assuming remote-data-writer runs until interrupted, this will # stay open until you Ctrl-C... This method is analogous to using the ``-R`` option of OpenSSH's ``ssh`` program. :param int remote_port: The remote port number on which to listen. :param int local_port: The local port number. Defaults to the same value as ``remote_port``. :param str local_host: The local hostname/interface the forwarded connection talks to. Default: ``localhost``. :param str remote_host: The remote interface address to listen on when forwarding connections. Default: ``127.0.0.1`` (i.e. only listen on the remote localhost). :returns: Nothing; this method is only useful as a context manager affecting local operating system state. .. versionadded:: 2.0 """ if not local_port: local_port = remote_port # Callback executes on each connection to the remote port and is given # a Channel hooked up to said port. (We don't actually care about the # source/dest host/port pairs at all; only whether the channel has data # to read and suchlike.) # We then pair that channel with a new 'outbound' socket connection to # the local host/port being forwarded, in a new Tunnel. 
# That Tunnel is then added to a shared data structure so we can track # & close them during shutdown. # # TODO: this approach is less than ideal because we have to share state # between ourselves & the callback handed into the transport's own # thread handling (which is roughly analogous to our self-controlled # TunnelManager for local forwarding). See if we can use more of # Paramiko's API (or improve it and then do so) so that isn't # necessary. tunnels = [] def callback(channel, src_addr_tup, dst_addr_tup): sock = socket.socket() # TODO: handle connection failure such that channel, etc get closed sock.connect((local_host, local_port)) # TODO: we don't actually need to generate the Events at our level, # do we? Just let Tunnel.__init__ do it; all we do is "press its # button" on shutdown... tunnel = Tunnel(channel=channel, sock=sock, finished=Event()) tunnel.start() # Communication between ourselves & the Paramiko handling subthread tunnels.append(tunnel) # Ask Paramiko (really, the remote sshd) to call our callback whenever # connections are established on the remote iface/port. # transport.request_port_forward(remote_host, remote_port, callback) try: self.transport.request_port_forward( address=remote_host, port=remote_port, handler=callback ) yield finally: # TODO: see above re: lack of a TunnelManager # TODO: and/or also refactor with TunnelManager re: shutdown logic. # E.g. maybe have a non-thread TunnelManager-alike with a method # that acts as the callback? At least then there's a tiny bit more # encapsulation...meh. for tunnel in tunnels: tunnel.finished.set() tunnel.join() self.transport.cancel_port_forward( address=remote_host, port=remote_port ) fabric-2.6.0/fabric/exceptions.py000066400000000000000000000012721400143053200167200ustar00rootroot00000000000000# TODO: this may want to move to Invoke if we can find a use for it there too? # Or make it _more_ narrowly focused and stay here? 
class NothingToDo(Exception): pass class GroupException(Exception): """ Lightweight exception wrapper for `.GroupResult` when one contains errors. .. versionadded:: 2.0 """ def __init__(self, result): #: The `.GroupResult` object which would have been returned, had there #: been no errors. See its docstring (and that of `.Group`) for #: details. self.result = result class InvalidV1Env(Exception): """ Raised when attempting to import a Fabric 1 ``env`` which is missing data. """ pass fabric-2.6.0/fabric/executor.py000066400000000000000000000126101400143053200163730ustar00rootroot00000000000000import invoke from invoke import Call, Task from .tasks import ConnectionCall from .exceptions import NothingToDo from .util import debug class Executor(invoke.Executor): """ `~invoke.executor.Executor` subclass which understands Fabric concepts. Designed to work in tandem with Fabric's `@task `/`~fabric.tasks.Task`, and is capable of acting on information stored on the resulting objects -- such as default host lists. This class is written to be backwards compatible with vanilla Invoke-level tasks, which it simply delegates to its superclass. Please see the parent class' `documentation ` for details on most public API members and object lifecycle. """ def normalize_hosts(self, hosts): """ Normalize mixed host-strings-or-kwarg-dicts into kwarg dicts only. In other words, transforms data taken from the CLI (--hosts, always strings) or decorator arguments (may be strings or kwarg dicts) into kwargs suitable for creating Connection instances. Subclasses may wish to override or extend this to perform, for example, database or custom config file lookups (vs this default behavior, which is to simply assume that strings are 'host' kwargs). :param hosts: Potentially heterogenous list of host connection values, as per the ``hosts`` param to `.task`. :returns: Homogenous list of Connection init kwarg dicts. 
""" dicts = [] for value in hosts or []: # Assume first posarg to Connection() if not already a dict. if not isinstance(value, dict): value = dict(host=value) dicts.append(value) return dicts def expand_calls(self, calls, apply_hosts=True): # Generate new call list with per-host variants & Connections inserted ret = [] cli_hosts = [] host_str = self.core[0].args.hosts.value if apply_hosts and host_str: cli_hosts = host_str.split(",") for call in calls: if isinstance(call, Task): call = Call(task=call) # TODO: expand this to allow multiple types of execution plans, # pending outcome of invoke#461 (which, if flexible enough to # handle intersect of dependencies+parameterization, just becomes # 'honor that new feature of Invoke') # TODO: roles, other non-runtime host parameterizations, etc # Pre-tasks get added only once, not once per host. ret.extend(self.expand_calls(call.pre, apply_hosts=False)) # Determine final desired host list based on CLI and task values # (with CLI, being closer to runtime, winning) and normalize to # Connection-init kwargs. call_hosts = getattr(call, "hosts", None) cxn_params = self.normalize_hosts(cli_hosts or call_hosts) # Main task, per host/connection for init_kwargs in cxn_params: ret.append(self.parameterize(call, init_kwargs)) # Deal with lack of hosts list (acts same as `inv` in that case) # TODO: no tests for this branch? if not cxn_params: ret.append(call) # Post-tasks added once, not once per host. ret.extend(self.expand_calls(call.post, apply_hosts=False)) # Add remainder as anonymous task if self.core.remainder: # TODO: this will need to change once there are more options for # setting host lists besides "-H or 100% within-task" if not cli_hosts: raise NothingToDo( "Was told to run a command, but not given any hosts to run it on!" 
# noqa ) def anonymous(c): c.run(self.core.remainder) anon = Call(Task(body=anonymous)) # TODO: see above TODOs about non-parameterized setups, roles etc # TODO: will likely need to refactor that logic some more so it can # be used both there and here. for init_kwargs in self.normalize_hosts(cli_hosts): ret.append(self.parameterize(anon, init_kwargs)) return ret def parameterize(self, call, connection_init_kwargs): """ Parameterize a Call with its Context set to a per-host Connection. :param call: The generic `.Call` being parameterized. :param connection_init_kwargs: The dict of `.Connection` init params/kwargs to attach to the resulting `.ConnectionCall`. :returns: `.ConnectionCall`. """ msg = "Parameterizing {!r} with Connection kwargs {!r}" debug(msg.format(call, connection_init_kwargs)) # Generate a custom ConnectionCall that has init_kwargs (used for # creating the Connection at runtime) set to the requested params. new_call_kwargs = dict(init_kwargs=connection_init_kwargs) clone = call.clone(into=ConnectionCall, with_=new_call_kwargs) return clone def dedupe(self, tasks): # Don't perform deduping, we will often have "duplicate" tasks w/ # distinct host values/etc. # TODO: might want some deduplication later on though - falls under # "how to mesh parameterization with pre/post/etc deduping". return tasks fabric-2.6.0/fabric/group.py000066400000000000000000000303221400143053200156710ustar00rootroot00000000000000try: from invoke.vendor.six.moves.queue import Queue except ImportError: from six.moves.queue import Queue from invoke.util import ExceptionHandlingThread from .connection import Connection from .exceptions import GroupException class Group(list): """ A collection of `.Connection` objects whose API operates on its contents. .. warning:: **This is a partially abstract class**; you need to use one of its concrete subclasses (such as `.SerialGroup` or `.ThreadingGroup`) or you'll get ``NotImplementedError`` on most of the methods. 
Most methods in this class wrap those of `.Connection` and will accept the same arguments; however their return values and exception-raising behavior differ: - Return values are dict-like objects (`.GroupResult`) mapping `.Connection` objects to the return value for the respective connections: `.Group.run` returns a map of `.Connection` to `.runners.Result`, `.Group.get` returns a map of `.Connection` to `.transfer.Result`, etc. - If any connections encountered exceptions, a `.GroupException` is raised, which is a thin wrapper around what would otherwise have been the `.GroupResult` returned; within that wrapped `.GroupResult`, the excepting connections map to the exception that was raised, in place of a ``Result`` (as no ``Result`` was obtained.) Any non-excepting connections will have a ``Result`` value, as normal. For example, when no exceptions occur, a session might look like this:: >>> group = SerialGroup('host1', 'host2') >>> group.run("this is fine") { : , : , } With exceptions (anywhere from 1 to "all of them"), it looks like so; note the different exception classes, e.g. `~invoke.exceptions.UnexpectedExit` for a completed session whose command exited poorly, versus `socket.gaierror` for a host that had DNS problems:: >>> group = SerialGroup('host1', 'host2', 'notahost') >>> group.run("will it blend?") { : , : , : gaierror(...), } As with `.Connection`, `.Group` objects may be used as context managers, which will automatically `.close` the object on block exit. .. versionadded:: 2.0 .. versionchanged:: 2.4 Added context manager behavior. """ def __init__(self, *hosts, **kwargs): """ Create a group of connections from one or more shorthand host strings. See `.Connection` for details on the format of these strings - they will be used as the first positional argument of `.Connection` constructors. Any keyword arguments given will be forwarded directly to those `.Connection` constructors as well. 
For example, to get a serially executing group object that connects to ``admin@host1``, ``admin@host2`` and ``admin@host3``, and forwards your SSH agent too:: group = SerialGroup( "host1", "host2", "host3", user="admin", forward_agent=True, ) .. versionchanged:: 2.3 Added ``**kwargs`` (was previously only ``*hosts``). """ # TODO: #563, #388 (could be here or higher up in Program area) self.extend([Connection(host, **kwargs) for host in hosts]) @classmethod def from_connections(cls, connections): """ Alternate constructor accepting `.Connection` objects. .. versionadded:: 2.0 """ # TODO: *args here too; or maybe just fold into __init__ and type # check? group = cls() group.extend(connections) return group def _do(self, method, *args, **kwargs): # TODO: rename this something public & commit to an API for user # subclasses raise NotImplementedError def run(self, *args, **kwargs): """ Executes `.Connection.run` on all member `Connections <.Connection>`. :returns: a `.GroupResult`. .. versionadded:: 2.0 """ # TODO: how to change method of execution across contents? subclass, # kwargs, additional methods, inject an executor? Doing subclass for # now, but not 100% sure it's the best route. # TODO: also need way to deal with duplicate connections (see THOUGHTS) return self._do("run", *args, **kwargs) def sudo(self, *args, **kwargs): """ Executes `.Connection.sudo` on all member `Connections <.Connection>`. :returns: a `.GroupResult`. .. versionadded:: 2.6 """ # TODO: see run() TODOs return self._do("sudo", *args, **kwargs) # TODO: this all needs to mesh well with similar strategies applied to # entire tasks - so that may still end up factored out into Executors or # something lower level than both those and these? # TODO: local? Invoke wants ability to do that on its own though, which # would be distinct from Group. (May want to switch Group to use that, # though, whatever it ends up being? 
Eg many cases where you do want to do # some local thing either N times identically, or parameterized by remote # cxn values) def put(self, *args, **kwargs): """ Executes `.Connection.put` on all member `Connections <.Connection>`. This is a straightforward application: aside from whatever the concrete group subclass does for concurrency or lack thereof, the effective result is like running a loop over the connections and calling their ``put`` method. :returns: a `.GroupResult` whose values are `.transfer.Result` instances. .. versionadded:: 2.6 """ return self._do("put", *args, **kwargs) def get(self, *args, **kwargs): """ Executes `.Connection.get` on all member `Connections <.Connection>`. .. note:: This method changes some behaviors over e.g. directly calling `.Connection.get` on a ``for`` loop of connections; the biggest is that the implied default value for the ``local`` parameter is ``"{host}/"``, which triggers use of local path parameterization based on each connection's target hostname. Thus, unless you override ``local`` yourself, a copy of the downloaded file will be stored in (relative) directories named after each host in the group. .. warning:: Using file-like objects as the ``local`` argument is not currently supported, as it would be equivalent to supplying that same object to a series of individual ``get()`` calls. :returns: a `.GroupResult` whose values are `.transfer.Result` instances. .. versionadded:: 2.6 """ # TODO: consider a backwards incompat change after we drop Py2 that # just makes a lot of these kwarg-only methods? then below could become # kwargs.setdefault() if desired. # TODO: do we care enough to handle explicitly given, yet falsey, # values? it's a lot more complexity for a corner case. if len(args) < 2 and "local" not in kwargs: kwargs["local"] = "{host}/" return self._do("get", *args, **kwargs) def close(self): """ Executes `.Connection.close` on all member `Connections <.Connection>`. .. 
        versionadded:: 2.4
        """
        for cxn in self:
            cxn.close()

    def __enter__(self):
        # Context-manager support: entering yields the group itself...
        return self

    def __exit__(self, *exc):
        # ...and exiting closes all member connections.
        self.close()


class SerialGroup(Group):
    """
    Subclass of `.Group` which executes in simple, serial fashion.

    .. versionadded:: 2.0
    """

    def _do(self, method, *args, **kwargs):
        # Invoke the named Connection method on each member in order,
        # collecting per-connection results; exceptions are captured (not
        # propagated immediately) so every member still gets a turn.
        results = GroupResult()
        excepted = False
        for cxn in self:
            try:
                results[cxn] = getattr(cxn, method)(*args, **kwargs)
            except Exception as e:
                results[cxn] = e
                excepted = True
        if excepted:
            # Aggregate failure: carries the full result map for inspection.
            raise GroupException(results)
        return results


def thread_worker(cxn, queue, method, args, kwargs):
    # Thread body: run the requested Connection method and hand the
    # (connection, result) pair back via the shared queue.
    result = getattr(cxn, method)(*args, **kwargs)
    # TODO: namedtuple or attrs object?
    queue.put((cxn, result))


class ThreadingGroup(Group):
    """
    Subclass of `.Group` which uses threading to execute concurrently.

    .. versionadded:: 2.0
    """

    def _do(self, method, *args, **kwargs):
        results = GroupResult()
        queue = Queue()
        threads = []
        # One worker thread per member connection.
        for cxn in self:
            thread = ExceptionHandlingThread(
                target=thread_worker,
                kwargs=dict(
                    cxn=cxn,
                    queue=queue,
                    method=method,
                    args=args,
                    kwargs=kwargs,
                ),
            )
            threads.append(thread)
        for thread in threads:
            thread.start()
        for thread in threads:
            # TODO: configurable join timeout
            thread.join()
        # Get non-exception results from queue
        while not queue.empty():
            # TODO: io-sleep? shouldn't matter if all threads are now joined
            cxn, result = queue.get(block=False)
            # TODO: outstanding musings about how exactly aggregate results
            # ought to ideally operate...heterogenous obj like this, multiple
            # objs, ??
            results[cxn] = result
        # Get exceptions from the threads themselves.
        # TODO: in a non-thread setup, this would differ, e.g.:
        # - a queue if using multiprocessing
        # - some other state-passing mechanism if using e.g. coroutines
        # - ???
        excepted = False
        for thread in threads:
            wrapper = thread.exception()
            if wrapper is not None:
                # Outer kwargs is Thread instantiation kwargs, inner is kwargs
                # passed to thread target/body.
cxn = wrapper.kwargs["kwargs"]["cxn"] results[cxn] = wrapper.value excepted = True if excepted: raise GroupException(results) return results class GroupResult(dict): """ Collection of results and/or exceptions arising from `.Group` methods. Acts like a dict, but adds a couple convenience methods, to wit: - Keys are the individual `.Connection` objects from within the `.Group`. - Values are either return values / results from the called method (e.g. `.runners.Result` objects), *or* an exception object, if one prevented the method from returning. - Subclasses `dict`, so has all dict methods. - Has `.succeeded` and `.failed` attributes containing sub-dicts limited to just those key/value pairs that succeeded or encountered exceptions, respectively. - Of note, these attributes allow high level logic, e.g. ``if mygroup.run('command').failed`` and so forth. .. versionadded:: 2.0 """ def __init__(self, *args, **kwargs): super(dict, self).__init__(*args, **kwargs) self._successes = {} self._failures = {} def _bifurcate(self): # Short-circuit to avoid reprocessing every access. if self._successes or self._failures: return # TODO: if we ever expect .succeeded/.failed to be useful before a # GroupResult is fully initialized, this needs to become smarter. for key, value in self.items(): if isinstance(value, BaseException): self._failures[key] = value else: self._successes[key] = value @property def succeeded(self): """ A sub-dict containing only successful results. .. versionadded:: 2.0 """ self._bifurcate() return self._successes @property def failed(self): """ A sub-dict containing only failed results. .. versionadded:: 2.0 """ self._bifurcate() return self._failures fabric-2.6.0/fabric/main.py000066400000000000000000000147771400143053200155010ustar00rootroot00000000000000""" CLI entrypoint & parser configuration. Builds on top of Invoke's core functionality for same. 
""" import getpass from invoke import Argument, Collection, Program from invoke import __version__ as invoke from paramiko import __version__ as paramiko from . import __version__ as fabric from . import Config, Executor class Fab(Program): def print_version(self): super(Fab, self).print_version() print("Paramiko {}".format(paramiko)) print("Invoke {}".format(invoke)) def core_args(self): core_args = super(Fab, self).core_args() my_args = [ Argument( names=("H", "hosts"), help="Comma-separated host name(s) to execute tasks against.", ), Argument( names=("i", "identity"), kind=list, # Same as OpenSSH, can give >1 key # TODO: automatically add hint about iterable-ness to Invoke # help display machinery? help="Path to runtime SSH identity (key) file. May be given multiple times.", # noqa ), # TODO: worth having short flags for these prompt args? Argument( names=("prompt-for-login-password",), kind=bool, help="Request an upfront SSH-auth password prompt.", ), Argument( names=("prompt-for-passphrase",), kind=bool, help="Request an upfront SSH key passphrase prompt.", ), Argument( names=("S", "ssh-config"), help="Path to runtime SSH config file.", ), Argument( names=("t", "connect-timeout"), kind=int, help="Specifies default connection timeout, in seconds.", ), ] return core_args + my_args @property def _remainder_only(self): # No 'unparsed' (i.e. tokens intended for task contexts), and remainder # (text after a double-dash) implies a contextless/taskless remainder # execution of the style 'fab -H host -- command'. # NOTE: must ALSO check to ensure the double dash isn't being used for # tab completion machinery... return ( not self.core.unparsed and self.core.remainder and not self.args.complete.value ) def load_collection(self): # Stick in a dummy Collection if it looks like we were invoked w/o any # tasks, and with a remainder. 
# This isn't super ideal, but Invoke proper has no obvious "just run my # remainder" use case, so having it be capable of running w/o any task # module, makes no sense. But we want that capability for testing & # things like 'fab -H x,y,z -- mycommand'. if self._remainder_only: # TODO: hm we're probably not honoring project-specific configs in # this branch; is it worth having it assume CWD==project, since # that's often what users expect? Even tho no task collection to # honor the real "lives by task coll"? self.collection = Collection() else: super(Fab, self).load_collection() def no_tasks_given(self): # As above, neuter the usual "hey you didn't give me any tasks, let me # print help for you" behavior, if necessary. if not self._remainder_only: super(Fab, self).no_tasks_given() def create_config(self): # Create config, as parent does, but with lazy=True to avoid our own # SSH config autoload. (Otherwise, we can't correctly load _just_ the # runtime file if one's being given later.) self.config = self.config_class(lazy=True) # However, we don't really want the parent class' lazy behavior (which # skips loading system/global invoke-type conf files) so we manually do # that here to match upstream behavior. self.config.load_base_conf_files() # And merge again so that data is available. # TODO: really need to either A) stop giving fucks about calling # merge() "too many times", or B) make merge() itself determine whether # it needs to run and/or just merge stuff that's changed, so log spam # isn't as bad. self.config.merge() def update_config(self): # Note runtime SSH path, if given, and load SSH configurations. # NOTE: must do parent before our work, in case users want to disable # SSH config loading within a runtime-level conf file/flag. super(Fab, self).update_config(merge=False) self.config.set_runtime_ssh_path(self.args["ssh-config"].value) self.config.load_ssh_config() # Load -i identity file, if given, into connect_kwargs, at overrides # level. 
# TODO: this feels a little gross, but since the parent has already # called load_overrides, this is best we can do for now w/o losing # data. Still feels correct; just might be cleaner to have even more # Config API members around this sort of thing. Shrug. connect_kwargs = {} path = self.args["identity"].value if path: connect_kwargs["key_filename"] = path # Ditto for connect timeout timeout = self.args["connect-timeout"].value if timeout: connect_kwargs["timeout"] = timeout # Secrets prompts that want to happen at handoff time instead of # later/at user-time. # TODO: should this become part of Invoke proper in case other # downstreams have need of it? E.g. a prompt Argument 'type'? We're # already doing a similar thing there for sudo password... if self.args["prompt-for-login-password"].value: prompt = "Enter login password for use with SSH auth: " connect_kwargs["password"] = getpass.getpass(prompt) if self.args["prompt-for-passphrase"].value: prompt = "Enter passphrase for use unlocking SSH keys: " connect_kwargs["passphrase"] = getpass.getpass(prompt) self.config._overrides["connect_kwargs"] = connect_kwargs # Since we gave merge=False above, we must do it ourselves here. (Also # allows us to 'compile' our overrides manipulation.) self.config.merge() # Mostly a concession to testing. def make_program(): return Fab( name="Fabric", version=fabric, executor_class=Executor, config_class=Config, ) program = make_program() fabric-2.6.0/fabric/runners.py000066400000000000000000000130401400143053200162270ustar00rootroot00000000000000from invoke import Runner, pty_size, Result as InvokeResult class Remote(Runner): """ Run a shell command over an SSH connection. This class subclasses `invoke.runners.Runner`; please see its documentation for most public API details. .. note:: `.Remote`'s ``__init__`` method expects a `.Connection` (or subclass) instance for its ``context`` argument. .. 
    versionadded:: 2.0
    """

    def __init__(self, *args, **kwargs):
        """
        Thin wrapper for superclass' ``__init__``; please see it for details.

        Additional keyword arguments defined here are listed below.

        :param bool inline_env:
            Whether to 'inline' shell env vars as prefixed parameters, instead
            of trying to submit them via `.Channel.update_environment`.
            Default:: ``False``.

        .. versionchanged:: 2.3
            Added the ``inline_env`` parameter.
        """
        # NOTE(review): docstring above says the default is ``False``, but the
        # pop() fallback here is ``None`` (falsey, so behavior matches) -
        # confirm which is intended before tightening either side.
        self.inline_env = kwargs.pop("inline_env", None)
        super(Remote, self).__init__(*args, **kwargs)

    def start(self, command, shell, env, timeout=None):
        # Open a fresh exec channel on the wrapped Connection.
        self.channel = self.context.create_session()
        if self.using_pty:
            # NOTE(review): the tuple is unpacked as (rows, cols) but then
            # passed as width=rows/height=cols; the local names look swapped
            # relative to the keyword names - verify against invoke's
            # pty_size() return order before renaming.
            rows, cols = pty_size()
            self.channel.get_pty(width=rows, height=cols)
        if env:
            # TODO: honor SendEnv from ssh_config (but if we do, _should_ we
            # honor it even when prefixing? That would depart from OpenSSH
            # somewhat (albeit as a "what we can do that it cannot" feature...)
            if self.inline_env:
                # TODO: escaping, if we can find a FOOLPROOF THIRD PARTY METHOD
                # for doing so!
                # TODO: switch to using a higher-level generic command
                # prefixing functionality, when implemented.
                parameters = " ".join(
                    ["{}={}".format(k, v) for k, v in sorted(env.items())]
                )
                # NOTE: we can assume 'export' and '&&' relatively safely, as
                # sshd always brings some shell into play, even if it's just
                # /bin/sh.
                command = "export {} && {}".format(parameters, command)
            else:
                self.channel.update_environment(env)
        self.channel.exec_command(command)

    def read_proc_stdout(self, num_bytes):
        # Read up to num_bytes from the remote process' stdout.
        return self.channel.recv(num_bytes)

    def read_proc_stderr(self, num_bytes):
        # Read up to num_bytes from the remote process' stderr.
        return self.channel.recv_stderr(num_bytes)

    def _write_proc_stdin(self, data):
        # Forward local stdin data into the remote process.
        return self.channel.sendall(data)

    def close_proc_stdin(self):
        # Half-close: signal EOF on the remote process' stdin.
        return self.channel.shutdown_write()

    @property
    def process_is_finished(self):
        return self.channel.exit_status_ready()

    def send_interrupt(self, interrupt):
        # NOTE: in v1, we just reraised the KeyboardInterrupt unless a PTY was
        # present; this seems to have been because without a PTY, the
        # below escape sequence is ignored, so all we can do is immediately
        # terminate on our end.
        # NOTE: also in v1, the raising of the KeyboardInterrupt completely
        # skipped all thread joining & cleanup; presumably regular interpreter
        # shutdown suffices to tie everything off well enough.
        if self.using_pty:
            # Submit hex ASCII character 3, aka ETX, which most Unix PTYs
            # interpret as a foreground SIGINT.
            # TODO: is there anything else we can do here to be more portable?
            self.channel.send(u"\x03")
        else:
            raise interrupt

    def returncode(self):
        # Blocks until the remote exit status is available.
        return self.channel.recv_exit_status()

    def generate_result(self, **kwargs):
        # Attach the owning Connection so callers can tell which host a given
        # Result came from (see fabric.runners.Result).
        kwargs["connection"] = self.context
        return Result(**kwargs)

    def stop(self):
        # Guard with hasattr: start() may never have run (e.g. the connection
        # failed before a channel was created).
        if hasattr(self, "channel"):
            self.channel.close()

    def kill(self):
        # Just close the channel immediately, which is about as close as we can
        # get to a local SIGKILL unfortunately.
        # TODO: consider _also_ calling .send_interrupt() and only doing this
        # after another few seconds; but A) kinda fragile/complex and B) would
        # belong in invoke.Runner anyways?
self.channel.close() # TODO: shit that is in fab 1 run() but could apply to invoke.Local too: # * see rest of stuff in _run_command/_execute in operations.py...there is # a bunch that applies generally like optional exit codes, etc # TODO: general shit not done yet # * stdin; Local relies on local process management to ensure stdin is # hooked up; we cannot do that. # * output prefixing # * agent forwarding # * reading at 4096 bytes/time instead of whatever inv defaults to (also, # document why we are doing that, iirc it changed recentlyish via ticket) # * TODO: oh god so much more, go look it up # TODO: shit that has no Local equivalent that we probs need to backfill # into Runner, probably just as a "finish()" or "stop()" (to mirror # start()): # * channel close() # * agent-forward close() class Result(InvokeResult): """ An `invoke.runners.Result` exposing which `.Connection` was run against. Exposes all attributes from its superclass, then adds a ``.connection``, which is simply a reference to the `.Connection` whose method yielded this result. .. versionadded:: 2.0 """ def __init__(self, **kwargs): connection = kwargs.pop("connection") super(Result, self).__init__(**kwargs) self.connection = connection # TODO: have useful str/repr differentiation from invoke.Result, # transfer.Result etc. fabric-2.6.0/fabric/tasks.py000066400000000000000000000111641400143053200156650ustar00rootroot00000000000000import invoke from .connection import Connection class Task(invoke.Task): """ Extends `invoke.tasks.Task` with knowledge of target hosts and similar. As `invoke.tasks.Task` relegates documentation responsibility to its `@task ` expression, so we relegate most details to our version of `@task ` - please see its docs for details. .. versionadded:: 2.1 """ def __init__(self, *args, **kwargs): # Pull out our own kwargs before hitting super, which will TypeError on # anything it doesn't know about. 
self.hosts = kwargs.pop("hosts", None) super(Task, self).__init__(*args, **kwargs) def task(*args, **kwargs): """ Wraps/extends Invoke's `@task ` with extra kwargs. See `the Invoke-level API docs ` for most details; this Fabric-specific implementation adds the following additional keyword arguments: :param hosts: An iterable of host-connection specifiers appropriate for eventually instantiating a `.Connection`. The existence of this argument will trigger automatic parameterization of the task when invoked from the CLI, similar to the behavior of :option:`--hosts`. .. note:: This parameterization is "lower-level" than that driven by :option:`--hosts`: if a task decorated with this parameter is executed in a session where :option:`--hosts` was given, the CLI-driven value will win out. List members may be one of: - A string appropriate for being the first positional argument to `.Connection` - see its docs for details, but these are typically shorthand-only convenience strings like ``hostname.example.com`` or ``user@host:port``. - A dictionary appropriate for use as keyword arguments when instantiating a `.Connection`. Useful for values that don't mesh well with simple strings (e.g. statically defined IPv6 addresses) or to bake in more complex info (eg ``connect_timeout``, ``connect_kwargs`` params like auth info, etc). These two value types *may* be mixed together in the same list, though we recommend that you keep things homogenous when possible, to avoid confusion when debugging. .. note:: No automatic deduplication of values is performed; if you pass in multiple references to the same effective target host, the wrapped task will execute on that host multiple times (including making separate connections). .. versionadded:: 2.1 """ # Override klass to be our own Task, not Invoke's, unless somebody gave it # explicitly. 
kwargs.setdefault("klass", Task) return invoke.task(*args, **kwargs) class ConnectionCall(invoke.Call): """ Subclass of `invoke.tasks.Call` that generates `Connections <.Connection>`. """ def __init__(self, *args, **kwargs): """ Creates a new `.ConnectionCall`. Performs minor extensions to `~invoke.tasks.Call` -- see its docstring for most details. Only specific-to-subclass params are documented here. :param dict init_kwargs: Keyword arguments used to create a new `.Connection` when the wrapped task is executed. Default: ``None``. """ init_kwargs = kwargs.pop("init_kwargs") # , None) super(ConnectionCall, self).__init__(*args, **kwargs) self.init_kwargs = init_kwargs def clone_kwargs(self): # Extend superclass clone_kwargs to work in init_kwargs. # TODO: this pattern comes up a lot; is there a better way to handle it # without getting too crazy on the metaprogramming/over-engineering? # Maybe something attrs library can help with (re: declaring "These are # my bag-of-attributes attributes I want common stuff done to/with") kwargs = super(ConnectionCall, self).clone_kwargs() kwargs["init_kwargs"] = self.init_kwargs return kwargs def make_context(self, config): kwargs = self.init_kwargs # TODO: what about corner case of a decorator giving config in a hosts # kwarg member?! For now let's stomp on it, and then if somebody runs # into it, we can identify the use case & decide how best to deal. 
kwargs["config"] = config return Connection(**kwargs) def __repr__(self): ret = super(ConnectionCall, self).__repr__() if self.init_kwargs: ret = ret[:-1] + ", host='{}'>".format(self.init_kwargs["host"]) return ret fabric-2.6.0/fabric/testing/000077500000000000000000000000001400143053200156405ustar00rootroot00000000000000fabric-2.6.0/fabric/testing/__init__.py000066400000000000000000000000001400143053200177370ustar00rootroot00000000000000fabric-2.6.0/fabric/testing/base.py000066400000000000000000000347311400143053200171340ustar00rootroot00000000000000""" This module contains helpers/fixtures to assist in testing Fabric-driven code. It is not intended for production use, and pulls in some test-oriented dependencies such as `mock `_. You can install an 'extra' variant of Fabric to get these dependencies if you aren't already using them for your own testing purposes: ``pip install fabric[testing]``. .. note:: If you're using pytest for your test suite, you may be interested in grabbing ``fabric[pytest]`` instead, which encompasses the dependencies of both this module and the `fabric.testing.fixtures` module, which contains pytest fixtures. .. versionadded:: 2.1 """ from itertools import chain, repeat from io import BytesIO import os try: from mock import Mock, PropertyMock, call, patch, ANY except ImportError: import warnings warning = ( "You appear to be missing some optional test-related dependencies;" "please 'pip install fabric[testing]'." ) warnings.warn(warning, ImportWarning) raise class Command(object): """ Data record specifying params of a command execution to mock/expect. :param str cmd: Command string to expect. If not given, no expectations about the command executed will be set up. Default: ``None``. :param bytes out: Data yielded as remote stdout. Default: ``b""``. :param bytes err: Data yielded as remote stderr. Default: ``b""``. :param int exit: Remote exit code. Default: ``0``. 
:param int waits: Number of calls to the channel's ``exit_status_ready`` that should return ``False`` before it then returns ``True``. Default: ``0`` (``exit_status_ready`` will return ``True`` immediately). .. versionadded:: 2.1 """ def __init__(self, cmd=None, out=b"", err=b"", in_=None, exit=0, waits=0): self.cmd = cmd self.out = out self.err = err self.in_ = in_ self.exit = exit self.waits = waits def __repr__(self): # TODO: just leverage attrs, maybe vendored into Invoke so we don't # grow more dependencies? Ehhh return "<{} cmd={!r}>".format(self.__class__.__name__, self.cmd) class MockChannel(Mock): """ Mock subclass that tracks state for its ``recv(_stderr)?`` methods. Turns out abusing function closures inside MockRemote to track this state only worked for 1 command per session! .. versionadded:: 2.1 """ def __init__(self, *args, **kwargs): # TODO: worth accepting strings and doing the BytesIO setup ourselves? # Stored privately to avoid any possible collisions ever. shrug. object.__setattr__(self, "__stdout", kwargs.pop("stdout")) object.__setattr__(self, "__stderr", kwargs.pop("stderr")) # Stdin less private so it can be asserted about object.__setattr__(self, "_stdin", BytesIO()) super(MockChannel, self).__init__(*args, **kwargs) def _get_child_mock(self, **kwargs): # Don't return our own class on sub-mocks. return Mock(**kwargs) def recv(self, count): return object.__getattribute__(self, "__stdout").read(count) def recv_stderr(self, count): return object.__getattribute__(self, "__stderr").read(count) def sendall(self, data): return object.__getattribute__(self, "_stdin").write(data) class Session(object): """ A mock remote session of a single connection and 1 or more command execs. Allows quick configuration of expected remote state, and also helps generate the necessary test mocks used by `MockRemote` itself. Only useful when handed into `MockRemote`. 
The parameters ``cmd``, ``out``, ``err``, ``exit`` and ``waits`` are all shorthand for the same constructor arguments for a single anonymous `.Command`; see `.Command` for details. To give fully explicit `.Command` objects, use the ``commands`` parameter. :param str user: :param str host: :param int port: Sets up expectations that a connection will be generated to the given user, host and/or port. If ``None`` (default), no expectations are generated / any value is accepted. :param commands: Iterable of `.Command` objects, used when mocking nontrivial sessions involving >1 command execution per host. Default: ``None``. .. note:: Giving ``cmd``, ``out`` etc alongside explicit ``commands`` is not allowed and will result in an error. .. versionadded:: 2.1 """ def __init__( self, host=None, user=None, port=None, commands=None, cmd=None, out=None, in_=None, err=None, exit=None, waits=None, ): # Sanity check params = cmd or out or err or exit or waits if commands and params: raise ValueError( "You can't give both 'commands' and individual " "Command parameters!" ) # noqa # Fill in values self.host = host self.user = user self.port = port self.commands = commands if params: # Honestly dunno which is dumber, this or duplicating Command's # default kwarg values in this method's signature...sigh kwargs = {} if cmd is not None: kwargs["cmd"] = cmd if out is not None: kwargs["out"] = out if err is not None: kwargs["err"] = err if in_ is not None: kwargs["in_"] = in_ if exit is not None: kwargs["exit"] = exit if waits is not None: kwargs["waits"] = waits self.commands = [Command(**kwargs)] if not self.commands: self.commands = [Command()] def generate_mocks(self): """ Mocks `~paramiko.client.SSHClient` and `~paramiko.channel.Channel`. 
Specifically, the client will expect itself to be connected to ``self.host`` (if given), the channels will be associated with the client's `~paramiko.transport.Transport`, and the channels will expect/provide command-execution behavior as specified on the `.Command` objects supplied to this `.Session`. The client is then attached as ``self.client`` and the channels as ``self.channels``. :returns: ``None`` - this is mostly a "deferred setup" method and callers will just reference the above attributes (and call more methods) as needed. .. versionadded:: 2.1 """ client = Mock() transport = client.get_transport.return_value # another Mock # NOTE: this originally did chain([False], repeat(True)) so that # get_transport().active was False initially, then True. However, # because we also have to consider when get_transport() comes back None # (which it does initially), the case where we get back a non-None # transport _and_ it's not active yet, isn't useful to test, and # complicates text expectations. So we don't, for now. actives = repeat(True) # NOTE: setting PropertyMocks on a mock's type() is apparently # How It Must Be Done, otherwise it sets the real attr value. type(transport).active = PropertyMock(side_effect=actives) channels = [] for command in self.commands: # Mock of a Channel instance, not e.g. Channel-the-class. # Specifically, one that can track individual state for recv*(). channel = MockChannel( stdout=BytesIO(command.out), stderr=BytesIO(command.err) ) channel.recv_exit_status.return_value = command.exit # If requested, make exit_status_ready return False the first N # times it is called in the wait() loop. readies = chain(repeat(False, command.waits), repeat(True)) channel.exit_status_ready.side_effect = readies channels.append(channel) # Have our transport yield those channel mocks in order when # open_session() is called. 
transport.open_session.side_effect = channels self.client = client self.channels = channels def sanity_check(self): # Per-session we expect a single transport get transport = self.client.get_transport transport.assert_called_once_with() # And a single connect to our target host. self.client.connect.assert_called_once_with( username=self.user or ANY, hostname=self.host or ANY, port=self.port or ANY, ) # Calls to open_session will be 1-per-command but are on transport, not # channel, so we can only really inspect how many happened in # aggregate. Save a list for later comparison to call_args. session_opens = [] for channel, command in zip(self.channels, self.commands): # Expect an open_session for each command exec session_opens.append(call()) # Expect that the channel gets an exec_command channel.exec_command.assert_called_with(command.cmd or ANY) # Expect written stdin, if given if command.in_: assert channel._stdin.getvalue() == command.in_ # Make sure open_session was called expected number of times. calls = transport.return_value.open_session.call_args_list assert calls == session_opens class MockRemote(object): """ Class representing mocked remote state. By default this class is set up for start/stop style patching as opposed to the more common context-manager or decorator approach; this is so it can be used in situations requiring setup/teardown semantics. Defaults to setting up a single anonymous `Session`, so it can be used as a "request & forget" pytest fixture. Users requiring detailed remote session expectations can call methods like `expect`, which wipe that anonymous Session & set up a new one instead. .. versionadded:: 2.1 """ def __init__(self): self.expect_sessions(Session()) # TODO: make it easier to assume single session w/ >1 command? def expect(self, *args, **kwargs): """ Convenience method for creating & 'expect'ing a single `Session`. Returns the single `MockChannel` yielded by that Session. .. 
versionadded:: 2.1 """ return self.expect_sessions(Session(*args, **kwargs))[0] def expect_sessions(self, *sessions): """ Sets the mocked remote environment to expect the given ``sessions``. Returns a list of `MockChannel` objects, one per input `Session`. .. versionadded:: 2.1 """ # First, stop the default session to clean up its state, if it seems to # be running. self.stop() # Update sessions list with new session(s) self.sessions = sessions # And start patching again, returning mocked channels return self.start() def start(self): """ Start patching SSHClient with the stored sessions, returning channels. .. versionadded:: 2.1 """ # Patch SSHClient so the sessions' generated mocks can be set as its # return values self.patcher = patcher = patch("fabric.connection.SSHClient") SSHClient = patcher.start() # Mock clients, to be inspected afterwards during sanity-checks clients = [] for session in self.sessions: session.generate_mocks() clients.append(session.client) # Each time the mocked SSHClient class is instantiated, it will # yield one of our mocked clients (w/ mocked transport & channel) # generated above. SSHClient.side_effect = clients return list(chain.from_iterable(x.channels for x in self.sessions)) def stop(self): """ Stop patching SSHClient. .. versionadded:: 2.1 """ # Short circuit if we don't seem to have start()ed yet. if not hasattr(self, "patcher"): return # Stop patching SSHClient self.patcher.stop() def sanity(self): """ Run post-execution sanity checks (usually 'was X called' tests.) .. versionadded:: 2.1 """ for session in self.sessions: # Basic sanity tests about transport, channel etc session.sanity_check() # TODO: unify with the stuff in paramiko itself (now in its tests/conftest.py), # they're quite distinct and really shouldn't be. class MockSFTP(object): """ Class managing mocked SFTP remote state. Used in start/stop fashion in eg doctests; wrapped in the SFTP fixtures in conftest.py for main use. .. 
versionadded:: 2.1 """ def __init__(self, autostart=True): if autostart: self.start() def start(self): # Set up mocks self.os_patcher = patch("fabric.transfer.os") self.client_patcher = patch("fabric.connection.SSHClient") self.path_patcher = patch("fabric.transfer.Path") mock_os = self.os_patcher.start() Client = self.client_patcher.start() self.path_patcher.start() sftp = Client.return_value.open_sftp.return_value # Handle common filepath massage actions; tests will assume these. def fake_abspath(path): # Run normpath to avoid tests not seeing abspath wrinkles (like # trailing slash chomping) return "/local/{}".format(os.path.normpath(path)) mock_os.path.abspath.side_effect = fake_abspath sftp.getcwd.return_value = "/remote" # Ensure stat st_mode is a real number; Python 2 stat.S_IMODE doesn't # appear to care if it's handed a MagicMock, but Python 3's does (?!) fake_mode = 0o644 # arbitrary real-ish mode sftp.stat.return_value.st_mode = fake_mode mock_os.stat.return_value.st_mode = fake_mode # Not super clear to me why the 'wraps' functionality in mock isn't # working for this :( reinstate a bunch of os(.path) so it still works mock_os.sep = os.sep for name in ("basename", "split", "join", "normpath"): getattr(mock_os.path, name).side_effect = getattr(os.path, name) # Return the sftp and OS mocks for use by decorator use case. return sftp, mock_os def stop(self): self.os_patcher.stop() self.client_patcher.stop() self.path_patcher.stop() fabric-2.6.0/fabric/testing/fixtures.py000066400000000000000000000126441400143053200200720ustar00rootroot00000000000000""" `pytest `_ fixtures for easy use of Fabric test helpers. To get Fabric plus this module's dependencies (as well as those of the main `fabric.testing.base` module which these fixtures wrap), ``pip install fabric[pytest]``. The simplest way to get these fixtures loaded into your test suite so Pytest notices them is to import them into a ``conftest.py`` (`docs `_). 
For example, if you intend to use the `remote` and `client` fixtures:: from fabric.testing.fixtures import client, remote .. versionadded:: 2.1 """ try: from pytest import fixture from mock import patch, Mock except ImportError: import warnings warning = ( "You appear to be missing some optional test-related dependencies;" "please 'pip install fabric[pytest]'." ) warnings.warn(warning, ImportWarning) raise from .. import Connection from ..transfer import Transfer # TODO: if we find a lot of people somehow ending up _with_ pytest but # _without_ mock and other deps from testing.base, consider doing the # try/except here too. But, really? from .base import MockRemote, MockSFTP @fixture def connection(): """ Yields a `.Connection` object with mocked methods. Specifically: - the hostname is set to ``"host"`` and the username to ``"user"``; - the primary API members (`.Connection.run`, `.Connection.local`, etc) are replaced with ``mock.Mock`` instances; - the ``run.in_stream`` config option is set to ``False`` to avoid attempts to read from stdin (which typically plays poorly with pytest and other capturing test runners); .. versionadded:: 2.1 """ c = Connection(host="host", user="user") c.config.run.in_stream = False c.run = Mock() c.local = Mock() # TODO: rest of API should get mocked too # TODO: is there a nice way to mesh with MockRemote et al? Is that ever # really that useful for code that just wants to assert about how run() and # friends were called? yield c #: A convenience rebinding of `connection`. #: #: .. versionadded:: 2.1 cxn = connection @fixture def remote(): """ Fixture allowing setup of a mocked remote session & access to sub-mocks. Yields a `.MockRemote` object (which may need to be updated via `.MockRemote.expect`, `.MockRemote.expect_sessions`, etc; otherwise a default session will be used) & calls `.MockRemote.sanity` and `.MockRemote.stop` on teardown. .. 
versionadded:: 2.1 """ remote = MockRemote() yield remote remote.sanity() remote.stop() @fixture def sftp(): """ Fixture allowing setup of a mocked remote SFTP session. Yields a 3-tuple of: Transfer() object, SFTPClient object, and mocked OS module. For many/most tests which only want the Transfer and/or SFTPClient objects, see `sftp_objs` and `transfer` which wrap this fixture. .. versionadded:: 2.1 """ mock = MockSFTP(autostart=False) client, mock_os = mock.start() # Regular ol transfer to save some time transfer = Transfer(Connection("host")) yield transfer, client, mock_os # TODO: old mock_sftp() lacked any 'stop'...why? feels bad man @fixture def sftp_objs(sftp): """ Wrapper for `sftp` which only yields the Transfer and SFTPClient. .. versionadded:: 2.1 """ yield sftp[:2] @fixture def transfer(sftp): """ Wrapper for `sftp` which only yields the Transfer object. .. versionadded:: 2.1 """ yield sftp[0] @fixture def client(): """ Mocks `~paramiko.client.SSHClient` for testing calls to ``connect()``. Yields a mocked ``SSHClient`` instance. This fixture updates `~paramiko.client.SSHClient.get_transport` to return a mock that appears active on first check, then inactive after, matching most tests' needs by default: - `.Connection` instantiates, with a None ``.transport``. - Calls to ``.open()`` test ``.is_connected``, which returns ``False`` when ``.transport`` is falsey, and so the first open will call ``SSHClient.connect`` regardless. - ``.open()`` then sets ``.transport`` to ``SSHClient.get_transport()``, so ``Connection.transport`` is effectively ``client.get_transport.return_value``. - Subsequent activity will want to think the mocked SSHClient is "connected", meaning we want the mocked transport's ``.active`` to be ``True``. - This includes `.Connection.close`, which short-circuits if ``.is_connected``; having a statically ``True`` active flag means a full open -> close cycle will run without error. 
(Only tests that double-close or double-open should have issues here.) End result is that: - ``.is_connected`` behaves False after instantiation and before ``.open``, then True after ``.open`` - ``.close`` will work normally on 1st call - ``.close`` will behave "incorrectly" on subsequent calls (since it'll think connection is still live.) Tests that check the idempotency of ``.close`` will need to tweak their mock mid-test. For 'full' fake remote session interaction (i.e. stdout/err reading/writing, channel opens, etc) see `remote`. .. versionadded:: 2.1 """ with patch("fabric.connection.SSHClient") as SSHClient: client = SSHClient.return_value client.get_transport.return_value = Mock(active=True) yield client fabric-2.6.0/fabric/transfer.py000066400000000000000000000347631400143053200163760ustar00rootroot00000000000000""" File transfer via SFTP and/or SCP. """ import os import posixpath import stat try: from pathlib import Path except ImportError: from pathlib2 import Path from .util import debug # TODO: actual logging! LOL # TODO: figure out best way to direct folks seeking rsync, to patchwork's rsync # call (which needs updating to use invoke.run() & fab 2 connection methods, # but is otherwise suitable). # UNLESS we want to try and shoehorn it into this module after all? Delegate # any recursive get/put to it? Requires users to have rsync available of # course. class Transfer(object): """ `.Connection`-wrapping class responsible for managing file upload/download. .. versionadded:: 2.0 """ # TODO: SFTP clear default, but how to do SCP? subclass? init kwarg? def __init__(self, connection): self.connection = connection @property def sftp(self): return self.connection.sftp() def is_remote_dir(self, path): try: return stat.S_ISDIR(self.sftp.stat(path).st_mode) except IOError: return False def get(self, remote, local=None, preserve_mode=True): """ Copy a file from wrapped connection's host to the local filesystem. :param str remote: Remote file to download. 
May be absolute, or relative to the remote working directory. .. note:: Most SFTP servers set the remote working directory to the connecting user's home directory, and (unlike most shells) do *not* expand tildes (``~``). For example, instead of saying ``get("~/tmp/archive.tgz")``, say ``get("tmp/archive.tgz")``. :param local: Local path to store downloaded file in, or a file-like object. **If None or another 'falsey'/empty value is given** (the default), the remote file is downloaded to the current working directory (as seen by `os.getcwd`) using its remote filename. (This is equivalent to giving ``"{basename}"``; see the below subsection on interpolation.) **If a string is given**, it should be a path to a local directory or file and is subject to similar behavior as that seen by common Unix utilities or OpenSSH's ``sftp`` or ``scp`` tools. For example, if the local path is a directory, the remote path's base filename will be added onto it (so ``get('foo/bar/file.txt', '/tmp/')`` would result in creation or overwriting of ``/tmp/file.txt``). This path will be **interpolated** with some useful parameters, using `str.format`: - The `.Connection` object's ``host``, ``user`` and ``port`` attributes. - The ``basename`` and ``dirname`` of the ``remote`` path, as derived by `os.path` (specifically, its ``posixpath`` flavor, so that the resulting values are useful on remote POSIX-compatible SFTP servers even if the local client is Windows). - Thus, for example, ``"/some/path/{user}@{host}/{basename}"`` will yield different local paths depending on the properties of both the connection and the remote path. .. note:: If nonexistent directories are present in this path (including the final path component, if it ends in `os.sep`) they will be created automatically using `os.makedirs`. **If a file-like object is given**, the contents of the remote file are simply written into it. 
:param bool preserve_mode: Whether to `os.chmod` the local file so it matches the remote file's mode (default: ``True``). :returns: A `.Result` object. .. versionadded:: 2.0 .. versionchanged:: 2.6 Added ``local`` path interpolation of connection & remote file attributes. .. versionchanged:: 2.6 Create missing ``local`` directories automatically. """ # TODO: how does this API change if we want to implement # remote-to-remote file transfer? (Is that even realistic?) # TODO: callback support # TODO: how best to allow changing the behavior/semantics of # remote/local (e.g. users might want 'safer' behavior that complains # instead of overwriting existing files) - this likely ties into the # "how to handle recursive/rsync" and "how to handle scp" questions # Massage remote path if not remote: raise ValueError("Remote path must not be empty!") orig_remote = remote remote = posixpath.join( self.sftp.getcwd() or self.sftp.normalize("."), remote ) # Massage local path orig_local = local is_file_like = hasattr(local, "write") and callable(local.write) remote_filename = posixpath.basename(remote) if not local: local = remote_filename # Path-driven local downloads need interpolation, abspath'ing & # directory creation if not is_file_like: local = local.format( host=self.connection.host, user=self.connection.user, port=self.connection.port, dirname=posixpath.dirname(remote), basename=remote_filename, ) # Must treat dir vs file paths differently, lest we erroneously # mkdir what was intended as a filename, and so that non-empty # dir-like paths still get remote filename tacked on. if local.endswith(os.sep): dir_path = local local = os.path.join(local, remote_filename) else: dir_path, _ = os.path.split(local) local = os.path.abspath(local) Path(dir_path).mkdir(parents=True, exist_ok=True) # TODO: reimplement mkdir (or otherwise write a testing function) # allowing us to track what was created so we can revert if # transfer fails. 
# TODO: Alternately, transfer to temp location and then move, but # that's basically inverse of v1's sudo-put which gets messy # Run Paramiko-level .get() (side-effects only. womp.) # TODO: push some of the path handling into Paramiko; it should be # responsible for dealing with path cleaning etc. # TODO: probably preserve warning message from v1 when overwriting # existing files. Use logging for that obviously. # # If local appears to be a file-like object, use sftp.getfo, not get if is_file_like: self.sftp.getfo(remotepath=remote, fl=local) else: self.sftp.get(remotepath=remote, localpath=local) # Set mode to same as remote end # TODO: Push this down into SFTPClient sometime (requires backwards # incompat release.) if preserve_mode: remote_mode = self.sftp.stat(remote).st_mode mode = stat.S_IMODE(remote_mode) os.chmod(local, mode) # Return something useful return Result( orig_remote=orig_remote, remote=remote, orig_local=orig_local, local=local, connection=self.connection, ) def put(self, local, remote=None, preserve_mode=True): """ Upload a file from the local filesystem to the current connection. :param local: Local path of file to upload, or a file-like object. **If a string is given**, it should be a path to a local (regular) file (not a directory). .. note:: When dealing with nonexistent file paths, normal Python file handling concerns come into play - for example, trying to upload a nonexistent ``local`` path will typically result in an `OSError`. **If a file-like object is given**, its contents are written to the remote file path. :param str remote: Remote path to which the local file will be written. .. note:: Most SFTP servers set the remote working directory to the connecting user's home directory, and (unlike most shells) do *not* expand tildes (``~``). For example, instead of saying ``put("archive.tgz", "~/tmp/")``, say ``put("archive.tgz", "tmp/")``. 
In addition, this means that 'falsey'/empty values (such as the default value, ``None``) are allowed and result in uploading to the remote home directory. .. note:: When ``local`` is a file-like object, ``remote`` is required and must refer to a valid file path (not a directory). :param bool preserve_mode: Whether to ``chmod`` the remote file so it matches the local file's mode (default: ``True``). :returns: A `.Result` object. .. versionadded:: 2.0 """ if not local: raise ValueError("Local path must not be empty!") is_file_like = hasattr(local, "write") and callable(local.write) # Massage remote path orig_remote = remote if is_file_like: local_base = getattr(local, "name", None) else: local_base = os.path.basename(local) if not remote: if is_file_like: raise ValueError( "Must give non-empty remote path when local is a file-like object!" # noqa ) else: remote = local_base debug("Massaged empty remote path into {!r}".format(remote)) elif self.is_remote_dir(remote): # non-empty local_base implies a) text file path or b) FLO which # had a non-empty .name attribute. huzzah! if local_base: remote = posixpath.join(remote, local_base) else: if is_file_like: raise ValueError( "Can't put a file-like-object into a directory unless it has a non-empty .name attribute!" # noqa ) else: # TODO: can we ever really end up here? 
implies we want to # reorganize all this logic so it has fewer potential holes raise ValueError( "Somehow got an empty local file basename ({!r}) when uploading to a directory ({!r})!".format( # noqa local_base, remote ) ) prejoined_remote = remote remote = posixpath.join( self.sftp.getcwd() or self.sftp.normalize("."), remote ) if remote != prejoined_remote: msg = "Massaged relative remote path {!r} into {!r}" debug(msg.format(prejoined_remote, remote)) # Massage local path orig_local = local if not is_file_like: local = os.path.abspath(local) if local != orig_local: debug( "Massaged relative local path {!r} into {!r}".format( orig_local, local ) ) # noqa # Run Paramiko-level .put() (side-effects only. womp.) # TODO: push some of the path handling into Paramiko; it should be # responsible for dealing with path cleaning etc. # TODO: probably preserve warning message from v1 when overwriting # existing files. Use logging for that obviously. # # If local appears to be a file-like object, use sftp.putfo, not put if is_file_like: msg = "Uploading file-like object {!r} to {!r}" debug(msg.format(local, remote)) pointer = local.tell() try: local.seek(0) self.sftp.putfo(fl=local, remotepath=remote) finally: local.seek(pointer) else: debug("Uploading {!r} to {!r}".format(local, remote)) self.sftp.put(localpath=local, remotepath=remote) # Set mode to same as local end # TODO: Push this down into SFTPClient sometime (requires backwards # incompat release.) if preserve_mode: local_mode = os.stat(local).st_mode mode = stat.S_IMODE(local_mode) self.sftp.chmod(remote, mode) # Return something useful return Result( orig_remote=orig_remote, remote=remote, orig_local=orig_local, local=local, connection=self.connection, ) class Result(object): """ A container for information about the result of a file transfer. See individual attribute/method documentation below for details. .. 
note:: Unlike similar classes such as `invoke.runners.Result` or `fabric.runners.Result` (which have a concept of "warn and return anyways on failure") this class has no useful truthiness behavior. If a file transfer fails, some exception will be raised, either an `OSError` or an error from within Paramiko. .. versionadded:: 2.0 """ # TODO: how does this differ from put vs get? field stating which? (feels # meh) distinct classes differing, for now, solely by name? (also meh) def __init__(self, local, orig_local, remote, orig_remote, connection): #: The local path the file was saved as, or the object it was saved #: into if a file-like object was given instead. #: #: If a string path, this value is massaged to be absolute; see #: `.orig_local` for the original argument value. self.local = local #: The original value given as the returning method's ``local`` #: argument. self.orig_local = orig_local #: The remote path downloaded from. Massaged to be absolute; see #: `.orig_remote` for the original argument value. self.remote = remote #: The original argument value given as the returning method's #: ``remote`` argument. self.orig_remote = orig_remote #: The `.Connection` object this result was obtained from. self.connection = connection # TODO: ensure str/repr makes it easily differentiable from run() or # local() result objects (and vice versa). fabric-2.6.0/fabric/tunnels.py000066400000000000000000000126201400143053200162260ustar00rootroot00000000000000""" Tunnel and connection forwarding internals. If you're looking for simple, end-user-focused connection forwarding, please see `.Connection`, e.g. `.Connection.forward_local`. """ import errno import select import socket import time from threading import Event from invoke.exceptions import ThreadException from invoke.util import ExceptionHandlingThread class TunnelManager(ExceptionHandlingThread): """ Thread subclass for tunnelling connections over SSH between two endpoints. 
Specifically, one instance of this class is sufficient to sit around forwarding any number of individual connections made to one end of the tunnel or the other. If you need to forward connections between more than one set of ports, you'll end up instantiating multiple TunnelManagers. Wraps a `~paramiko.transport.Transport`, which should already be connected to the remote server. .. versionadded:: 2.0 """ def __init__( self, local_host, local_port, remote_host, remote_port, transport, finished, ): super(TunnelManager, self).__init__() self.local_address = (local_host, local_port) self.remote_address = (remote_host, remote_port) self.transport = transport self.finished = finished def _run(self): # Track each tunnel that gets opened during our lifetime tunnels = [] # Set up OS-level listener socket on forwarded port sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # TODO: why do we want REUSEADDR exactly? and is it portable? sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # NOTE: choosing to deal with nonblocking semantics and a fast loop, # versus an older approach which blocks & expects outer scope to cause # a socket exception by close()ing the socket. 
sock.setblocking(0) sock.bind(self.local_address) sock.listen(1) while not self.finished.is_set(): # Main loop-wait: accept connections on the local listener # NOTE: EAGAIN means "you're nonblocking and nobody happened to # connect at this point in time" try: tun_sock, local_addr = sock.accept() # Set TCP_NODELAY to match OpenSSH's forwarding socket behavior tun_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) except socket.error as e: if e.errno is errno.EAGAIN: # TODO: make configurable time.sleep(0.01) continue raise # Set up direct-tcpip channel on server end # TODO: refactor w/ what's used for gateways channel = self.transport.open_channel( "direct-tcpip", self.remote_address, local_addr ) # Set up 'worker' thread for this specific connection to our # tunnel, plus its dedicated signal event (which will appear as a # public attr, no need to track both independently). finished = Event() tunnel = Tunnel(channel=channel, sock=tun_sock, finished=finished) tunnel.start() tunnels.append(tunnel) exceptions = [] # Propogate shutdown signal to all tunnels & wait for closure # TODO: would be nice to have some output or at least logging here, # especially for "sets up a handful of tunnels" use cases like # forwarding nontrivial HTTP traffic. for tunnel in tunnels: tunnel.finished.set() tunnel.join() wrapper = tunnel.exception() if wrapper: exceptions.append(wrapper) # Handle exceptions if exceptions: raise ThreadException(exceptions) # All we have left to close is our own sock. # TODO: use try/finally? sock.close() class Tunnel(ExceptionHandlingThread): """ Bidirectionally forward data between an SSH channel and local socket. .. 
versionadded:: 2.0 """ def __init__(self, channel, sock, finished): self.channel = channel self.sock = sock self.finished = finished self.socket_chunk_size = 1024 self.channel_chunk_size = 1024 super(Tunnel, self).__init__() def _run(self): try: empty_sock, empty_chan = None, None while not self.finished.is_set(): r, w, x = select.select([self.sock, self.channel], [], [], 1) if self.sock in r: empty_sock = self.read_and_write( self.sock, self.channel, self.socket_chunk_size ) if self.channel in r: empty_chan = self.read_and_write( self.channel, self.sock, self.channel_chunk_size ) if empty_sock or empty_chan: break finally: self.channel.close() self.sock.close() def read_and_write(self, reader, writer, chunk_size): """ Read ``chunk_size`` from ``reader``, writing result to ``writer``. Returns ``None`` if successful, or ``True`` if the read was empty. .. versionadded:: 2.0 """ data = reader.recv(chunk_size) if len(data) == 0: return True writer.sendall(data) fabric-2.6.0/fabric/util.py000066400000000000000000000026461400143053200155220ustar00rootroot00000000000000import logging import sys # Ape the half-assed logging junk from Invoke, but ensuring the logger reflects # our name, not theirs. (Assume most contexts will rely on Invoke itself to # literally enable/disable logging, for now.) log = logging.getLogger("fabric") for x in ("debug",): globals()[x] = getattr(log, x) win32 = sys.platform == "win32" def get_local_user(): """ Return the local executing username, or ``None`` if one can't be found. .. versionadded:: 2.0 """ # TODO: I don't understand why these lines were added outside the # try/except, since presumably it means the attempt at catching ImportError # wouldn't work. However, that's how the contributing user committed it. # Need an older Windows box to test it out, most likely. import getpass username = None # All Unix and most Windows systems support the getpass module. 
try: username = getpass.getuser() # Some SaaS platforms raise KeyError, implying there is no real user # involved. They get the default value of None. except KeyError: pass # Older (?) Windows systems don't support getpass well; they should # have the `win32` module instead. except ImportError: # pragma: nocover if win32: import win32api import win32security # noqa import win32profile # noqa username = win32api.GetUserName() return username fabric-2.6.0/fabric2000077700000000000000000000000001400143053200153342fabricustar00rootroot00000000000000fabric-2.6.0/integration/000077500000000000000000000000001400143053200152605ustar00rootroot00000000000000fabric-2.6.0/integration/_support/000077500000000000000000000000001400143053200171335ustar00rootroot00000000000000fabric-2.6.0/integration/_support/file.txt000066400000000000000000000000041400143053200206050ustar00rootroot00000000000000yup fabric-2.6.0/integration/_support/funky-perms.txt000066400000000000000000000000041400143053200221460ustar00rootroot00000000000000wat fabric-2.6.0/integration/concurrency.py000066400000000000000000000070501400143053200201660ustar00rootroot00000000000000import codecs from invoke.vendor.six.moves.queue import Queue from invoke.vendor.six.moves import zip_longest from invoke.util import ExceptionHandlingThread from pytest import skip from fabric import Connection _words = "/usr/share/dict/words" def _worker(queue, cxn, start, num_words, count, expected): tail = num_words - start cmd = "tail -n {} {} | head -n {}".format(tail, _words, count) stdout = cxn.run(cmd, hide=True).stdout result = [x.strip() for x in stdout.splitlines()] queue.put((cxn, result, expected)) class concurrency: # TODO: still useful to use Group API here? Where does this responsibility # fall between Group and Executor (e.g. phrasing this specifically as a # generic subcase of Invoke level task parameterization)? # TODO: spin up multiple temp SSHDs / Paramiko servers / ??? 
def setup(self): cxn1 = Connection("localhost") cxn2 = Connection("localhost") cxn3 = Connection("localhost") self.cxns = (cxn1, cxn2, cxn3) def connections_objects_do_not_share_connection_state(self): cxn1, cxn2, cxn3 = self.cxns [x.open() for x in self.cxns] # Prove no exterior connection caching, socket reuse, etc # NOTE: would phrase these as chained 'is not' but pep8 linter is being # stupid :( assert cxn1 is not cxn2 assert cxn2 is not cxn3 assert cxn1.client is not cxn2.client assert cxn2.client is not cxn3.client ports = [x.transport.sock.getsockname()[1] for x in self.cxns] assert ports[0] is not ports[1] is not ports[2] def manual_threading_works_okay(self): # TODO: needs https://github.com/pyinvoke/invoke/issues/438 fixed # before it will reliably pass skip() # Kind of silly but a nice base case for "how would someone thread this # stuff; and are there any bizarre gotchas lurking in default # config/context/connection state?" # Specifically, cut up the local (usually 100k's long) words dict into # per-thread chunks, then read those chunks via shell command, as a # crummy "make sure each thread isn't polluting things like stored # stdout" sanity test queue = Queue() # TODO: skip test on Windows or find suitable alternative file with codecs.open(_words, encoding="utf-8") as fd: data = [x.strip() for x in fd.readlines()] threads = [] num_words = len(data) chunksize = len(data) / len(self.cxns) # will be an int, which is fine for i, cxn in enumerate(self.cxns): start = i * chunksize end = max([start + chunksize, num_words]) chunk = data[start:end] kwargs = dict( queue=queue, cxn=cxn, start=start, num_words=num_words, count=len(chunk), expected=chunk, ) thread = ExceptionHandlingThread(target=_worker, kwargs=kwargs) threads.append(thread) for t in threads: t.start() for t in threads: t.join(5) # Kinda slow, but hey, maybe the test runner is hot while not queue.empty(): cxn, result, expected = queue.get(block=False) for resultword, expectedword in 
zip_longest(result, expected): err = u"({2!r}, {3!r}->{4!r}) {0!r} != {1!r}".format( resultword, expectedword, cxn, expected[0], expected[-1] ) assert resultword == expectedword, err fabric-2.6.0/integration/connection.py000066400000000000000000000120211400143053200177650ustar00rootroot00000000000000import os import time from invoke import pty_size, CommandTimedOut from pytest import skip, raises from fabric import Connection, Config # TODO: use pytest markers def skip_outside_travis(): if not os.environ.get("TRAVIS", False): skip() class Connection_: class ssh_connections: def open_method_generates_real_connection(self): c = Connection("localhost") c.open() assert c.client.get_transport().active is True assert c.is_connected is True return c def close_method_closes_connection(self): # Handy shortcut - open things up, then return Connection for us to # close c = self.open_method_generates_real_connection() c.close() assert c.client.get_transport() is None assert c.is_connected is False class run: def simple_command_on_host(self): """ Run command on localhost """ result = Connection("localhost").run("echo foo", hide=True) assert result.stdout == "foo\n" assert result.exited == 0 assert result.ok is True def simple_command_with_pty(self): """ Run command under PTY on localhost """ # Most Unix systems should have stty, which asplodes when not run # under a pty, and prints useful info otherwise result = Connection("localhost").run( "stty size", hide=True, pty=True ) found = result.stdout.strip().split() cols, rows = pty_size() assert tuple(map(int, found)), rows == cols # PTYs use \r\n, not \n, line separation assert "\r\n" in result.stdout assert result.pty is True class local: def wraps_invoke_run(self): # NOTE: most of the interesting tests about this are in # invoke.runners / invoke.integration. 
cxn = Connection("localhost") result = cxn.local("echo foo", hide=True) assert result.stdout == "foo\n" assert not cxn.is_connected # meh way of proving it didn't use SSH def mixed_use_of_local_and_run(self): """ Run command truly locally, and over SSH via localhost """ cxn = Connection("localhost") result = cxn.local("echo foo", hide=True) assert result.stdout == "foo\n" assert not cxn.is_connected # meh way of proving it didn't use SSH yet result = cxn.run("echo foo", hide=True) assert cxn.is_connected # NOW it's using SSH assert result.stdout == "foo\n" class sudo: def setup(self): # NOTE: assumes a user configured for passworded (NOT # passwordless)_sudo, whose password is 'mypass', is executing the # test suite. I.e. our travis-ci setup. config = Config( {"sudo": {"password": "mypass"}, "run": {"hide": True}} ) self.cxn = Connection("localhost", config=config) def sudo_command(self): """ Run command via sudo on host localhost """ skip_outside_travis() assert self.cxn.sudo("whoami").stdout.strip() == "root" def mixed_sudo_and_normal_commands(self): """ Run command via sudo, and not via sudo, on localhost """ skip_outside_travis() logname = os.environ["LOGNAME"] assert self.cxn.run("whoami").stdout.strip() == logname assert self.cxn.sudo("whoami").stdout.strip() == "root" def large_remote_commands_finish_cleanly(self): # Guards against e.g. cleanup finishing before actually reading all # data from the remote end. Which is largely an issue in Invoke-level # code but one that only really manifests when doing stuff over the # network. Yay computers! path = "/usr/share/dict/words" cxn = Connection("localhost") with open(path) as fd: words = [x.strip() for x in fd.readlines()] stdout = cxn.run("cat {}".format(path), hide=True).stdout lines = [x.strip() for x in stdout.splitlines()] # When bug present, # lines received is significantly fewer than the # true count in the file (by thousands). 
assert len(lines) == len(words) class command_timeout: def setup(self): self.cxn = Connection("localhost") def does_not_raise_exception_when_under_timeout(self): assert self.cxn.run("sleep 1", timeout=3) def raises_exception_when_over_timeout(self): with raises(CommandTimedOut) as info: start = time.time() self.cxn.run("sleep 5", timeout=1) elapsed = time.time() - start assert info.value.timeout == 1 # Catch scenarios where we except but don't actually shut down # early (w/ a bit of fudge time for overhead) assert elapsed <= 2 fabric-2.6.0/integration/group.py000066400000000000000000000023341400143053200167700ustar00rootroot00000000000000from socket import gaierror from fabric import ThreadingGroup as Group from fabric.exceptions import GroupException class Group_: def simple_command(self): group = Group("localhost", "127.0.0.1") result = group.run("echo foo", hide=True) outs = [x.stdout.strip() for x in result.values()] assert ["foo", "foo"] == outs def failed_command(self): group = Group("localhost", "127.0.0.1") try: group.run("lolnope", hide=True) except GroupException as e: # GroupException.result -> GroupResult; # GroupResult values will be UnexpectedExit in this case; # UnexpectedExit.result -> Result, and thus .exited etc. exits = [x.result.exited for x in e.result.values()] assert [127, 127] == exits else: assert False, "Did not raise GroupException!" def excepted_command(self): group = Group("nopebadhost1", "nopebadhost2") try: group.run("lolnope", hide=True) except GroupException as e: for value in e.result.values(): assert isinstance(value, gaierror) else: assert False, "Did not raise GroupException!" 
fabric-2.6.0/integration/transfer.py000066400000000000000000000065031400143053200174620ustar00rootroot00000000000000import os import stat from io import BytesIO from py import path from fabric import Connection def _support(*parts): return os.path.join(os.path.dirname(__file__), "_support", *parts) class Transfer_: class get: def setup(self): self.c = Connection("localhost") self.remote = _support("file.txt") def base_case(self, tmpdir): # Copy file from support to tempdir with tmpdir.as_cwd(): result = self.c.get(self.remote) # Make sure it arrived local = tmpdir.join("file.txt") assert local.check() assert local.read() == "yup\n" # Sanity check result object assert result.remote == self.remote assert result.orig_remote == self.remote assert result.local == str(local) assert result.orig_local is None def file_like_objects(self): fd = BytesIO() result = self.c.get(remote=self.remote, local=fd) assert fd.getvalue() == b"yup\n" assert result.remote == self.remote assert result.local is fd def mode_preservation(self, tmpdir): # Use a dummy file which is given an unusual, highly unlikely to be # default umask, set of permissions (oct 641, aka -rw-r----x) local = tmpdir.join("funky-local.txt") remote = tmpdir.join("funky-remote.txt") remote.write("whatever") remote.chmod(0o641) self.c.get(remote=str(remote), local=str(local)) assert stat.S_IMODE(local.stat().mode) == 0o641 class put: def setup(self): self.c = Connection("localhost") self.remote = path.local.mkdtemp().join("file.txt").realpath() def base_case(self): # Copy file from 'local' (support dir) to 'remote' (tempdir) local_dir = _support() with path.local(local_dir).as_cwd(): tmpdir = self.remote.dirpath() # TODO: wrap chdir at the Connection level self.c.sftp().chdir(str(tmpdir)) result = self.c.put("file.txt") # Make sure it arrived assert self.remote.check() assert self.remote.read() == "yup\n" # Sanity check result object assert result.remote == self.remote assert result.orig_remote is None assert 
result.local == _support("file.txt") assert result.orig_local == "file.txt" def file_like_objects(self): fd = BytesIO() fd.write(b"yup\n") remote_str = str(self.remote) result = self.c.put(local=fd, remote=remote_str) assert self.remote.read() == "yup\n" assert result.remote == remote_str assert result.local is fd def mode_preservation(self, tmpdir): # Use a dummy file which is given an unusual, highly unlikely to be # default umask, set of permissions (oct 641, aka -rw-r----x) local = tmpdir.join("funky-local.txt") local.write("whatever") local.chmod(0o641) remote = tmpdir.join("funky-remote.txt") self.c.put(remote=str(remote), local=str(local)) assert stat.S_IMODE(remote.stat().mode) == 0o641 fabric-2.6.0/setup.cfg000066400000000000000000000003201400143053200145510ustar00rootroot00000000000000[wheel] universal = 1 [flake8] exclude = .git,sites ignore = E124,E125,E128,E261,E301,E302,E303,W503 max-line-length = 79 [metadata] license_file = LICENSE [tool:pytest] testpaths = tests python_files = * fabric-2.6.0/setup.py000066400000000000000000000074761400143053200144650ustar00rootroot00000000000000#!/usr/bin/env python import os import setuptools # Enable the option of building/installing Fabric 2.x as "fabric2". This allows # users migrating from 1.x to 2.x to have both in the same process space and # migrate piecemeal. 
# # NOTE: this requires some irritating tomfoolery; to wit: # - the repo has a fabric2/ symlink to fabric/ so that things looking for # fabric2/ will find it OK, whether that's code in here or deeper in # setuptools/wheel/etc # - wheels do _not_ execute this on install, only on generation, so maintainers # just build wheels with the env var below turned on, and those wheels install # 'fabric2' no problem # - sdists execute this _both_ on package creation _and_ on install, so the env # var only helps with inbound package metadata; on install by a user, if they # don't have the env var, they'd end up with errors because this file tries to # look in fabric/, not fabric2/ # - thus, we use a different test that looks locally to see if only one dir # is present, and that overrides the env var test. # # See also sites/www/installing.txt. env_wants_v2 = os.environ.get("PACKAGE_AS_FABRIC2", False) here = os.path.abspath(os.path.dirname(__file__)) fabric2_present = os.path.isdir(os.path.join(here, "fabric2")) fabric_present = os.path.isdir(os.path.join(here, "fabric")) only_v2_present = fabric2_present and not fabric_present package_name = "fabric" binary_name = "fab" if env_wants_v2 or only_v2_present: package_name = "fabric2" binary_name = "fab2" packages = setuptools.find_packages( include=[package_name, "{}.*".format(package_name)] ) # Version info -- read without importing _locals = {} with open(os.path.join(package_name, "_version.py")) as fp: exec(fp.read(), None, _locals) version = _locals["__version__"] # Frankenstein long_description: changelog note + README long_description = """ To find out what's new in this version of Fabric, please see `the changelog `_. 
{} """.format( open("README.rst").read() ) testing_deps = ["mock>=2.0.0,<3.0"] pytest_deps = ["pytest>=3.2.5,<4.0"] setuptools.setup( name=package_name, version=version, description="High level SSH command execution", license="BSD", long_description=long_description, author="Jeff Forcier", author_email="jeff@bitprophet.org", url="http://fabfile.org", install_requires=["invoke>=1.3,<2.0", "paramiko>=2.4", "pathlib2"], extras_require={ "testing": testing_deps, "pytest": testing_deps + pytest_deps, }, packages=packages, entry_points={ "console_scripts": [ "{} = {}.main:program.run".format(binary_name, package_name) ] }, classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Operating System :: POSIX", "Operating System :: Unix", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Topic :: Software Development", "Topic :: Software Development :: Build Tools", "Topic :: Software Development :: Libraries", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Clustering", "Topic :: System :: Software Distribution", "Topic :: System :: Systems Administration", ], ) fabric-2.6.0/sites/000077500000000000000000000000001400143053200140645ustar00rootroot00000000000000fabric-2.6.0/sites/_shared_static/000077500000000000000000000000001400143053200170405ustar00rootroot00000000000000fabric-2.6.0/sites/_shared_static/logo.png000066400000000000000000000144011400143053200205060ustar00rootroot00000000000000‰PNG  IHDR–u¬ÑÎ$iCCPICC Profile8…UßoÛT>‰oR¤? 
XG‡ŠÅ¯US[¹­ÆI“¥íJ¥éØ*$ä:7‰©Û鶪O{7ü@ÙH§kk?ì<Ê»øÎí¾kktüqóÝ‹mÇ6°nÆ¶ÂøØ¯±-ümR;`zŠ–¡Êðv x#=\Ó% ëoàYÐÚRÚ±£¥êùÐ#&Á?È>ÌÒ¹áЪþ¢þ©n¨_¨Ôß;j„;¦$}*}+ý(}'}/ýLŠtYº"ý$]•¾‘.9»ï½Ÿ%Ø{¯_aÝŠ]hÕkŸ5'SNÊ{äå”ü¼ü²<°¹_“§ä½ðì öÍ ý½t ³jMµ{-ñ4%ׯTÅ„«tYÛŸ“¦R6ÈÆØô#§v\œå–Šx:žŠ'H‰ï‹OÄÇâ3·ž¼ø^ø&°¦õþ“0::àm,L%È3â:qVEô t›ÐÍ]~ߢI«vÖ6ÊWÙ¯ª¯) |ʸ2]ÕG‡Í4Ïå(6w¸½Â‹£$¾ƒ"ŽèAÞû¾EvÝ mî[D‡ÿÂ;ëVh[¨}íõ¿Ú†ðN|æ3¢‹õº½âç£Hä‘S:°ßûéKâÝt·Ñx€÷UÏ'D;7ÿ®7;_"ÿÑeó?Yqxl+ pHYs  ÒÝ~üiTXtXML:com.adobe.xmp Adobe Fireworks CS5.1 1 ·B iIDATxí] ŒUÅžWk(H…d­ .lºaQײ+ŠEE ZŒ•R‹Ji°`­´­F¥ÿR)Ñ*¦Å*ØšXP´(5nâOµRe±¢²âÚ­‹Y È&Ð,1Íëù;x÷¾;sgî™;ïí›äå¾;w~ÏýîÌ™sΜÉå)°J¨PÀ0¾`¸¼Jq (PVV(P–²V ­«‚+(=`}²™1üzPèØ{€½ú½%Õã/–Lk LùÖk;øñçMîSÃXÿz–«šÈX¿qŸÇ—ø¿ÍÛö³×þù1{¹eÛ²µ³[ofN¬e7\8¢[œ7¹’7T-W2ö¿bч±ª±²óèú-q:OŸL«6t°WßÝÅöîûTÚÊRWI+ßrcûß•»èaU#Ëûm¯A¦¦pÿšLd5ÇÐÇäiðXû×Óhuuròa$0‰@öÆúŽN^Ž¡œû|ÆV¾ÞÁžxekìÈ$«ò¼¯W³ûfž&K’é3ïy¬üî§ÒÓçÎÕ,O?F[¸ozû#v犷R L¦³F dÓN¯ñvê w¾´€l=˜èÎ5´—èÚ|’Íÿ~£X®va¤jBÌù$ëÒÑLŒÌ&<¨dÀ$|é+Ø _F2Rnçjo‰´ƒ¹Ìí«6K­ÀôOS]R#Sð5ÿ—°‚=?Sèÿ² `ì‡ÞY;t‰KžÙÒíVw5 ñžoêÖ蘛òï0M•ùËhE¹ÊŽ…¯Gt…í×ÉË#Wœï:£n›ÚXà +·P¾ÀâoŠlÏsÝ7bðç6¯˜ë– W6«ÎºìòV€ÂÉy#X®öæL¬V]wþ·G«@]>‚¹ kIæÕCBϱXIv,vËäKø.Þ¬r¹ö\`ñ7ˆUdû"w×i=oV©_½VB› .ôv×>ú¦x‹:í ÎÕ?ËØÐ¹´‚s`‚Sl¾Ýó¸‰îy[FYXxÏùõß«S`Ö2ï‚➘ÛˆÑÖÝ›ôõJTAI‹ô%_Ù 6èw­|+’ÎØ¢>gÂp±eYæÛît#ÿ*S¦>°`My€¤Ç½Óôq´wrš»Ÿme>ß * ]ÜϦ։·OaõøÞÝŒ^2õ`!¶‘Ù3LŸa¥ª+ÄÕV¾<¾„Õ%àMúבCŽsIOvA¤´™¿P›W}\¸~¨SͨNù1¶uI8›ù{˜zh6¶}é˜âý³•âZÀʫݑ¦ŸU' MŽÔö›+ŒT3î_¯eAÀÛåï¬Éµì*š#¼Ý´ýÜþÊQ¢ÄŽlWÊHÐl}Û^†¯Ø£·RǸPX`l߸P¿+æá/ÁâHfÒõy(F¯ù4=FîÓÃÔØq¯kVZ¡æý(i7bóad˜’ø‘Pu¡¤ ¬È)0¶ ¡\ q…nÜ×j‰ð6vôrÅØfêÁ3ýî¥غ7vÄŽLBâÐЧùnbwb‚:°L:ç ~‚ Ë ˜žŠ‹2A‰é¯òcŒ^^Ù½rÄè½å'ö§FžôÛT4‡·üåö"πʄˆH¨âŒD XøJiñ¦^ø4r$aÒ“´àZ9Oà‚ScûmÅ ™$Éò€•¨ý…ÖÊô1áƒKÔ,ß\JÀ22 ŠZÉã½¾¤aÒyU*WïÖi££G¬`$AÏ·ßoWæ¦^Áü´YF†„«þúaªé.ؽ¨ÿ*Ó¡šïø2°¸S‰4Ú“ו¹¯Ùɉ[®ÏÈbÇ»átiîÉ#!ë-Þ¼ë P¼ àgá³B&ÛŠñR§²¼6W˜ôÂGDIÒ#’'ŠÂ—xÏì)Á" &”¹ú§Í Tc¦A×€ ö}É»Òë¿ÛƒåÙù#¿9ÉÒX§±Ñ,™Ó(”Æ–Enru1eYŸ¬@ðž5³¤"‡,Ü&Éš~?b…s˜¾—l›r±£ÝQæ§ú^ØDÑ÷Ää|—„¨ìÀ]O·]å)t«(É›íÝýχd , ?eZ>î8¿×â§x¦¸+ç»â|ӇˑF1íÝÛÔj|gu¸ ª÷§ «’&Vß:i‰Æð*JäDõ2ŸºåòSÅJè@ÚDÁw5¬cùwfÄË»bèáê#ÓéguÕQÒäjâ¸ï#Ø´¼¼ ¶£cê»qù&ëÃ<ø©».¯VݤícD~©¸Fâ\£ÔõØÄš·ìŠ(5»(Яéæs¤ PJHÍ”Æð®¾J©D]Jªt‹¬$b,®è¡Û+ÐOf ÂËSV!´ü;Ñ“6Ãd(¹ûœoãò«¼xÜ ø2 
°¯œ‡€&#iùá’:tÀ(uùøaÊ»µõ€Å[BꌂCŽ}éJÂӨÓø´'qëê«?5ïâQÊDáÝ´r…\Pph„Ž×?¼ÑªÔ\§OÓ”†Áì|rL¢{nO2`…[]bç Ÿƒ «=  \~•Õ²ŸžéŒŸ “FõÞ†/-Õºƒé0ÕÁÄùtâE§nf€l™ä‹D2—öñÅùìõôp%«C]¢ëÐïšhd•iX‚š0Ì/"ÁžŽ(AQJÑ*x¥‚,&Êzêk9ýàìaRÕLÒîÇ˱’–Ü•_äOµ8]2[z¦¤C8» Y]¸N~Nº Ž'Lqµ,jñšVë:¾`ßKŸrÉ_†i3í'È·½3¤üoXY 4ʧºÌ‰sîÉÔë ô|º.#S¾ßBöØ}”&* •aŒÇÊ PèL>Up]Ô¶%ÑSxëJ´l€Ò”ÇEG3ö¥d.NfH×™6¤Ì7žÙ¸Íº&ª£RùL‡ÛHåøÖ€-yT{Dq®ù)ŒÞsÏ?QΔÃnÿ_¿gl÷ºb9¤= ‰€…ÑéI:×xÍÛ3; ¢¡¾Oe ¤ßí"èăŸŠsh«S^\Z釯3P:î›0 ó´ÉyŠNЖÇKž{?“Ñ)Ø)©ý”Îö¬}]°NÝÿUÒ ´ºu!½”&H  (ä |ˆ5³•÷‡* Dºjisæ€Â)3uI¬(—ØAi«úß•m>Úáæd©!4K'¿A>Sµ#Qé°Zò3‚)kÊ¿8lFõqR­:ˆ—Ö‡!¦<çâÕï:Ñ÷aÑrÝ”bÕ v)'Ÿ\¦Cî”åÒ-i±â¬f\IËE—nëæQŠpQQñØ%tð²TD]1é¹¥›>Àc~øK«žr`å"ó© ¬Öíû¢^ƒ“8)ƒN-H<õ‰Zs ÉâSwƒ(Šs±áõ@ sÏ÷ë£G)ÓÓ*…ƒ»EO ñ±ÀúrFÇ–IG)0¢¶¶¸ÓáOpå˜;‰¬fcøPГ»âÃÈÝFGðY˜ö¤<ŒÖÈêþ‚¬v¢¥¼TW•ù]Œ·#OÓ<šVó-WÓÈEn$ÂB0é×<Ül]ä7r£«ù}܂ꨯJ) ,ájCZ¬þÃØ/2PdÁf¾ïëîóm iHÚ(ä%Vmè°ªX½‹.šT}Ó:oÅ_C޶¸É‚ÒªpöCÍV­T‰WÔ›Sb°2‰¤Þ–Ç›8ÑJ°yEÿø“È}UêG X¶¶·«L{ED‹ˆî‚‰HŸ( [´vê¦íªT¦¾Ø>pFÞÆ ðs6‚dc’ ,äo¼a1ÙŒ’.KÒèÈGøJ1uÙ Ô ˜z¨¹Lœ+A×ífØÝRnÑY°9†7OX&d4VÅ{‚+ä7­ä|×öÊH"©O#v°j ˜V¥Ãé Éû˜ø³ •…eõY7­áÅk]­*Ø|¡*»ƒy’ü‡¤~Øm‘|†®ô=Và™¤}¢<`¢ÝU¢|]ñq+ež]XÈ ;jaXŸuöðLvÈdÍw©Nà§Ry¹áoR÷Êy0sG+4E Xȧ7ä;>¦­‰–£W!c¾ Ý”më2ÎO%¥+,Böü)Úv— þRÁ« O® ¬(K åÞC ¾K×ë §ŽÎUâ9«ÆtŠýÞ}Ÿ.QªY8œÊñL“kÛÕÔ]­hÑl­6°xf õ8£_ï#3™êx;”®®ø.‰¼ $ÜÁ¶ÌôG©?.ÍØ'-‡jRPm…›”XႼ¿¸ /»#ûlÑx0²>O#{°º^@‘×[/FâžÈV•>•Ûã€U ¾Žùrš·•€7ISOy{&°ðT6\˜zS=pôê¹ÀhÀw¥5iV_½ÊþLhþÞ! 
/ ].´(ÖCá,è«IåD~ìiY_î¡ìG,ØìÿмÜ@†)·Ð­‘S“Ã`¢•cÁ«Ä‡Øá´%ú§l%Ú a®Ð›+%6 ¦GìÕ“X©ò¤¥v-;`ApçåຎtÉwq´”!ÀÊX*€âï‘_¥ÎDT)æ…™¸BroÈ)‡‰æ¤)£ä%šòT‰“á—®ä]áÆ–ÁV’À‚Þí9bÊ—®}¿›b7ü~Tï¥'´»”w… &ŸŽ”Ë…ÐÓr8‹/÷%,l·zäå”N\×%p,ßåBÏ(kt‰M“%,È V¼Òîd«?l¤î›yZô+v)’ˆnY¬–†¨Âk`EÙ~‰èm2^jÕ ›%[»°u:+ Ã;¢M¤õXÓ¯ÏÌu¦F™ã"”&Þ†nWý^ò_Þªt0ZmÙ*?lQ÷=è¦ßA†Œ¢€ÇW>´‚Ë,À,>= Þ«e;Y0f ‚xòƳãÏÞé: Ú‰®QD‹Ïþ#z’i|¬ï†¬Z7¤Š˜TÇÓ߬ɵê'Ø£}Pd%wU½òöâ˜tEÕy;bá´)¼hWRWMž¬J² /lä¤M.C®ÿé.«S®Ë[`¡uë”;’4!„£ ¯h`+çÓ>:-ªÎï5æYò­5*ê±ù8G+h¥×ÀšPwœ.Š‹À¬Õ7cþ\h:0W÷9Î 6™{rΩØÇà5°pfžéBЦÙ JÚx[ä8ãðôÁ¦á«:×p‰æŠóZŽ…n¦q²&ø¨ùSë²Ù s¸Å6åV2¬±äÅÏÓà=°`3åŽçS‘€Š=$U ™!¹5¢Q “ì¼Ö)ÆVZï…Ž'•ÀcÊ›ÚX#v®o‹ª*å¦Á0Z5¬ó–¿B÷KX:£x²IcªÙÏnd•§‚‘Ti`…ƒÛw<ÖÝ_‚¬PÏG+4½$€…†Æ¹«Ät‡C²/=c’—f€í9æg2ßU0Ÿ¡§ï¡d€BwÜàVãG d—Ë$TË&ðQ¬ó%òÚÜrèØ7¬*aôWs×S %,ÞèÊÕ x-ÇòŸ|•Š(P–ˆ2•øT¨+ù*™E¨KD™J|* T€•Š|•Ì" T€%¢L%>*ÀJE¾Jf*ÀQ¦ŸŠÿ –ì9lïóÏIEND®B`‚fabric-2.6.0/sites/docs/000077500000000000000000000000001400143053200150145ustar00rootroot00000000000000fabric-2.6.0/sites/docs/api/000077500000000000000000000000001400143053200155655ustar00rootroot00000000000000fabric-2.6.0/sites/docs/api/config.rst000066400000000000000000000001101400143053200175540ustar00rootroot00000000000000============== ``config`` ============== .. automodule:: fabric.config fabric-2.6.0/sites/docs/api/connection.rst000066400000000000000000000001201400143053200204470ustar00rootroot00000000000000============== ``connection`` ============== .. automodule:: fabric.connection fabric-2.6.0/sites/docs/api/exceptions.rst000066400000000000000000000001201400143053200204710ustar00rootroot00000000000000============== ``exceptions`` ============== .. automodule:: fabric.exceptions fabric-2.6.0/sites/docs/api/executor.rst000066400000000000000000000001101400143053200201450ustar00rootroot00000000000000============ ``executor`` ============ .. automodule:: fabric.executor fabric-2.6.0/sites/docs/api/group.rst000066400000000000000000000001061400143053200174500ustar00rootroot00000000000000============== ``group`` ============== .. 
automodule:: fabric.group fabric-2.6.0/sites/docs/api/runners.rst000066400000000000000000000001041400143053200200060ustar00rootroot00000000000000=========== ``runners`` =========== .. automodule:: fabric.runners fabric-2.6.0/sites/docs/api/tasks.rst000066400000000000000000000000741400143053200174450ustar00rootroot00000000000000========= ``tasks`` ========= .. automodule:: fabric.tasks fabric-2.6.0/sites/docs/api/testing.rst000066400000000000000000000011751400143053200200000ustar00rootroot00000000000000.. _testing-subpackage: =========== ``testing`` =========== The ``fabric.testing`` subpackage contains a handful of test helper modules: - `fabric.testing.base` which only depends on things like ``mock`` and is appropriate in just about any test paradigm; - `fabric.testing.fixtures`, containing ``pytest`` fixtures and thus only of interest for users of ``pytest``. All are documented below. Please note the module-level documentation which contains install instructions! ``testing.base`` ================ .. automodule:: fabric.testing.base ``testing.fixtures`` ==================== .. automodule:: fabric.testing.fixtures fabric-2.6.0/sites/docs/api/transfer.rst000066400000000000000000000001441400143053200201420ustar00rootroot00000000000000============ ``transfer`` ============ .. automodule:: fabric.transfer :member-order: bysource fabric-2.6.0/sites/docs/api/tunnels.rst000066400000000000000000000001121400143053200200010ustar00rootroot00000000000000============== ``tunnels`` ============== .. automodule:: fabric.tunnels fabric-2.6.0/sites/docs/api/util.rst000066400000000000000000000000701400143053200172710ustar00rootroot00000000000000======== ``util`` ======== .. automodule:: fabric.util fabric-2.6.0/sites/docs/cli.rst000066400000000000000000000116221400143053200163170ustar00rootroot00000000000000====================== Command-line interface ====================== This page documents the details of Fabric's command-line interface, ``fab``. 
Options & arguments =================== .. note:: By default, ``fab`` honors all of the same CLI options as :ref:`Invoke's 'inv' program `; only additions and overrides are listed here! For example, Fabric implements :option:`--prompt-for-passphrase` and :option:`--prompt-for-login-password` because they are SSH specific, but it inherits a related option -- :ref:`--prompt-for-sudo-password ` -- from Invoke, which handles sudo autoresponse concerns. .. option:: -H, --hosts Takes a comma-separated string listing hostnames against which tasks should be executed, in serial. See :ref:`runtime-hosts`. .. option:: -i, --identity Overrides the ``key_filename`` value in the ``connect_kwargs`` config setting (which is read by `.Connection`, and eventually makes its way into Paramiko; see the docstring for `.Connection` for details.) Typically this can be thought of as identical to ``ssh -i ``, i.e. supplying a specific, runtime private key file. Like ``ssh -i``, it builds an iterable of strings and may be given multiple times. Default: ``[]``. .. option:: --prompt-for-login-password Causes Fabric to prompt 'up front' for a value to store as the ``connect_kwargs.password`` config setting (used by Paramiko when authenticating via passwords and, in some versions, also used for key passphrases.) Useful if you do not want to configure such values in on-disk conf files or via shell environment variables. .. option:: --prompt-for-passphrase Causes Fabric to prompt 'up front' for a value to store as the ``connect_kwargs.passphrase`` config setting (used by Paramiko to decrypt private key files.) Useful if you do not want to configure such values in on-disk conf files or via shell environment variables. .. option:: -S, --ssh-config Takes a path to load as a runtime SSH config file. See :ref:`ssh-config`. .. option:: -t, --connect-timeout Takes an integer of seconds after which connection should time out. Supplies the default value for the ``timeouts.connect`` config setting. 
Seeking & loading tasks ======================= ``fab`` follows all the same rules as Invoke's :ref:`collection loading `, with the sole exception that the default collection name sought is ``fabfile`` instead of ``tasks``. Thus, whenever Invoke's documentation mentions ``tasks`` or ``tasks.py``, Fabric substitutes ``fabfile`` / ``fabfile.py``. For example, if your current working directory is ``/home/myuser/projects/mywebapp``, running ``fab --list`` will cause Fabric to look for ``/home/myuser/projects/mywebapp/fabfile.py`` (or ``/home/myuser/projects/mywebapp/fabfile/__init__.py`` - Python's import system treats both the same). If it's not found there, ``/home/myuser/projects/fabfile.py`` is sought next; and so forth. .. _runtime-hosts: Runtime specification of host lists =================================== While advanced use cases may need to take matters into their own hands, you can go reasonably far with the core :option:`--hosts` flag, which specifies one or more hosts the given task(s) should execute against. By default, execution is a serial process: for each task on the command line, run it once for each host given to :option:`--hosts`. Imagine tasks that simply print ``Running on !``:: $ fab --hosts host1,host2,host3 taskA taskB Running taskA on host1! Running taskA on host2! Running taskA on host3! Running taskB on host1! Running taskB on host2! Running taskB on host3! .. note:: When :option:`--hosts` is not given, ``fab`` behaves similarly to Invoke's :ref:`command-line interface `, generating regular instances of `~invoke.context.Context` instead of `Connections <.Connection>`. Executing arbitrary/ad-hoc commands =================================== ``fab`` leverages a lesser-known command line convention and may be called in the following manner:: $ fab [options] -- [shell command] where everything after the ``--`` is turned into a temporary `.Connection.run` call, and is not parsed for ``fab`` options. 
If you've specified a host list via an earlier task or the core CLI flags, this usage will act like a one-line anonymous task. For example, let's say you wanted kernel info for a bunch of systems:: $ fab -H host1,host2,host3 -- uname -a Such a command is equivalent to the following Fabric library code:: from fabric import Group Group('host1', 'host2', 'host3').run("uname -a") Most of the time you will want to just write out the task in your fabfile (anything you use once, you're likely to use again) but this feature provides a handy, fast way to dash off an SSH-borne command while leveraging predefined connection settings. fabric-2.6.0/sites/docs/concepts/000077500000000000000000000000001400143053200166325ustar00rootroot00000000000000fabric-2.6.0/sites/docs/concepts/authentication.rst000066400000000000000000000077561400143053200224220ustar00rootroot00000000000000============== Authentication ============== Even in the 'vanilla' OpenSSH client, authenticating to remote servers involves multiple potential sources for secrets and configuration; Fabric not only supports most of those, but has more of its own. This document outlines the available methods for setting authentication secrets. .. note:: Since Fabric itself tries not to reinvent too much Paramiko functionality, most of the time configuring authentication values boils down to "how to set keyword argument values for `SSHClient.connect `", which in turn means to set values inside either the ``connect_kwargs`` :doc:`config ` subtree, or the ``connect_kwargs`` keyword argument of `.Connection`. Private key files ================= Private keys stored on-disk are probably the most common auth mechanism for SSH. 
Fabric offers multiple methods of configuring which paths to use, most of which end up merged into one list of paths handed to ``SSHClient.connect(key_filename=[...])``, in the following order: - If a ``key_filename`` key exists in the ``connect_kwargs`` argument to `.Connection`, they come first in the list. (This is basically the "runtime" option for non-CLI users.) - The config setting ``connect_kwargs.key_filename`` can be set in a number of ways (as per the :doc:`config docs `) including via the :option:`--identity` CLI flag (which sets the ``overrides`` level of the config; so when this flag is used, key filename values from other config sources will be overridden.) This value comes next in the overall list. - Using an :ref:`ssh_config ` file with ``IdentityFile`` directives lets you share configuration with other SSH clients; such values come last. Encryption passphrases ---------------------- If your private key file is protected via a passphrase, it can be supplied in a handful of ways: - The ``connect_kwargs.passphrase`` config option is the most direct way to supply a passphrase to be used automatically. .. note:: Using actual on-disk config files for this type of material isn't always wise, but recall that the :doc:`configuration system ` is capable of loading data from other sources, such as your shell environment or even arbitrary remote databases. - If you prefer to enter the passphrase manually at runtime, you may use the command-line option :option:`--prompt-for-passphrase`, which will cause Fabric to interactively prompt the user at the start of the process, and store the entered value in ``connect_kwargs.passphrase`` (at the 'overrides' level.) Private key objects =================== Instantiate your own `PKey ` object (see its subclasses' API docs for details) and place it into ``connect_kwargs.pkey``. That's it! 
You'll be responsible for any handling of passphrases, if the key material you're loading (these classes can load from file paths or strings) is encrypted. SSH agents ========== By default (similar to how OpenSSH behaves) Paramiko will attempt to connect to a running SSH agent (Unix style, e.g. a live ``SSH_AUTH_SOCK``, or Pageant if one is on Windows). This can be disabled by setting ``connect_kwargs.allow_agent`` to ``False``. Passwords ========= Password authentication is relatively straightforward: - You can configure it via ``connect_kwargs.password`` directly. - If you want to be prompted for it at the start of a session, specify :option:`--prompt-for-login-password`. .. TODO: host-configuration hooks are very important here, when implemented GSSAPI ====== Fabric doesn't provide any extra GSSAPI support on top of Paramiko's existing connect-time parameters (see e.g. ``gss_kex``/``gss_auth``/``gss_host``/etc in `SSHClient.connect `) and the modules implementing the functionality itself (such as `paramiko.ssh_gss`.) Thus, as usual, you should be looking to modify the ``connect_kwargs`` configuration tree. fabric-2.6.0/sites/docs/concepts/configuration.rst000066400000000000000000000232721400143053200222410ustar00rootroot00000000000000.. _fab-configuration: ============= Configuration ============= Basics ====== The heart of Fabric's configuration system (as with much of the rest of Fabric) relies on Invoke functionality, namely `invoke.config.Config` (technically, a lightweight subclass, `fabric.config.Config`). For practical details on what this means re: configuring Fabric's behavior, please see :ref:`Invoke's configuration documentation `. The primary differences from that document are as follows: * The configuration file paths sought are all named ``fabric.*`` instead of ``invoke.*`` - e.g. ``/etc/fabric.yml`` instead of ``/etc/invoke.yml``, ``~/.fabric.py`` instead of ``~/.invoke.py``, etc. 
* In addition to :ref:`Invoke's own default configuration values `, Fabric merges in some of its own, such as the fact that SSH's default port number is 22. See :ref:`default-values` for details. * Fabric has facilities for loading SSH config files, and will automatically create (or update) a configuration subtree on a per `Connection ` basis, loaded with the interpreted SSH configuration for that specific host (since an SSH config file is only ever useful via such a lens). See :ref:`ssh-config`. * Fabric plans to offer a framework for managing per-host and per-host-collection configuration details and overrides, though this is not yet implemented (it will be analogous to, but improved upon, the ``env.hosts`` and ``env.roles`` structures from Fabric 1.x). * This functionality will supplement that of the SSH config loading described earlier; we expect most users will prefer to configure as much as possible via an SSH config file, but not all Fabric settings have ``ssh_config`` analogues, nor do all use cases fit neatly into such files. .. _default-values: Default configuration values ============================ Overrides of Invoke-level defaults ---------------------------------- - ``run.replace_env``: ``True``, instead of ``False``, so that remote commands run with a 'clean', empty environment instead of inheriting a copy of the current process' environment. This is for security purposes: leaking local environment data remotely by default would be unsanitary. It's also compatible with the behavior of OpenSSH. .. seealso:: The warning under `paramiko.channel.Channel.set_environment_variable`. Extensions to Invoke-level defaults ----------------------------------- - ``runners.remote``: In Invoke, the ``runners`` tree has a single subkey, ``local`` (mapping to `~invoke.runners.Local`). Fabric adds this new subkey, ``remote``, which is mapped to `~fabric.runners.Remote`. New default values defined by Fabric ------------------------------------ .. 
note:: Most of these settings are also available in the constructor of `.Connection`, if they only need modification on a per-connection basis. .. warning:: Many of these are also configurable via :ref:`ssh_config files `. **Such values take precedence over those defined via the core configuration**, so make sure you're aware of whether you're loading such files (or :ref:`disable them to be sure `). - ``connect_kwargs``: Keyword arguments (`dict`) given to `SSHClient.connect ` when `.Connection` performs that method call. This is the primary configuration vector for many SSH-related options, such as selecting private keys, toggling forwarding of SSH agents, etc. Default: ``{}``. - ``forward_agent``: Whether to attempt forwarding of your local SSH authentication agent to the remote end. Default: ``False`` (same as in OpenSSH.) - ``gateway``: Used as the default value of the ``gateway`` kwarg for `.Connection`. May be any value accepted by that argument. Default: ``None``. - ``load_ssh_configs``: Whether to automatically seek out :ref:`SSH config files `. When ``False``, no automatic loading occurs. Default: ``True``. - ``port``: TCP port number used by `.Connection` objects when not otherwise specified. Default: ``22``. - ``inline_ssh_env``: Boolean serving as global default for the value of `.Connection`'s ``inline_ssh_env`` parameter; see its docs for details. Default: ``False``. - ``ssh_config_path``: Runtime SSH config path; see :ref:`ssh-config`. Default: ``None``. - ``timeouts``: Various timeouts, specifically: - ``connect``: Connection timeout, in seconds; defaults to ``None``, meaning no timeout / block forever. - ``user``: Username given to the remote ``sshd`` when connecting. Default: your local system username. .. 
_ssh-config: Loading and using ``ssh_config`` files ====================================== How files are loaded -------------------- Fabric uses Paramiko's SSH config file machinery to load and parse ``ssh_config``-format files (following OpenSSH's behavior re: which files to load, when possible): - An already-parsed `~paramiko.config.SSHConfig` object may be given to `.Config.__init__` via its ``ssh_config`` keyword argument; if this value is given, no files are loaded, even if they exist. - A runtime file path may be specified via configuration itself, as the ``ssh_config_path`` key; such a path will be loaded into a new `~paramiko.config.SSHConfig` object at the end of `.Config.__init__` and no other files will be sought out. - It will be filled in by the ``fab`` CLI tool if the :option:`--ssh-config` flag is given. - If no runtime config (object or path) was given to `.Config.__init__`, it will automatically seek out and load ``~/.ssh/config`` and/or ``/etc/ssh/ssh_config``, if they exist (and in that order.) .. note:: Rules present in both files will result in the user-level file 'winning', as the first rule found during lookup is always used. - If none of the above vectors yielded SSH config data, a blank/empty `~paramiko.config.SSHConfig` is the final result. - Regardless of how the object was generated, it is exposed as ``Config.base_ssh_config``. .. _connection-ssh-config: ``Connection``'s use of ``ssh_config`` values --------------------------------------------- `.Connection` objects expose a per-host 'view' of their config's SSH data (obtained via `~paramiko.config.SSHConfig.lookup`) as `.Connection.ssh_config`. `.Connection` itself references these values as described in the following subsections, usually as simple defaults for the appropriate config key or parameter (``port``, ``forward_agent``, etc.) 
Unless otherwise specified, these values override regular configuration values for the same keys, but may themselves be overridden by `.Connection.__init__` parameters. Take for example a ``~/.fabric.yaml``: .. code:: yaml user: foo Absent any other configuration, ``Connection('myhost')`` connects as the ``foo`` user. If we also have an ``~/.ssh/config``:: Host * User bar then ``Connection('myhost')`` connects as ``bar`` (the SSH config wins over the Fabric config.) *However*, in both cases, ``Connection('myhost', user='biz')`` will connect as ``biz``. .. note:: The below sections use capitalized versions of ``ssh_config`` keys for easier correlation with ``man ssh_config``, **but** the actual `~paramiko.config.SSHConfig` data structure is normalized to lowercase keys, since SSH config files are technically case-insensitive. Connection parameters ~~~~~~~~~~~~~~~~~~~~~ - ``Hostname``: replaces the original value of ``host`` (which is preserved as ``.original_host``.) - ``Port``: supplies the default value for the ``port`` config option / parameter. - ``User``: supplies the default value for the ``user`` config option / parameter. - ``ConnectTimeout``: sets the default value for the ``timeouts.connect`` config option / ``timeout`` parameter. Proxying ~~~~~~~~ - ``ProxyCommand``: supplies default (string) value for ``gateway``. - ``ProxyJump``: supplies default (`Connection `) value for ``gateway``. - Nested-style ``ProxyJump``, i.e. ``user1@hop1.host,user2@hop2.host,...``, will result in an appropriate series of nested ``gateway`` values under the hood - as if the user had manually specified ``Connecton(..., gateway=Connection('user1@hop1.host', gateway=Connection('user2@hop2.host', gateway=...)))``. .. note:: If both are specified for a given host, ``ProxyJump`` will override ``ProxyCommand``. This is slightly different from OpenSSH, where the order the directives are loaded determines which one wins. 
Doing so on our end (where we view the config as a dictionary structure) requires additional work. .. TODO: honor ProxyJump's comma-separated variant, which should translate to (reverse-ordered) nested Connection-style gateways. Authentication ~~~~~~~~~~~~~~ - ``ForwardAgent``: controls default behavior of ``forward_agent``. - ``IdentityFile``: appends to the ``key_filename`` key within ``connect_kwargs`` (similar to :option:`--identity`.) .. TODO: merge with per-host config when it's figured out .. _disabling-ssh-config: Disabling (most) ``ssh_config`` loading --------------------------------------- Users who need tighter control over how their environment gets configured may want to disable the automatic loading of system/user level SSH config files; this can prevent hard-to-expect errors such as a new user's ``~/.ssh/config`` overriding values that are being set in the regular config hierarchy. To do so, simply set the top level config option ``load_ssh_configs`` to ``False``. .. note:: Changing this setting does *not* disable loading of runtime-level config files (e.g. via :option:`-F`). If a user is explicitly telling us to load such a file, we assume they know what they're doing. fabric-2.6.0/sites/docs/concepts/networking.rst000066400000000000000000000072361400143053200215630ustar00rootroot00000000000000========== Networking ========== .. _ssh-gateways: SSH connection gateways ======================= Background ---------- When connecting to well-secured networks whose internal hosts are not directly reachable from the Internet, a common pattern is "bouncing", "gatewaying" or "proxying" SSH connections via an intermediate host (often called a "bastion", "gateway" or "jump box"). Gatewaying requires making an initial/outer SSH connection to the gateway system, then using that connection as a transport for the "real" connection to the final/internal host. At a basic level, one could ``ssh gatewayhost``, then ``ssh internalhost`` from the resulting shell. 
This works for individual long-running sessions, but becomes a burden when it must be done frequently. There are two gateway solutions available in Fabric, mirroring the functionality of OpenSSH's client: ``ProxyJump`` style (easier, less overhead, can be nested) or ``ProxyCommand`` style (more overhead, can't be nested, sometimes more flexible). Both support the usual range of configuration sources: Fabric's own config framework, SSH config files, or runtime parameters. ``ProxyJump`` ------------- This style of gateway uses the SSH protocol's ``direct-tcpip`` channel type - a lightweight method of requesting that the gateway's ``sshd`` open a connection on our behalf to another system. (This has been possible in OpenSSH server for a long time; support in OpenSSH's client is new as of 7.3.) Channel objects (instances of `paramiko.channel.Channel`) implement Python's socket API and are thus usable in place of real operating system sockets for nearly any Python code. ``ProxyJump`` style gatewaying is simple to use: create a new `.Connection` object parameterized for the gateway, and supply it as the ``gateway`` parameter when creating your inner/real `.Connection`:: from fabric import Connection c = Connection('internalhost', gateway=Connection('gatewayhost')) As with any other `.Connection`, the gateway connection may be configured with its own username, port number, and so forth. (This includes ``gateway`` itself - they can be chained indefinitely!) .. TODO: should it default to user/port from the 'outer' Connection? Some users may assume it will? (Probably most likely to assume user is preserved; port less so?) ``ProxyCommand`` ---------------- The traditional OpenSSH command-line client has long offered a ``ProxyCommand`` directive (see `man ssh_config `_), which pipes the inner connection's input and output through an arbitrary local subprocess. Compared to ``ProxyJump`` style gateways, this adds overhead (the extra subprocess) and can't easily be nested. 
In trade, it allows for advanced tricks like use of SOCKS proxies, or custom filtering/gatekeeping applications. ``ProxyCommand`` subprocesses are typically another ``ssh`` command, such as ``ssh -W %h:%p gatewayhost``; or (on SSH versions lacking ``-W``) the widely available ``netcat``, via ``ssh gatewayhost nc %h %p``. Fabric supports ``ProxyCommand`` by accepting command string objects in the ``gateway`` kwarg of `.Connection`; this is used to populate a `paramiko.proxy.ProxyCommand` object at connection time. Additional concerns ------------------- If you're unsure which of the two approaches to use: use ``ProxyJump`` style. It performs better, uses fewer resources on your local system, and has an easier-to-use API. .. warning:: Requesting both types of gateways simultaneously to the same host (i.e. supplying a `.Connection` as the ``gateway`` via kwarg or config, *and* loading a config file containing ``ProxyCommand``) is considered an error and will result in an exception. fabric-2.6.0/sites/docs/conf.py000066400000000000000000000014011400143053200163070ustar00rootroot00000000000000# Obtain shared config values import sys from os.path import abspath, join, dirname sys.path.append(abspath(join(dirname(__file__), ".."))) sys.path.append(abspath(join(dirname(__file__), "..", ".."))) from shared_conf import * # Enable & configure autodoc extensions.append("sphinx.ext.autodoc") autodoc_default_flags = ["members", "special-members"] # Default is 'local' building, but reference the public WWW site when building # under RTD. 
target = join(dirname(__file__), "..", "www", "_build") if on_rtd: target = "http://www.fabfile.org/" www = (target, None) # Intersphinx connection to www site intersphinx_mapping.update({"www": www}) # Sister-site links to WWW html_theme_options["extra_nav_links"] = { "Main website": "http://www.fabfile.org" } fabric-2.6.0/sites/docs/getting-started.rst000066400000000000000000000360711400143053200206620ustar00rootroot00000000000000=============== Getting started =============== Welcome! This tutorial highlights Fabric's core features; for further details, see the links within, or the documentation index which has links to conceptual and API doc sections. A note about imports ==================== Fabric composes a couple of other libraries as well as providing its own layer on top; user code will most often import from the ``fabric`` package, but you'll sometimes import directly from ``invoke`` or ``paramiko`` too: - `Invoke `_ implements CLI parsing, task organization, and shell command execution (a generic framework plus specific implementation for local commands.) - Anything that isn't specific to remote systems tends to live in Invoke, and it is often used standalone by programmers who don't need any remote functionality. - Fabric users will frequently import Invoke objects, in cases where Fabric itself has no need to subclass or otherwise modify what Invoke provides. - `Paramiko `_ implements low/mid level SSH functionality - SSH and SFTP sessions, key management, etc. - Fabric mostly uses this under the hood; users will only rarely import from Paramiko directly. - Fabric glues the other libraries together and provides its own high level objects too, e.g.: - Subclassing Invoke's context and command-runner classes, wrapping them around Paramiko-level primitives; - Extending Invoke's configuration system by using Paramiko's ``ssh_config`` parsing machinery; - Implementing new high-level primitives of its own, such as port-forwarding context managers. 
(These may, in time, migrate downwards into Paramiko.) .. TODO: we should probably rename Collection to be Namespace or something; it's too close to 'Connection' Run commands via Connections and ``run`` ======================================== The most basic use of Fabric is to execute a shell command on a remote system via SSH, then (optionally) interrogate the result. By default, the remote program's output is printed directly to your terminal, *and* captured. A basic example: .. testsetup:: basic mock = MockRemote() mock.expect(out=b'Linux\n') .. testcleanup:: basic mock.stop() .. doctest:: basic >>> from fabric import Connection >>> c = Connection('web1') >>> result = c.run('uname -s') Linux >>> result.stdout.strip() == 'Linux' True >>> result.exited 0 >>> result.ok True >>> result.command 'uname -s' >>> result.connection >>> result.connection.host 'web1' Meet `.Connection`, which represents an SSH connection and provides the core of Fabric's API, such as `~.Connection.run`. `.Connection` objects need at least a hostname to be created successfully, and may be further parameterized by username and/or port number. You can give these explicitly via args/kwargs:: Connection(host='web1', user='deploy', port=2202) Or by stuffing a ``[user@]host[:port]`` string into the ``host`` argument (though this is purely convenience; always use kwargs whenever ambiguity appears!):: Connection('deploy@web1:2202') `.Connection` objects' methods (like `~.Connection.run`) usually return instances of `invoke.runners.Result` (or subclasses thereof) exposing the sorts of details seen above: what was requested, what happened while the remote action occurred, and what the final result was. .. note:: Many lower-level SSH connection arguments (such as private keys and timeouts) can be given directly to the SSH backend by using the :ref:`connect_kwargs argument `. 
Superuser privileges via auto-response ====================================== Need to run things as the remote system's superuser? You could invoke the ``sudo`` program via `~.Connection.run`, and (if your remote system isn't configured with passwordless sudo) respond to the password prompt by hand, as below. (Note how we need to request a remote pseudo-terminal; most ``sudo`` implementations get grumpy at password-prompt time otherwise.) .. testsetup:: sudo-by-hand mock = MockRemote() mock.expect(commands=( Command(out=b'[sudo] password:\n'), Command(out=b'1001\n'), )) .. testcleanup:: sudo-by-hand mock.stop() .. doctest:: sudo-by-hand >>> from fabric import Connection >>> c = Connection('db1') >>> c.run('sudo useradd mydbuser', pty=True) [sudo] password: >>> c.run('id -u mydbuser') 1001 Giving passwords by hand every time can get old; thankfully Invoke's powerful command-execution functionality includes the ability to :ref:`auto-respond ` to program output with pre-defined input. We can use this for ``sudo``: .. testsetup:: sudo-with-responses mock = MockRemote() mock.expect(out=b'[sudo] password:\nroot\n', in_=b'mypassword\n') .. testcleanup:: sudo-with-responses mock.stop() .. doctest:: sudo-with-responses >>> from invoke import Responder >>> from fabric import Connection >>> c = Connection('host') >>> sudopass = Responder( ... pattern=r'\[sudo\] password:', ... response='mypassword\n', ... ) >>> c.run('sudo whoami', pty=True, watchers=[sudopass]) [sudo] password: root It's difficult to show in a snippet, but when the above was executed, the user didn't need to type anything; ``mypassword`` was sent to the remote program automatically. Much easier! The ``sudo`` helper ------------------- Using watchers/responders works well here, but it's a lot of boilerplate to set up every time - especially as real-world use cases need more work to detect failed/incorrect passwords. 
To help with that, Invoke provides a `Context.sudo ` method which handles most of the boilerplate for you (as `.Connection` subclasses `~invoke.context.Context`, it gets this method for free.) `~invoke.context.Context.sudo` doesn't do anything users can't do themselves - but as always, common problems are best solved with commonly shared solutions. All the user needs to do is ensure the ``sudo.password`` :doc:`configuration value ` is filled in (via config file, environment variable, or :option:`--prompt-for-sudo-password`) and `.Connection.sudo` handles the rest. For the sake of clarity, here's an example where a library/shell user performs their own `getpass`-based password prompt: .. testsetup:: sudo from __future__ import print_function from mock import patch gp_patcher = patch('getpass.getpass', side_effect=lambda x: print(x)) gp_patcher.start() mock = MockRemote() mock.expect(commands=( Command(out=b'root\n'), Command(), Command(out=b'1001\n'), )) .. testcleanup:: sudo mock.stop() gp_patcher.stop() .. doctest:: sudo :options: +ELLIPSIS >>> import getpass >>> from fabric import Connection, Config >>> sudo_pass = getpass.getpass("What's your sudo password?") What's your sudo password? >>> config = Config(overrides={'sudo': {'password': sudo_pass}}) >>> c = Connection('db1', config=config) >>> c.sudo('whoami', hide='stderr') root >>> c.sudo('useradd mydbuser') >>> c.run('id -u mydbuser') 1001 We filled in the sudo password up-front at runtime in this example; in real-world situations, you might also supply it via the configuration system (perhaps using environment variables, to avoid polluting config files), or ideally, use a secrets management system. Transfer files ============== Besides shell command execution, the other common use of SSH connections is file transfer; `.Connection.put` and `.Connection.get` exist to fill this need. For example, say you had an archive file you wanted to upload: .. testsetup:: transfers mock = MockSFTP() .. 
testcleanup:: transfers mock.stop() .. doctest:: transfers >>> from fabric import Connection >>> result = Connection('web1').put('myfiles.tgz', remote='/opt/mydata/') >>> print("Uploaded {0.local} to {0.remote}".format(result)) Uploaded /local/myfiles.tgz to /opt/mydata/ These methods typically follow the behavior of ``cp`` and ``scp``/``sftp`` in terms of argument evaluation - for example, in the above snippet, we omitted the filename part of the remote path argument. Multiple actions ================ One-liners are good examples but aren't always realistic use cases - one typically needs multiple steps to do anything interesting. At the most basic level, you could do this by calling `.Connection` methods multiple times:: from fabric import Connection c = Connection('web1') c.put('myfiles.tgz', '/opt/mydata') c.run('tar -C /opt/mydata -xzvf /opt/mydata/myfiles.tgz') You could (but don't have to) turn such blocks of code into functions, parameterized with a `.Connection` object from the caller, to encourage reuse:: def upload_and_unpack(c): c.put('myfiles.tgz', '/opt/mydata') c.run('tar -C /opt/mydata -xzvf /opt/mydata/myfiles.tgz') As you'll see below, such functions can be handed to other API methods to enable more complex use cases as well. Multiple servers ================ Most real use cases involve doing things on more than one server. The straightforward approach could be to iterate over a list or tuple of `.Connection` arguments (or `.Connection` objects themselves, perhaps via ``map``):: >>> from fabric import Connection >>> for host in ('web1', 'web2', 'mac1'): >>> result = Connection(host).run('uname -s') ... print("{}: {}".format(host, result.stdout.strip())) ... ... web1: Linux web2: Linux mac1: Darwin This approach works, but as use cases get more complex it can be useful to think of a collection of hosts as a single object. 
Enter `.Group`, a class wrapping one-or-more `.Connection` objects and offering a similar API; specifically, you'll want to use one of its concrete subclasses like `.SerialGroup` or `.ThreadingGroup`. The previous example, using `.Group` (`.SerialGroup` specifically), looks like this:: >>> from fabric import SerialGroup as Group >>> results = Group('web1', 'web2', 'mac1').run('uname -s') >>> print(results) : , : , : , }> >>> for connection, result in results.items(): ... print("{0.host}: {1.stdout}".format(connection, result)) ... ... web1: Linux web2: Linux mac1: Darwin Where `.Connection` methods return single ``Result`` objects (e.g. `fabric.runners.Result`), `.Group` methods return `.GroupResult` - `dict`-like objects offering access to individual per-connection results as well as metadata about the entire run. When any individual connections within the `.Group` encounter errors, the `.GroupResult` is lightly wrapped in a `.GroupException`, which is raised. Thus the aggregate behavior resembles that of individual `.Connection` methods, returning a value on success or raising an exception on failure. Bringing it all together ======================== Finally, we arrive at the most realistic use case: you've got a bundle of commands and/or file transfers and you want to apply it to multiple servers. You *could* use multiple `.Group` method calls to do this:: from fabric import SerialGroup as Group pool = Group('web1', 'web2', 'web3') pool.put('myfiles.tgz', '/opt/mydata') pool.run('tar -C /opt/mydata -xzvf /opt/mydata/myfiles.tgz') That approach falls short as soon as logic becomes necessary - for example, if you only wanted to perform the copy-and-untar above when ``/opt/mydata`` is empty. Performing that sort of check requires execution on a per-server basis. 
You could fill that need by using iterables of `.Connection` objects (though this foregoes some benefits of using `Groups <.Group>`):: from fabric import Connection for host in ('web1', 'web2', 'web3'): c = Connection(host) if c.run('test -f /opt/mydata/myfile', warn=True).failed: c.put('myfiles.tgz', '/opt/mydata') c.run('tar -C /opt/mydata -xzvf /opt/mydata/myfiles.tgz') Alternatively, remember how we used a function in that earlier example? You can go that route instead:: from fabric import SerialGroup as Group def upload_and_unpack(c): if c.run('test -f /opt/mydata/myfile', warn=True).failed: c.put('myfiles.tgz', '/opt/mydata') c.run('tar -C /opt/mydata -xzvf /opt/mydata/myfiles.tgz') for connection in Group('web1', 'web2', 'web3'): upload_and_unpack(connection) The only convenience this final approach lacks is a useful analogue to `.Group.run` - if you want to track the results of all the ``upload_and_unpack`` call as an aggregate, you have to do that yourself. Look to future feature releases for more in this space! Addendum: the ``fab`` command-line tool ======================================= It's often useful to run Fabric code from a shell, e.g. deploying applications or running sysadmin jobs on arbitrary servers. You could use regular :ref:`Invoke tasks ` with Fabric library code in them, but another option is Fabric's own "network-oriented" tool, ``fab``. ``fab`` wraps Invoke's CLI mechanics with features like host selection, letting you quickly run tasks on various servers - without having to define ``host`` kwargs on all your tasks or similar. .. note:: This mode was the primary API of Fabric 1.x; as of 2.0 it's just a convenience. Whenever your use case falls outside these shortcuts, it should be easy to revert to the library API directly (with or without Invoke's less opinionated CLI tasks wrapped around it). 
For a final code example, let's adapt the previous example into a ``fab`` task module called ``fabfile.py``:: from fabric import task @task def upload_and_unpack(c): if c.run('test -f /opt/mydata/myfile', warn=True).failed: c.put('myfiles.tgz', '/opt/mydata') c.run('tar -C /opt/mydata -xzvf /opt/mydata/myfiles.tgz') Not hard - all we did was copy our temporary task function into a file and slap a decorator on it. `~fabric.tasks.task` tells the CLI machinery to expose the task on the command line:: $ fab --list Available tasks: upload_and_unpack Then, when ``fab`` actually invokes a task, it knows how to stitch together arguments controlling target servers, and run the task once per server. To run the task once on a single server:: $ fab -H web1 upload_and_unpack When this occurs, ``c`` inside the task is set, effectively, to ``Connection("web1")`` - as in earlier examples. Similarly, you can give more than one host, which runs the task multiple times, each time with a different `.Connection` instance handed in:: $ fab -H web1,web2,web3 upload_and_unpack fabric-2.6.0/sites/docs/index.rst000066400000000000000000000022261400143053200166570ustar00rootroot00000000000000================================== Welcome to Fabric's documentation! ================================== This site covers Fabric's usage & API documentation. For basic info on what Fabric is, including its public changelog & how the project is maintained, please see `the main project website `_. Getting started --------------- Many core ideas & API calls are explained in the tutorial/getting-started document: .. toctree:: :maxdepth: 2 getting-started Upgrading from 1.x ------------------ Looking to upgrade from Fabric 1.x? See our :ref:`detailed upgrade guide ` on the nonversioned main project site. .. _concepts-docs: Concepts -------- Dig deeper into specific topics: .. 
toctree:: :maxdepth: 2 :glob: concepts/* The ``fab`` CLI tool -------------------- Details on the CLI interface to Fabric, how it extends Invoke's CLI machinery, and examples of shortcuts for executing tasks across hosts or groups. .. toctree:: cli .. _api-docs: API --- Know what you're looking for & just need API details? View our auto-generated API documentation: .. toctree:: :maxdepth: 1 :glob: api/* fabric-2.6.0/sites/docs/upgrading.rst000066400000000000000000000004041400143053200175240ustar00rootroot00000000000000:orphan: ================== Upgrading - moved! ================== If you're here, you're probably following an old link or bookmark. The upgrading page has moved to the unversioned main project site: :ref:`upgrading`. Please update your bookmarks and links! fabric-2.6.0/sites/shared_conf.py000066400000000000000000000045031400143053200167130ustar00rootroot00000000000000import os from os.path import join, dirname, abspath from datetime import datetime import alabaster # Alabaster theme + mini-extension html_theme_path = [alabaster.get_path()] extensions = ["alabaster", "sphinx.ext.intersphinx"] # Paths relative to invoking conf.py - not this shared file html_static_path = [join("..", "_shared_static")] html_theme = "alabaster" html_theme_options = { "logo": "logo.png", "logo_name": True, "logo_text_align": "center", "description": "Pythonic remote execution", "github_user": "fabric", "github_repo": "fabric", "travis_button": True, "codecov_button": True, "tidelift_url": "https://tidelift.com/subscription/pkg/pypi-fabric?utm_source=pypi-fabric&utm_medium=referral&utm_campaign=docs", "analytics_id": "UA-18486793-1", "link": "#3782BE", "link_hover": "#3782BE", # Wide enough that 80-col code snippets aren't truncated on default font # settings (at least for bitprophet's Chrome-on-OSX-Yosemite setup) "page_width": "1024px", } html_sidebars = { "**": ["about.html", "navigation.html", "searchbox.html", "donate.html"] } # Enable & configure doctest 
extensions.append("sphinx.ext.doctest") doctest_global_setup = r""" from fabric.testing.base import MockRemote, MockSFTP, Session, Command """ on_rtd = os.environ.get("READTHEDOCS") == "True" on_travis = os.environ.get("TRAVIS", False) on_dev = not (on_rtd or on_travis) # Invoke (docs + www) inv_target = join( dirname(__file__), "..", "..", "invoke", "sites", "docs", "_build" ) if not on_dev: inv_target = "http://docs.pyinvoke.org/en/latest/" inv_www_target = join( dirname(__file__), "..", "..", "invoke", "sites", "www", "_build" ) if not on_dev: inv_www_target = "http://pyinvoke.org/" # Paramiko (docs) para_target = join( dirname(__file__), "..", "..", "paramiko", "sites", "docs", "_build" ) if not on_dev: para_target = "http://docs.paramiko.org/en/latest/" intersphinx_mapping = { "python": ("http://docs.python.org/", None), "invoke": (inv_target, None), "invoke_www": (inv_www_target, None), "paramiko": (para_target, None), } # Regular settings project = "Fabric" year = datetime.now().year copyright = "%d Jeff Forcier" % year master_doc = "index" templates_path = ["_templates"] exclude_trees = ["_build"] source_suffix = ".rst" default_role = "obj" fabric-2.6.0/sites/www/000077500000000000000000000000001400143053200147105ustar00rootroot00000000000000fabric-2.6.0/sites/www/changelog-v1.rst000066400000000000000000001413341400143053200177230ustar00rootroot00000000000000=============== Changelog (1.x) =============== .. note:: This is the changelog for the legacy 1.x version of Fabric. For the current (2.0+) changelog, please see :doc:`the main changelog `. * :release:`1.14.1 <2018-11-27>` * :bug:`1341` (via :issue:`1586`) Attempt to ``rm -f`` the temporary file used by ``put``'s sudo mode, when exceptions are encountered; previously, the internal ``sudo mv`` call could potentially fail and leave the file around. Thanks to Andrei Sura for the report and Uku Loskit for the fix. 
* :bug:`1242` (via :issue:`1243`) `~fabric.contrib.project.rsync_project`: only supply the ``-p `` option to generated ``rsync`` commands when the port number differs from the default; this allows removing ``--rsh`` entirely most of the time, and thus enables things like using rsync's daemon mode on the remote end. Reported & patched by Arnaud Rocher. * :bug:`1227` Remove a bash/zsh-ism from `~fabric.contrib.files.upload_template` when backing up the target file, preventing issues on simpler remote shells. Patch courtesy of Paul Chakravarti. * :bug:`983` Move a ``getpass`` import inside a Windows-oriented ``try``/``except ImportError`` so password prompting is less likely to explode on certain systems. Thanks to ``@dongweiming`` for the patch. * :support:`- backported` Update packaging metadata so wheel archives include the ``LICENSE`` file. * :release:`1.14.0 <2017-08-25>` * :feature:`1475` Honor ``env.timeout`` when opening new remote sessions (as opposed to the initial overall connection, which already honored timeout settings.) Thanks to ``@EugeniuZ`` for the report & ``@jrmsgit`` for the first draft of the patch. .. note:: This feature only works with Paramiko 1.14.3 and above; if your Paramiko version is older, no timeout can be set, and the previous behavior will occur instead. * :release:`1.13.2 <2017-04-24>` * :release:`1.12.2 <2017-04-24>` * :bug:`1542` (via :issue:`1543`) Catch Paramiko-level gateway connection errors (``ChannelError``) when raising ``NetworkError``; this prevents an issue where gateway related issues were being treated as authentication errors. Thanks to Charlie Stanley for catch & patch. * :bug:`1555` Multiple simultaneous `~fabric.operations.get` and/or `~fabric.operations.put` with ``use_sudo=True`` and for the same remote host and path could fail unnecessarily. Thanks ``@arnimarj`` for the report and Pierce Lopez for the patch. 
* :bug:`1427` (via :issue:`1428`) Locate ``.pyc`` files when searching for fabfiles to load; previously we only used the presence of ``.py`` files to determine whether loading should be attempted. Credit: Ray Chen. * :bug:`1294` fix text escaping for `~fabric.contrib.files.contains` and `~fabric.contrib.files.append` which would fail if the text contained e.g. ``>``. Thanks to ``@ecksun`` for report & Pierce Lopez for the patch. * :support:`1065 backported` Fix incorrect SSH config reference in the docs for ``env.keepalive``; it corresponds to ``ServerAliveInterval``, not ``ClientAliveInterval``. Credit: Harry Percival. * :bug:`1574` `~fabric.contrib.project.upload_project` failed for folder in current directory specified without any path separator. Thanks ``@aidanmelen`` for the report and Pierce Lopez for the patch. * :support:`1590 backported` Replace a reference to ``fab`` in a test subprocess, to use the ``python -m `` style instead; this allows ``python setup.py test`` to run the test suite without having Fabric already installed. Thanks to ``@BenSturmfels`` for catch & patch. * :support:`- backported` Backport :issue:`1462` to 1.12.x (was previously only backported to 1.13.x.) * :support:`1416 backported` Add explicit "Python 2 only" note to ``setup.py`` trove classifiers to help signal that fact to various info-gathering tools. Patch courtesy of Gavin Bisesi. * :bug:`1526` Disable use of PTY and shell for a background command execution within ``contrib.sed``, preventing a small class of issues on some platforms/environments. Thanks to ``@doflink`` for the report and Pierce Lopez for the final patch. * :support:`1539 backported` Add documentation for ``env.output_prefix``. Thanks ``@jphalip``. * :bug:`1514` Compatibility with Python 2.5 was broken by using the ``format()`` method of a string (only in 1.11+). Report by ``@pedrudehuere``. 
* :release:`1.13.1 <2016-12-09>` * :bug:`1462` Make a PyCrypto-specific import and method call optional to avoid ``ImportError`` problems under Paramiko 2.x. Thanks to Alex Gaynor for catch & patch! * :release:`1.13.0 <2016-12-09>` * :support:`1461` Update setup requirements to allow Paramiko 2.x, now that it's stable and been out in the wild for some time. Paramiko 1.x still works like it always did; the only change to Paramiko 2 was the backend moving from PyCrypto to Cryptography. .. warning:: If you are upgrading an existing environment, the install dependencies have changed; please see Paramiko's installation docs for details: http://www.paramiko.org/installing.html * :release:`1.12.1 <2016-12-05>` * :release:`1.11.3 <2016-12-05>` * :release:`1.10.5 <2016-12-05>` * :bug:`1470` When using ``fabric.operations.get`` with glob expressions, a lack of matches for the glob would result in an empty file named after the glob expression (in addition to raising an error). This has been fixed so the empty file is no longer generated. Thanks to Georgy Kibardin for the catch & initial patch. * :feature:`1495` Update the internals of ``fabric.contrib.files`` so its members work with SSH servers running on Windows. Thanks to Hamdi Sahloul for the patch. * :support:`1483 backported` (also re: :issue:`1386`, :issue:`1374`, :issue:`1300`) Add an FAQ about quote problems in remote ``csh`` causing issues with Fabric's shell-wrapping and quote-escaping. Thanks to Michael Radziej for the update. * :support:`1379 backported` (also :issue:`1464`) Clean up a lot of unused imports and similar cruft (many found via ``flake8 --select E4``). Thanks to Mathias Ertl for the original patches. * :bug:`1458` Detect ``known_hosts``-related instances of ``paramiko.SSHException`` and prevent them from being handled like authentication errors (which is the default behavior). 
This fixes issues with incorrect password prompts or prompt-related exceptions when using ``reject_unknown_hosts`` and encountering missing or bad ``known_hosts`` entries. Thanks to Lukáš Doktor for catch & patch. * :release:`1.12.0 <2016-07-25>` * :release:`1.11.2 <2016-07-25>` * :release:`1.10.4 <2016-07-25>` * :feature:`1491` Implement ``sudo``-specific password caching. This can be used to work around issues where over-eager submission of ``env.password`` at login time causes authentication problems (e.g. during two-factor auth). * :bug:`1447` Fix a relative import in ``fabric.network`` to be correctly/consistently absolute instead. Thanks to ``@bildzeitung`` for catch & patch. * :release:`1.11.1 <2016-04-09>` * :bug:`- (==1.11)` Bumped version to ``1.11.1`` due to apparently accidentally uploading a false ``1.11.0`` to PyPI sometime in the past (PyPI is secure & prevents reusing deleted filenames.) We have no memory of this, but databases don't lie! * :release:`1.11.0 <2016-04-09>` * :release:`1.10.3 <2016-04-09>` * :bug:`1135` (via :issue:`1241`) Modified order of operations in ``fabric.operations.run``/``fabric.operations.sudo`` to apply environment vars before prefixing commands (instead of after). Report by ``@warsamebashir``, patch by Curtis Mattoon. * :feature:`1203` (via :issue:`1240`) Add a ``case_sensitive`` kwarg to ``fabric.contrib.files.contains`` (which toggles use of ``egrep -i``). Report by ``@xoul``, patch by Curtis Mattoon. * :feature:`800` Add ``capture_buffer_size`` kwarg to ``fabric.operations.run``/``fabric.operations.sudo`` so users can limit memory usage in situations where subprocesses generate very large amounts of stdout/err. Thanks to Jordan Starcher for the report & Omri Bahumi for an early version of the patchset. * :feature:`1161` Add ``use_sudo`` kwarg to ``fabric.operations.reboot``. Credit: Bryce Verdier. 
* :support:`943 backported` Tweak ``env.warn_only`` docs to note that it applies to all operations, not just ``run``/``sudo``. Thanks ``@akitada``. * :feature:`932` Add a ``temp_dir`` kwarg to ``fabric.contrib.files.upload_template`` which is passed into its inner ``fabric.operations.put`` call. Thanks to ``@nburlett`` for the patch. * :support:`1257 backported` Add notes to the usage docs for ``fab`` regarding the program's exit status. Credit: ``@koalaman``. * :feature:`1261` Expose Paramiko's Kerberos functionality as Fabric config vars & command-line options. Thanks to Ramanan Sivaranjan for catch & patch, and to Johannes Löthberg & Michael Bennett for additional testing. * :feature:`1271` Allow users whose fabfiles use ``fabric.colors`` to disable colorization at runtime by specifying ``FABRIC_DISABLE_COLORS=1`` (or any other non-empty value). Credit: Eric Berg. * :feature:`1326` Make ``fabric.contrib.project.rsync_project`` aware of ``env.gateway``, using a ``ProxyCommand`` under the hood. Credit: David Rasch. * :support:`1359` Add a more-visible top-level ``CHANGELOG.rst`` pointing users to the actual changelog stored within the Sphinx directory tree. Thanks to Jonathan Vanasco for catch & patch. * :feature:`1388` Expose Jinja's ``keep_trailing_newline`` parameter in ``fabric.contrib.files.upload_template`` so users can force template renders to preserve trailing newlines. Thanks to Chen Lei for the patch. * :bug:`1389 major` Gently overhaul SSH port derivation so it's less surprising; previously, any non-default value stored in ``env.port`` was overriding all SSH-config derived values. See the API docs for ``fabric.network.normalize`` for details on how it now behaves. Thanks to Harry Weppner for catch & patch. * :support:`1454 backported` Remove use of ``:option:`` directives in the changelog, it's currently broken in modern Sphinx & doesn't seem to have actually functioned on Renaissance-era Sphinx either. 
* :bug:`1365` (via :issue:`1372`) Classic-style fabfiles (ones not using ``@task``) erroneously included custom exception subclasses when collecting tasks. This is now fixed thanks to ``@mattvonrocketstein``. * :bug:`1348` (via :issue:`1361`) Fix a bug in ``fabric.operations.get`` where remote file paths containing Python string formatting escape codes caused an exception. Thanks to ``@natecode`` for the report and Bradley Spink for the fix. * :release:`1.10.2 <2015-06-19>` * :support:`1325` Clarify ``fabric.operations.put`` docs re: the ``mode`` argument. Thanks to ``@mjmare`` for the catch. * :bug:`1318` Update functionality added in :issue:`1213` so abort error messages don't get printed twice (once by us, once by ``sys.exit``) but the annotated exception error message is retained. Thanks to Felix Almeida for the report. * :bug:`1305` (also :issue:`1313`) Fix a couple minor issues with the operation of & demo code for the ``JobQueue`` class. Thanks to ``@dioh`` and Horst Gutmann for the report & Cameron Lane for the patch. * :bug:`980` (also :issue:`1312`) Redirect output of ``cd`` to ``/dev/null`` so users enabling bash's ``CDPATH`` (or similar features in other shells) don't have polluted output captures. Thanks to Alex North-Keys for the original report & Steve Ivy for the fix. * :bug:`1289` Fix "NameError: free variable referenced before assignment in enclosing scope". Thanks to ``@SamuelMarks`` for catch & patch. * :bug:`1286` (also :issue:`971`, :issue:`1032`) Recursively unwrap decorators instead of only unwrapping a single decorator level, when obtaining task docstrings. Thanks to Avishai Ish-Shalom for the original report & Max Kovgan for the patch. * :bug:`1273` Fix issue with ssh/config not having a cross-platform default path. Thanks to ``@SamuelMarks`` for catch & patch. * :feature:`1200` Introduced ``exceptions`` output level, so users don't have to deal with the debug output just to see tracebacks. 
* :support:`1239` Update README to work better under raw docutils so the example code block is highlighted as Python on PyPI (and not just on our Sphinx-driven website). Thanks to Marc Abramowitz. * :release:`1.10.1 <2014-12-19>` * :release:`1.9.2 <2014-12-19>` * :bug:`1201` Don't naively glob all ``fabric.operations.get`` targets - only glob actual directories. This avoids incorrectly yielding permission errors in edge cases where a requested file is within a directory lacking the read permission bit. Thanks to Sassa Nf for the original report. * :bug:`1019` (also :issue:`1022`, :issue:`1186`) Fix "is a tty" tests in environments where streams (eg ``sys.stdout``) have been replaced with objects lacking a ``.isatty()`` method. Thanks to Miki Tebeka for the original report, Lele Long for a subsequent patch, and Julien Phalip for the final/merged patch. * :support:`1213 backported` Add useful exception message to the implicit ``SystemExit`` raised by Fabric's use of ``sys.exit`` inside the ``fabric.api.abort`` function. This allows client code catching ``SystemExit`` to have better introspection into the error. Thanks to Ioannis Panousis. * :bug:`1228` Update the ``CommandTimeout`` class so it has a useful ``str`` instead of appearing blank when caught by Fabric's top level exception handling. Catch & patch from Tomaz Muraus. * :bug:`1180` Fix issue with unicode steam outputs crashing if stream encoding type is None. Thanks to ``@joekiller`` for catch & patch. * :support:`958 backported` Remove the Git SHA portion of our version string generation; it was rarely useful & occasionally caused issues for users with non-Git-based source checkouts. * :support:`1229 backported` Add some missing API doc hyperlink references. Thanks to Tony Narlock. * :bug:`1226` Update ``fabric.operations.get`` to ensure that ``env.user`` has access to tempfiles before changing permissions. Also corrected permissions from 404 to 0400 to match comment. 
Patch by Curtis Mattoon; original report from Daniel Watkins. * :release:`1.10.0 <2014-09-04>` * :bug:`1188 major` Update ``fabric.operations.local`` to close non-pipe file descriptors in the child process so subsequent calls to ``fabric.operations.local`` aren't blocked on e.g. already-connected network sockets. Thanks to Tolbkni Kao for catch & patch. * :feature:`700` Added ``use_sudo`` and ``temp_dir`` params to ``fabric.operations.get``. This allows downloading files normally not accessible to the user using ``sudo``. Thanks to Jason Coombs for initial report and to Alex Plugaru for the patch (:issue:`1121`). * :feature:`1098` Add support for dict style roledefs. Thanks to Jonas Lundberg. * :feature:`1090` Add option to skip unknown tasks. Credit goes to Jonas Lundberg. * :feature:`975` Fabric can now be invoked via ``python -m fabric`` in addition to the typical use of the ``fab`` entrypoint. Patch courtesy of Jason Coombs. .. note:: This functionality is only available under Python 2.7. * :release:`1.9.1 <2014-08-06>` * :release:`1.8.5 <2014-08-06>` * :release:`1.7.5 <2014-08-06>` * :bug:`1165` Prevent infinite loop condition when a gateway host is enabled & the same host is in the regular target host list. Thanks to ``@CzBiX`` for catch & patch. * :bug:`1147` Use ``stat`` instead of ``lstat`` when testing directory-ness in the SFTP module. This allows recursive downloads to avoid recursing into symlinks unexpectedly. Thanks to Igor Kalnitsky for the patch. * :bug:`1146` Fix a bug where ``fabric.contrib.files.upload_template`` failed to honor ``lcd`` when ``mirror_local_mode`` is ``True``. Thanks to Laszlo Marai for catch & patch. * :bug:`1134` Skip bad hosts when the tasks are executed in parallel. Thanks to Igor Maravić ``@i-maravic``. * :bug:`852` Fix to respect ``template_dir`` for non Jinja2 templates in ``fabric.contrib.files.upload_template``. Thanks to Adam Kowalski for the patch and Alex Plugaru for the initial test case. 
* :bug:`1096` Encode Unicode text appropriately for its target stream object to avoid issues on non-ASCII systems. Thanks to Toru Uetani for the original patch. * :bug:`1059` Update IPv6 support to work with link-local address formats. Fix courtesy of ``@obormot``. * :bug:`1026` Fix a typo preventing quiet operation of ``fabric.contrib.files.is_link``. Caught by ``@dongweiming``. * :bug:`600` Clear out connection caches in full when prepping parallel-execution subprocesses. This avoids corner cases causing hangs/freezes due to client/socket reuse. Thanks to Ruslan Lutsenko for the initial report and Romain Chossart for the suggested fix. * :bug:`1167` Add Jinja to ``test_requires`` in ``setup.py`` for the couple of newish tests that now require it. Thanks to Kubilay Kocak for the catch. * :release:`1.9.0 <2014-06-08>` * :feature:`1078` Add ``.command`` and ``.real_command`` attributes to ``local`` return value. Thanks to Alexander Teves (``@alexanderteves``) and Konrad Hałas (``@konradhalas``). * :feature:`938` Add an env var ``env.effective_roles`` specifying roles used in the currently executing command. Thanks to Piotr Betkier for the patch. * :feature:`1101` Reboot operation now supports custom command. Thanks to Jonas Lejon. * :support:`1106` Fix a misleading/ambiguous example snippet in the ``fab`` usage docs to be clearer. Thanks to ``@zed``. * :release:`1.8.4 <2014-06-08>` * :release:`1.7.4 <2014-06-08>` * :bug:`898` Treat paths that begin with tilde "~" as absolute paths instead of relative. Thanks to Alex Plugaru for the patch and Dan Craig for the suggestion. * :support:`1105 backported` Enhance ``setup.py`` to allow Paramiko 1.13+ under Python 2.6+. Thanks to ``@Arfrever`` for catch & patch. * :release:`1.8.3 <2014-03-21>` * :release:`1.7.3 <2014-03-21>` * :support:`- backported` Modified packaging data to reflect that Fabric requires Paramiko < 1.13 (which dropped Python 2.5 support.) 
* :feature:`1082` Add ``pty`` passthrough kwarg to ``fabric.contrib.files.upload_template``. * :release:`1.8.2 <2014-02-14>` * :release:`1.7.2 <2014-02-14>` * :bug:`955` Quote directories created as part of ``put``'s recursive directory uploads when ``use_sudo=True`` so directories with shell meta-characters (such as spaces) work correctly. Thanks to John Harris for the catch. * :bug:`917` Correct an issue with ``put(use_sudo=True, mode=xxx)`` where the ``chmod`` was trying to apply to the wrong location. Thanks to Remco (``@nl5887``) for catch & patch. * :bug:`1046` Fix typo preventing use of ProxyCommand in some situations. Thanks to Keith Yang. * :release:`1.8.1 <2013-12-24>` * :release:`1.7.1 <2013-12-24>` * :release:`1.6.4 <2013-12-24>` 956, 957 * :release:`1.5.5 <2013-12-24>` 956, 957 * :bug:`956` Fix pty size detection when running inside Emacs. Thanks to ``@akitada`` for catch & patch. * :bug:`957` Fix bug preventing use of ``env.gateway`` with targets requiring password authentication. Thanks to Daniel González, ``@Bengrunt`` and ``@adrianbn`` for their bug reports. * :feature:`741` Add ``env.prompts`` dictionary, allowing users to set up custom prompt responses (similar to the built-in sudo prompt auto-responder.) Thanks to Nigel Owens and David Halter for the patch. * :bug:`965 major` Tweak IO flushing behavior when in linewise (& thus parallel) mode so interwoven output is less frequent. Thanks to ``@akidata`` for catch & patch. * :bug:`948` Handle connection failures due to server load and try connecting to hosts a number of times specified in ``env.connection_attempts``. * :release:`1.8.0 <2013-09-20>` * :feature:`931` Allow overriding of ``abort`` behavior via a custom exception-returning callable set as ``env.abort_exception``. Thanks to Chris Rose for the patch. * :support:`984 backported` Make this changelog easier to read! Now with per-release sections, generated automatically from the old timeline source format. 
* :feature:`910` Added a keyword argument to rsync_project to configure the default options. Thanks to ``@moorepants`` for the patch. * :release:`1.7.0 <2013-07-26>` * :release:`1.6.2 <2013-07-26>` * :feature:`925` Added ``contrib.files.is_link``. Thanks to ``@jtangas`` for the patch. * :feature:`922` Task argument strings are now displayed when using ``fab -d``. Thanks to Kevin Qiu for the patch. * :bug:`912` Leaving ``template_dir`` un-specified when using ``upload_template`` in Jinja mode used to cause ``'NoneType' has no attribute 'startswith'`` errors. This has been fixed. Thanks to Erick Yellott for catch & to Erick Yellott + Kevin Williams for patches. * :feature:`924` Add new env var option ``colorize-errors`` to enable coloring errors and warnings. Thanks to Aaron Meurer for the patch. * :bug:`593` Non-ASCII character sets in Jinja templates rendered within ``upload_template`` would cause ``UnicodeDecodeError`` when uploaded. This has been addressed by encoding as ``utf-8`` prior to upload. Thanks to Sébastien Fievet for the catch. * :feature:`908` Support loading SSH keys from memory. Thanks to Caleb Groom for the patch. * :bug:`171` Added missing cross-references from ``env`` variables documentation to corresponding command-line options. Thanks to Daniel D. Beck for the contribution. * :bug:`884` The password cache feature was not working correctly with password-requiring SSH gateway connections. That's fixed now. Thanks to Marco Nenciarini for the catch. * :feature:`826` Enable sudo extraction of compressed archive via ``use_sudo`` kwarg in ``upload_project``. Thanks to ``@abec`` for the patch. * :bug:`694 major` Allow users to work around ownership issues in the default remote login directory: add ``temp_dir`` kwarg for explicit specification of which "bounce" folder to use when calling ``put`` with ``use_sudo=True``. Thanks to Devin Bayer for the report & Dieter Plaetinck / Jesse Myers for suggesting the workaround. 
* :bug:`882` Fix a ``get`` bug regarding spaces in remote working directory names. Thanks to Chris Rose for catch & patch. * :release:`1.6.1 <2013-05-23>` * :bug:`868` Substantial speedup of parallel tasks by removing an unnecessary blocking timeout in the ``JobQueue`` loop. Thanks to Simo Kinnunen for the patch. * :bug:`328` ``lcd`` was no longer being correctly applied to ``upload_template``; this has been fixed. Thanks to Joseph Lawson for the catch. * :feature:`812` Add ``use_glob`` option to ``put`` so users trying to upload real filenames containing glob patterns (``*``, ``[`` etc) can disable the default globbing behavior. Thanks to Michael McHugh for the patch. * :bug:`864 major` Allow users to disable Fabric's auto-escaping in ``run``/``sudo``. Thanks to Christian Long and Michael McHugh for the patch. * :bug:`870` Changes to shell env var escaping highlighted some extraneous and now damaging whitespace in ``with path():``. This has been removed and a regression test added. * :bug:`871` Use of string mode values in ``put(local, remote, mode="NNNN")`` would sometimes cause ``Unsupported operand`` errors. This has been fixed. * :bug:`84 major` Fixed problem with missing -r flag in Mac OS X sed version. Thanks to Konrad Hałas for the patch. * :bug:`861` Gracefully handle situations where users give a single string literal to ``env.hosts``. Thanks to Bill Tucker for catch & patch. * :bug:`367` Expand paths with tilde inside (``contrib.files``). Thanks to Konrad Hałas for catch & patch. * :feature:`845 backported` Downstream synchronization option implemented for ``fabric.contrib.project.rsync_project``. Thanks to Antonio Barrero for the patch. * :release:`1.6.0 <2013-03-01>` * :release:`1.5.4 <2013-03-01>` * :bug:`844` Account for SSH config overhaul in Paramiko 1.10 by e.g. updating treatment of ``IdentityFile`` to handle multiple values. 
**This and related SSH config parsing changes are backwards incompatible**; we are including them in this release because they do fix incorrect, off-spec behavior. * :bug:`843` Ensure string ``pool_size`` values get run through ``int()`` before deriving final result (stdlib ``min()`` has odd behavior here...). Thanks to Chris Kastorff for the catch. * :bug:`839` Fix bug in ``fabric.contrib.project.rsync_project`` where IPv6 addresses were not always correctly detected. Thanks to Antonio Barrero for catch & patch. * :bug:`587` Warn instead of aborting when ``env.use_ssh_config`` is True but the configured SSH conf file doesn't exist. This allows multi-user fabfiles to enable SSH config without causing hard stops for users lacking SSH configs. Thanks to Rodrigo Pimentel for the report. * :feature:`821` Add ``fabric.context_managers.remote_tunnel`` to allow reverse SSH tunneling (exposing locally-visible network ports to the remote end). Thanks to Giovanni Bajo for the patch. * :feature:`823` Add ``env.remote_interrupt`` which controls whether Ctrl-C is forwarded to the remote end or is captured locally (previously, only the latter behavior was implemented). Thanks to Geert Jansen for the patch. * :release:`1.5.3 <2013-01-28>` * :bug:`806` Force strings given to ``getpass`` during password prompts to be ASCII, to prevent issues on some platforms when Unicode is encountered. Thanks to Alex Louden for the patch. * :bug:`805` Update ``fabric.context_managers.shell_env`` to play nice with Windows (7, at least) systems and ``fabric.operations.local``. Thanks to Fernando Macedo for the patch. * :bug:`654` Parallel runs whose sum total of returned data was large (e.g. large return values from the task, or simply a large number of hosts in the host list) were causing frustrating hangs. This has been fixed. * :feature:`402` Attempt to detect stale SSH sessions and reconnect when they arise. Thanks to ``@webengineer`` for the patch. 
* :bug:`791` Cast ``fabric.operations.reboot``'s ``wait`` parameter to a numeric type in case the caller submitted a string by mistake. Thanks to Thomas Schreiber for the patch. * :bug:`703 major` Add a ``shell`` kwarg to many methods in ``fabric.contrib.files`` to help avoid conflicts with ``fabric.context_managers.cd`` and similar. Thanks to ``@mikek`` for the patch. * :feature:`730` Add ``env.system_known_hosts``/``--system-known-hosts`` to allow loading a user-specified system-level SSH ``known_hosts`` file. Thanks to Roy Smith for the patch. * :release:`1.5.2 <2013-01-15>` * :feature:`818` Added ``env.eagerly_disconnect`` option to help prevent pile-up of many open connections. * :feature:`706` Added ``env.tasks``, returning list of tasks to be executed by current ``fab`` command. * :bug:`766` Use the variable name of a new-style ``fabric.tasks.Task`` subclass object when the object name attribute is undefined. Thanks to ``@todddeluca`` for the patch. * :bug:`604` Fixed wrong treatment of backslashes in put operation when uploading directory tree on Windows. Thanks to Jason Coombs for the catch and ``@diresys`` & Oliver Janik for the patch. * :bug:`792` The newish ``fabric.context_managers.shell_env`` context manager was incorrectly omitted from the ``fabric.api`` import endpoint. This has been remedied. Thanks to Vishal Rana for the catch. * :feature:`735` Add ``ok_ret_codes`` option to ``env`` to allow alternate return codes to be treated as "ok". Thanks to Andy Kraut for the pull request. * :bug:`775` Shell escaping was incorrectly applied to the value of ``$PATH`` updates in our shell environment handling, causing (at the very least) ``fabric.operations.local`` binary paths to become inoperable in certain situations. This has been fixed. * :feature:`787` Utilize new Paramiko feature allowing us to skip the use of temporary local files when using file-like objects in ``fabric.operations.get``/``fabric.operations.put``. 
* :feature:`249` Allow specification of remote command timeout value by setting ``env.command_timeout``. Thanks to Paul McMillan for suggestion & initial patch. * Added current host string to prompt abort error messages. * :release:`1.5.1 <2012-11-15>` * :bug:`776` Fixed serious-but-non-obvious bug in direct-tcpip driven gatewaying (e.g. that triggered by ``-g`` or ``env.gateway``.) Should work correctly now. * :bug:`771` Sphinx autodoc helper ``fabric.docs.unwrap_tasks`` didn't play nice with ``@task(name=xxx)`` in some situations. This has been fixed. * :release:`1.5.0 <2012-11-06>` * :release:`1.4.4 <2012-11-06>` * :feature:`38` (also :issue:`698`) Implement both SSH-level and ``ProxyCommand``-based gatewaying for SSH traffic. (This is distinct from tunneling non-SSH traffic over the SSH connection, which is :issue:`78` and not implemented yet.) * Thanks in no particular order to Erwin Bolwidt, Oskari Saarenmaa, Steven Noonan, Vladimir Lazarenko, Lincoln de Sousa, Valentino Volonghi, Olle Lundberg and Github user ``@acrish`` for providing the original patches to both Fabric and Paramiko. * :feature:`684 backported` (also :issue:`569`) Update how ``fabric.decorators.task`` wraps task functions to preserve additional metadata; this allows decorated functions to play nice with Sphinx autodoc. Thanks to Jaka Hudoklin for catch & patch. * :support:`103` (via :issue:`748`) Long standing Sphinx autodoc issue requiring error-prone duplication of function signatures in our API docs has been fixed. Thanks to Alex Morega for the patch. * :bug:`767 major` Fix (and add test for) regression re: having linewise output automatically activate when parallelism is in effect. Thanks to Alexander Fortin and Dustin McQuay for the bug reports. * :bug:`736 major` Ensure context managers that build env vars play nice with ``contextlib.nested`` by deferring env var reference to entry time, not call time. Thanks to Matthew Tretter for catch & patch. 
* :feature:`763` Add ``--initial-password-prompt`` to allow prefilling the password cache at the start of a run. Great for sudo-powered parallel runs. * :feature:`665` (and #629) Update ``fabric.contrib.files.upload_template`` to have a more useful return value, namely that of its internal ``fabric.operations.put`` call. Thanks to Miquel Torres for the catch & Rodrigue Alcazar for the patch. * :feature:`578` Add ``name`` argument to ``fabric.decorators.task`` to allow overriding of the default "function name is task name" behavior. Thanks to Daniel Simmons for catch & patch. * :feature:`761` Allow advanced users to parameterize ``fabric.main.main()`` to force loading of specific fabfiles. * :bug:`749` Gracefully work around calls to ``fabric.version`` on systems lacking ``/bin/sh`` (which causes an ``OSError`` in ``subprocess.Popen`` calls.) * :feature:`723` Add the ``group=`` argument to ``fabric.operations.sudo``. Thanks to Antti Kaihola for the pull request. * :feature:`725` Updated ``fabric.operations.local`` to allow override of which local shell is used. Thanks to Mustafa Khattab. * :bug:`704 major` Fix up a bunch of Python 2.x style ``print`` statements to be forwards compatible. Thanks to Francesco Del Degan for the patch. * :feature:`491` (also :feature:`385`) IPv6 host string support. Thanks to Max Arnold for the patch. * :feature:`699` Allow ``name`` attribute on file-like objects for get/put. Thanks to Peter Lyons for the pull request. * :bug:`711 major` ``fabric.sftp.get`` would fail when filenames had % in their path. Thanks to John Begeman. * :bug:`702 major` ``fabric.operations.require`` failed to test for "empty" values in the env keys it checks (e.g. ``require('a-key-whose-value-is-an-empty-list')`` would register a successful result instead of alerting that the value was in fact empty.) This has been fixed, thanks to Rich Schumacher. 
* :bug:`718` ``isinstance(foo, Bar)`` is used in ``fabric.main`` instead of ``type(foo) == Bar`` in order to fix some edge cases. Thanks to Mikhail Korobov. * :bug:`693` Fixed edge case where ``abort`` driven failures within parallel tasks could result in a top level exception (a ``KeyError``) regarding error handling. Thanks to Marcin Kuźmiński for the report. * :support:`681 backported` Fixed outdated docstring for ``fabric.decorators.runs_once`` which claimed it would get run multiple times in parallel mode. That behavior was fixed in an earlier release but the docs were not updated. Thanks to Jan Brauer for the catch. * :release:`1.4.3 <2012-07-06>` * :release:`1.3.8 <2012-07-06>` * :feature:`263` Shell environment variable support for ``fabric.operations.run``/``fabric.operations.sudo`` added in the form of the ``fabric.context_managers.shell_env`` context manager. Thanks to Oliver Tonnhofer for the original pull request, and to Kamil Kisiel for the final implementation. * :feature:`669` Updates to our Windows compatibility to rely more heavily on cross-platform Python stdlib implementations. Thanks to Alexey Diyan for the patch. * :bug:`671` ``reject-unknown-hosts`` sometimes resulted in a password prompt instead of an abort. This has been fixed. Thanks to Roy Smith for the report. * :bug:`659` Update docs to reflect that ``fabric.operations.local`` currently honors ``env.path``. Thanks to `@floledermann <https://github.com/floledermann>`_ for the catch. * :bug:`652` Show available commands when aborting on invalid command names. * :support:`651 backported` Added note about nesting ``with`` statements on Python 2.6+. Thanks to Jens Rantil for the patch. * :bug:`649` Don't swallow non-``abort``-driven exceptions in parallel mode. Fabric correctly printed such exceptions, and returned them from ``fabric.tasks.execute``, but did not actually cause the child or parent processes to halt with a nonzero status. This has been fixed. 
``fabric.tasks.execute`` now also honors ``env.warn_only`` so users may still opt to call it by hand and inspect the returned exceptions, instead of encountering a hard stop. Thanks to Matt Robenolt for the catch. * :feature:`241` Add the command executed as a ``.command`` attribute to the return value of ``fabric.operations.run``/``fabric.operations.sudo``. (Also includes a second attribute containing the "real" command executed, including the shell wrapper and any escaping.) * :feature:`646` Allow specification of which local streams to use when ``fabric.operations.run``/``fabric.operations.sudo`` print the remote stdout/stderr, via e.g. ``run("command", stderr=sys.stdout)``. * :support:`645 backported` Update Sphinx docs to work well when run out of a source tarball as opposed to a Git checkout. Thanks again to ``@Arfrever`` for the catch. * :support:`640 backported` (also :issue:`644`) Update packaging manifest so sdist tarballs include all necessary test & doc files. Thanks to Mike Gilbert and ``@Arfrever`` for catch & patch. * :feature:`627` Added convenient ``quiet`` and ``warn_only`` keyword arguments to ``fabric.operations.run``/``fabric.operations.sudo`` which are aliases for ``settings(hide('everything'), warn_only=True)`` and ``settings(warn_only=True)``, respectively. (Also added corresponding context managers.) Useful for remote program calls which are expected to fail and/or whose output doesn't need to be shown to users. * :feature:`633` Allow users to turn off host list deduping by setting ``env.dedupe_hosts`` to ``False``. This enables running the same task multiple times on a single host, which was previously not possible. * :support:`634 backported` Clarified that ``fabric.context_managers.lcd`` does no special handling re: the user's current working directory, and thus relative paths given to it will be relative to ``os.getcwd()``. Thanks to `@techtonik <https://github.com/techtonik>`_ for the catch. 
* :release:`1.4.2 <2012-05-07>` * :release:`1.3.7 <2012-05-07>` * :bug:`562` Agent forwarding would error out or freeze when multiple uses of the forwarded agent were used per remote invocation (e.g. a single ``fabric.operations.run`` command resulting in multiple Git or SVN checkouts.) This has been fixed thanks to Steven McDonald and GitHub user ``@lynxis``. * :support:`626 backported` Clarity updates to the tutorial. Thanks to GitHub user ``m4z`` for the patches. * :bug:`625` ``fabric.context_managers.hide``/``fabric.context_managers.show`` did not correctly restore prior display settings if an exception was raised inside the block. This has been fixed. * :bug:`624` Login password prompts did not always display the username being authenticated for. This has been fixed. Thanks to Nick Zalutskiy for catch & patch. * :bug:`617` Fix the ``clean_revert`` behavior of ``fabric.context_managers.settings`` so it doesn't ``KeyError`` for newly created settings keys. Thanks to Chris Streeter for the catch. * :feature:`615` Updated ``fabric.operations.sudo`` to honor the new setting ``env.sudo_user`` as a default for its ``user`` kwarg. * :bug:`616` Add port number to the error message displayed upon connection failures. * :bug:`609` (and :issue:`564`) Document and clean up ``env.sudo_prefix`` so it can be more easily modified by users facing uncommon use cases. Thanks to GitHub users ``3point2`` for the cleanup and ``SirScott`` for the documentation catch. * :bug:`610` Change detection of ``env.key_filename``'s type (added as part of SSH config support in 1.4) so it supports arbitrary iterables. Thanks to Brandon Rhodes for the catch. * :release:`1.4.1 <2012-04-04>` * :release:`1.3.6 <2012-04-04>` * :bug:`608` Add ``capture`` kwarg to ``fabric.contrib.project.rsync_project`` to aid in debugging rsync problems. * :bug:`607` Allow ``fabric.operations.local`` to display stdout/stderr when it warns/aborts, if it was capturing them. 
* :bug:`395` Added an FAQ entry detailing how to handle init scripts which misbehave when a pseudo-tty is allocated. * :bug:`568` ``fabric.tasks.execute`` allowed too much of its internal state changes (to variables such as ``env.host_string`` and ``env.parallel``) to persist after execution completed; this caused a number of different incorrect behaviors. ``fabric.tasks.execute`` has been overhauled to clean up its own state changes -- while preserving any state changes made by the task being executed. * :bug:`584` ``fabric.contrib.project.upload_project`` did not take explicit remote directory location into account when untarring, and now uses ``fabric.context_managers.cd`` to address this. Thanks to Ben Burry for the patch. * :bug:`458` ``fabric.decorators.with_settings`` did not perfectly match ``fabric.context_managers.settings``, re: ability to inline additional context managers. This has been corrected. Thanks to Rory Geoghegan for the patch. * :bug:`499` ``contrib.files.first`` used an outdated function signature in its wrapped ``fabric.contrib.files.exists`` call. This has been fixed. Thanks to Massimiliano Torromeo for catch & patch. * :bug:`551` ``--list`` output now detects terminal window size and truncates (or doesn't truncate) accordingly. Thanks to Horacio G. de Oro for the initial pull request. * :bug:`572` Parallel task aborts (as opposed to unhandled exceptions) now correctly print their abort messages instead of tracebacks, and cause the parent process to exit with the correct (nonzero) return code. Thanks to Ian Langworth for the catch. * :bug:`306` Remote paths now use posixpath for a separator. Thanks to Jason Coombs for the patch. * :release:`1.4.0 <2012-02-13>` * :release:`1.3.5 <2012-02-13>` * :release:`1.2.6 <2012-02-13>` * :release:`1.1.8 <2012-02-13>` * :bug:`495` Fixed documentation example showing how to subclass ``fabric.tasks.Task``. Thanks to Brett Haydon for the catch and Mark Merritt for the patch. 
* :bug:`410` Fixed a bug where using the ``fabric.decorators.task`` decorator inside/under another decorator such as ``fabric.decorators.hosts`` could cause that task to become invalid when invoked by name (due to how old-style vs new-style tasks are detected.) Thanks to Dan Colish for the initial patch. * :feature:`559` ``fabric.contrib.project.rsync_project`` now allows users to append extra SSH-specific arguments to ``rsync``'s ``--rsh`` flag. * :feature:`138` ``env.port`` may now be written to at fabfile module level to set a default nonstandard port number. Previously this value was read-only. * :feature:`3` Fabric can now load a subset of SSH config functionality directly from your local ``~/.ssh/config`` if ``env.use_ssh_config`` is set to ``True``. See ``ssh-config`` for details. Thanks to Kirill Pinchuk for the initial patch. * :feature:`12` Added the ability to try connecting multiple times to temporarily-down remote systems, instead of immediately failing. (Default behavior is still to only try once.) See ``env.timeout`` and ``env.connection_attempts`` for controlling both connection timeouts and total number of attempts. ``fabric.operations.reboot`` has also been overhauled (but practically deprecated -- see its updated docs.) * :feature:`474` ``fabric.tasks.execute`` now allows you to access the executed task's return values, by itself returning a dictionary whose keys are the host strings executed against. * :bug:`487 major` Overhauled the regular expression escaping performed in ``fabric.contrib.files.append`` and ``fabric.contrib.files.contains`` to try and handle more corner cases. Thanks to Neilen Marais for the patch. * :support:`532` Reorganized and cleaned up the output of ``fab --help``. * :feature:`8` Added ``--skip-bad-hosts``/``env.skip_bad_hosts`` option to allow skipping past temporarily down/unreachable hosts. * :feature:`13` Env vars may now be set at runtime via the new ``--set`` command-line flag. 
* :feature:`506` A new output alias, ``commands``, has been added, which allows hiding remote stdout and local "running command X" output lines. * :feature:`72` SSH agent forwarding support has made it into Fabric's SSH library, and hooks for using it have been added (disabled by default; use ``-A`` or ``env.forward_agent`` to enable.) Thanks to Ben Davis for porting an existing Paramiko patch to ``ssh`` and providing the necessary tweak to Fabric. * :release:`1.3.4 <2012-01-12>` * :bug:`492` ``@parallel`` did not automatically trigger linewise output, as was intended. This has been fixed. Thanks to Brandon Huey for the catch. * :bug:`510` Parallel mode is incompatible with user input, such as password/hostname prompts, and was causing cryptic ``Operation not supported by device`` errors when such prompts needed to be displayed. This behavior has been updated to cleanly and obviously ``abort`` instead. * :bug:`494` Fixed regression bug affecting some ``env`` values such as ``env.port`` under parallel mode. Symptoms included ``fabric.contrib.project.rsync_project`` bailing out due to a None port value when run under ``@parallel``. Thanks to Rob Terhaar for the report. * :bug:`339` Don't show imported ``fabric.colors`` members in ``--list`` output. Thanks to Nick Trew for the report. * :release:`1.3.3 <2011-11-23>` * :release:`1.2.5 <2011-11-23>` * :release:`1.1.7 <2011-11-23>` * :bug:`441` Specifying a task module as a task on the command line no longer blows up but presents the usual "no task by that name" error message instead. Thanks to Mitchell Hashimoto for the catch. * :bug:`475` Allow escaping of equals signs in per-task args/kwargs. * :bug:`450` Improve traceback display when handling ``ImportError`` for dependencies. Thanks to David Wolever for the patches. * :bug:`446` Add QNX to list of secondary-case ``fabric.contrib.files.sed`` targets. Thanks to Rodrigo Madruga for the tip. * :bug:`443` ``fabric.contrib.files.exists`` didn't expand tildes; now it does. 
Thanks to Riccardo Magliocchetti for the patch. * :bug:`437` ``fabric.decorators.with_settings`` now correctly preserves the wrapped function's docstring and other attributes. Thanks to Eric Buckley for the catch and Luke Plant for the patch. * :bug:`400` Handle corner case of systems where ``pwd.getpwuid`` raises ``KeyError`` for the user's UID instead of returning a valid string. Thanks to Dougal Matthews for the catch. * :bug:`397` Some poorly behaved objects in third party modules triggered exceptions during Fabric's "classic or new-style task?" test. A fix has been added which tries to work around these. * :bug:`341` ``fabric.contrib.files.append`` incorrectly failed to detect that the line(s) given already existed in files hidden to the remote user, and continued appending every time it ran. This has been fixed. Thanks to Dominique Peretti for the catch and Martin Vilcans for the patch. * :bug:`342` Combining ``fabric.context_managers.cd`` with ``fabric.operations.put`` and its ``use_sudo`` keyword caused an unrecoverable error. This has been fixed. Thanks to Egor M for the report. * :bug:`482` Parallel mode should imply linewise output; omission of this behavior was an oversight. * :bug:`230` Fix regression re: combo of no fabfile & arbitrary command use. Thanks to Ali Saifee for the catch. * :release:`1.3.2 <2011-11-07>` * :release:`1.2.4 <2011-11-07>` * :release:`1.1.6 <2011-11-07>` * :support:`459 backported` Update our ``setup.py`` files to note that PyCrypto released 2.4.1, which fixes the setuptools problems. * :support:`467 backported` (also :issue:`468`, :issue:`469`) Handful of documentation clarification tweaks. Thanks to Paul Hoffman for the patches. * :release:`1.3.1 <2011-10-24>` * :bug:`457` Ensured that Fabric fast-fails parallel tasks if any child processes encountered errors. Previously, multi-task invocations would continue to the 2nd, etc task when failures occurred, which does not fit with how Fabric usually behaves. 
Thanks to Github user ``sdcooke`` for the report and Morgan Goose for the fix. * :release:`1.3.0 <2011-10-23>` * :release:`1.2.3 <2011-10-23>` * :release:`1.1.5 <2011-10-23>` * :release:`1.0.5 <2011-10-23>` * :support:`275` To support an edge use case of the features released in :issue:`19`, and to lay the foundation for :issue:`275`, we have forked Paramiko into the `Python 'ssh' library <https://github.com/bitprophet/ssh>`_ and changed our dependency to it for Fabric 1.3 and higher. This may have implications for the more uncommon install use cases, and package maintainers, but we hope to iron out any issues as they come up. * :bug:`323` ``fabric.operations.put`` forgot how to expand leading tildes in the remote file path. This has been corrected. Thanks to Piet Delport for the catch. * :feature:`21` It is now possible, using the new ``fabric.tasks.execute`` API call, to execute task objects (by reference or by name) from within other tasks or in library mode. ``fabric.tasks.execute`` honors the other tasks' ``fabric.decorators.hosts``/``fabric.decorators.roles`` decorators, and also supports passing in explicit host and/or role arguments. * :feature:`19` Tasks may now be optionally executed in parallel. Please see the parallel execution docs for details. Major thanks to Morgan Goose for the initial implementation. * :bug:`182` During display of remote stdout/stderr, Fabric occasionally printed extraneous line prefixes (which in turn sometimes overwrote wrapped text.) This has been fixed. * :bug:`430` Tasks decorated with ``fabric.decorators.runs_once`` printed extraneous 'Executing...' status lines on subsequent invocations. This is noisy at best and misleading at worst, and has been corrected. Thanks to Jacob Kaplan-Moss for the report. * :release:`1.2.2 <2011-09-01>` * :release:`1.1.4 <2011-09-01>` * :release:`1.0.4 <2011-09-01>` * :bug:`252` ``fabric.context_managers.settings`` would silently fail to set ``env`` values for keys which did not exist outside the context manager block. 
It now works as expected. Thanks to Will Maier for the catch and suggested solution. * :support:`393 backported` Fixed a typo in an example code snippet in the task docs. Thanks to Hugo Garza for the catch. * :bug:`396` ``--shortlist`` broke after the addition of ``--list-format`` and no longer displayed the short list format correctly. This has been fixed. * :bug:`373` Re-added missing functionality preventing host exclusion from working correctly. * :bug:`303` Updated terminal size detection to correctly skip over non-tty stdout, such as when running ``fab taskname | other_command``. * :release:`1.2.1 <2011-08-21>` * :release:`1.1.3 <2011-08-21>` * :release:`1.0.3 <2011-08-21>` * :bug:`417` ``abort-on-prompts`` would incorrectly abort when set to True, even if both password and host were defined. This has been fixed. Thanks to Valerie Ishida for the report. * :support:`416 backported` Updated documentation to reflect move from Redmine to Github. * :bug:`389` Fixed/improved error handling when Paramiko import fails. Thanks to Brian Luft for the catch. * :release:`1.2.0 <2011-07-12>` * :feature:`22` Enhanced ``@task`` to add aliasing, per-module default tasks, and control over the wrapping task class. Thanks to Travis Swicegood for the initial work and collaboration. * :bug:`380` Improved unicode support when testing objects for being string-like. Thanks to Jiri Barton for catch & patch. * :support:`382` Experimental overhaul of changelog formatting & process to make supporting multiple lines of development less of a hassle. * :release:`1.1.2 <2011-07-07>` * :release:`1.0.2 <2011-06-24>` fabric-2.6.0/sites/www/changelog.rst000066400000000000000000000266521400143053200174040ustar00rootroot00000000000000========= Changelog ========= .. note:: Looking for the Fabric 1.x changelog? See :doc:`/changelog-v1`. 
- :release:`2.6.0 <2021-01-18>` - :bug:`- major` Fix a handful of issues in the handling and mocking of SFTP local paths and ``os.path`` members within :ref:`fabric.testing `; this should remove some occasional "useless Mocks" as well as hewing closer to the real behavior of things like ``os.path.abspath`` re: path normalization. - :feature:`-` When the ``local`` path argument to `Transfer.get ` contains nonexistent directories, they are now created instead of raising an error. .. warning:: This change introduces a new runtime dependency: ``pathlib2``. - :feature:`1868` Ported a feature from v1: interpolating the local path argument in `Transfer.get ` with connection and remote filepath attributes. For example, ``cxn.get(remote="/var/log/foo.log", local="{host}/")`` is now feasible for storing a file in per-host-named directories or files, and in fact `Group.get ` does this by default. - :feature:`1810` Add `put `/`get ` support to `~fabric.group.Group`. - :feature:`1999` Add `sudo ` support to `~fabric.group.Group`. Thanks to Bonnie Hardin for the report and to Winston Nolan for an early patchset. - :release:`2.5.0 <2019-08-06>` - :support:`-` Update minimum Invoke version requirement to ``>=1.3``. - :feature:`1985` Add support for explicitly closing remote subprocess' stdin when local stdin sees an EOF, by implementing a new command-runner method recently added to Invoke; this prevents remote programs that 'follow' stdin from blocking forever. - :bug:`- major` Anonymous/'remainder' subprocess execution (eg ``fab -H host -- command``, as opposed to the use of `Connection.run ` inside tasks) was explicitly specifying ``in_stream=False`` (i.e. "disconnect from stdin") under the hood; this was leftover from early development and prevented use of interactive (or other stdin-reading) programs via this avenue. It has been removed; ``cat 'text' | fab -H somehost -- reads-from-stdin`` (or similar use cases) should work again. 
- :support:`-` Removed unnecessary Cryptography version pin from packaging metadata; this was an artifact from early development. At this point in time, only Paramiko's own direct dependency specification should matter. This is unlikely to affect anybody's install, since Paramiko has required newer Cryptography versions for a number of years now. - :feature:`-` Allow specifying connection timeouts (already available via `~fabric.connection.Connection` constructor argument and configuration option) on the command-line, via :option:`-t/--connect-timeout <-t>`. - :feature:`1989` Reinstate command timeouts, by supporting the implementation of that feature in Invoke (`pyinvoke/invoke#539 `_). Thanks to Israel Fruchter for report and early patchset. - :release:`2.4.0 <2018-09-13>` - :release:`2.3.2 <2018-09-13>` - :release:`2.2.3 <2018-09-13>` - :release:`2.1.6 <2018-09-13>` - :release:`2.0.5 <2018-09-13>` - :feature:`1849` Add `Connection.from_v1 ` (and `Config.from_v1 `) for easy creation of modern ``Connection``/``Config`` objects from the currently configured Fabric 1.x environment. Should make upgrading piecemeal much easier for many use cases. - :feature:`1780` Add context manager behavior to `~fabric.group.Group`, to match the same feature in `~fabric.connection.Connection`. Feature request by István Sárándi. - :feature:`1709` Add `Group.close ` to allow closing an entire group's worth of connections at once. Patch via Johannes Löthberg. - :bug:`-` Fix a bug preventing tab completion (using the Invoke-level ``--complete`` flag) from completing task names correctly (behavior was to act as if there were never any tasks present, even if there was a valid fabfile nearby). - :bug:`1850` Skip over ``ProxyJump`` configuration directives in SSH config data when they would cause self-referential ``RecursionError`` (e.g. due to wildcard-using ``Host`` stanzas which include the jump server itself). Reported by Chris Adams. 
- :bug:`-` Some debug logging was reusing Invoke's logger object, generating log messages "named" after ``invoke`` instead of ``fabric``. This has been fixed by using Fabric's own logger everywhere instead. - :bug:`1852` Grant internal `~fabric.connection.Connection` objects created during ``ProxyJump`` based gateways/proxies a copy of the outer ``Connection``'s configuration object. This was not previously done, which among other things meant one could not fully disable SSH config file loading (as the internal ``Connection`` objects would revert to the default behavior). Thanks to Chris Adams for the report. - :release:`2.3.1 <2018-08-08>` - :bug:`- (2.3+)` Update the new functionality added for :issue:`1826` so it uses ``export``; without this, nontrivial shell invocations like ``command1 && command2`` end up only applying the env vars to the first command. - :release:`2.3.0 <2018-08-08>` - :feature:`1826` Add a new Boolean configuration and `~fabric.connection.Connection` parameter, ``inline_ssh_env``, which (when set to ``True``) changes how Fabric submits shell environment variables to remote servers; this feature helps work around commonly restrictive ``AcceptEnv`` settings on SSH servers. Thanks to Massimiliano Torromeo and Max Arnold for the reports. - :release:`2.2.2 <2018-07-31>` - :release:`2.1.5 <2018-07-31>` - :release:`2.0.4 <2018-07-31>` - :bug:`-` Implement ``__lt__`` on `~fabric.connection.Connection` so it can be sorted; this was overlooked when implementing things like ``__eq__`` and ``__hash__``. (No, sorting doesn't usually matter much for this object type, but when you gotta, you gotta...) - :support:`1819 backported` Moved example code from the README into the Sphinx landing page so that we could apply doctests; includes a bunch of corrections to invalid example code! Thanks to Antonio Feitosa for the initial catch & patch. 
- :bug:`1749` Improve `~fabric.transfer.Transfer.put` behavior when uploading to directory (vs file) paths, which was documented as working but had not been fully implemented. The local path's basename (or file-like objects' ``.name`` attribute) is now appended to the remote path in this case. Thanks to Peter Uhnak for the report. - :feature:`1831` Grant `~fabric.group.Group` (and subclasses) the ability to take arbitrary keyword arguments and pass them onto the internal `~fabric.connection.Connection` constructors. This allows code such as:: mygroup = Group('host1', 'host2', 'host3', user='admin') which was previously impossible without manually stuffing premade ``Connection`` objects into `Group.from_connections `. - :bug:`1762` Fix problem where lower configuration levels' setting of ``connect_kwargs.key_filename`` were being overwritten by the CLI ``--identity`` flag's value...even when that value was the empty list. CLI-given values are supposed to win, but not quite that hard. Reported by ``@garu57``. - :support:`1653 backported` Clarify `~fabric.transfer.Transfer` API docs surrounding remote file paths, such as the lack of tilde expansion (a buggy and ultimately unnecessary v1 feature). Thanks to ``@pint12`` for bringing it up. - :release:`2.2.1 <2018-07-18>` - :bug:`1824` The changes implementing :issue:`1772` failed to properly account for backwards compatibility with Invoke-level task objects. This has been fixed; thanks to ``@ilovezfs`` and others for the report. - :release:`2.2.0 <2018-07-13>` - :release:`2.1.4 <2018-07-13>` - :release:`2.0.3 <2018-07-13>` - :bug:`-` The `fabric.testing.fixtures.remote` pytest fixture was found to not be properly executing expectation/sanity tests on teardown; this was an oversight and has been fixed. - :support:`-` Updated the minimum required Invoke version to ``1.1``. - :feature:`1772` ``@hosts`` is back -- as a `@task `/`Task ` parameter of the same name. 
Acts much like a per-task :option:`--hosts`, but can optionally take dicts of `fabric.connection.Connection` kwargs as well as the typical shorthand host strings. .. note:: As of this change, we are now recommending the use of the new-in-this-release Fabric-level `@task `/`Task ` objects instead of their Invoke counterparts, even if you're not using the ``hosts`` kwarg -- it will help future-proof your code for similar feature-adds later, and generally be less confusing than having mixed Invoke/Fabric imports for these object types. - :feature:`1766` Reinstate support for use as ``python -m fabric``, which (as in v1) now behaves identically to invoking ``fab``. Thanks to ``@RupeshPatro`` for the original patchset. - :bug:`1753` Set one of our test modules to skip user/system SSH config file loading by default, as it was too easy to forget to do so for tests aimed at related functionality. Reported by Chris Rose. - :release:`2.1.3 <2018-05-24>` - :bug:`-` Our packaging metadata lacked a proper ``MANIFEST.in`` and thus some distributions were not including ancillary directories like tests and documentation. This has been fixed. - :bug:`-` Our ``packages=`` argument to ``setuptools.setup`` was too specific and did not allow for subpackages...such as the newly added ``fabric.testing``. Fixed now. - :release:`2.1.2 <2018-05-24>` - :bug:`-` Minor fix to ``extras_require`` re: having ``fabric[pytest]`` encompass the contents of ``fabric[testing]``. - :release:`2.1.1 <2018-05-24>` - :bug:`-` Somehow neglected to actually add ``extras_require`` to our ``setup.py`` to enable ``pip install fabric[testing]`` et al. This has been fixed. We hope. - :release:`2.1.0 <2018-05-24>` - :release:`2.0.2 <2018-05-24>` - :feature:`-` Exposed our previously internal test helpers for use by downstream test suites, as the :ref:`fabric.testing ` subpackage. .. 
note:: As this code requires non-production dependencies, we've also updated our packaging metadata to publish some setuptools "extras", ``fabric[testing]`` (base) and ``fabric[pytest]`` (for pytest users). - :support:`1761 backported` Integration tests were never added to Travis or ported to pytest before 2.0's release; this has been addressed. - :support:`1759 backported` Apply the ``black`` code formatter to the codebase and engage it on Travis-CI. Thanks to Chris Rose. - :support:`1745 backported` Wrap any imports of ``invoke.vendor.*`` with ``try``/``except`` such that downstream packages which have removed ``invoke.vendor`` are still able to function by using stand-alone dependencies. Patch courtesy of Othmane Madjoudj. - :release:`2.0.1 <2018-05-14>` - :bug:`1740` A Python 3 wheel was not uploaded during the previous release as expected; it turned out we were lacking the typical 'build universal wheels' setting in our ``setup.cfg`` (due to copying it from the one other project in our family of projects which explicitly cannot build universal wheels!) This has been fixed and a proper universal wheel is now built. - :release:`2.0.0 <2018-05-08>` - :feature:`-` Rewrite for 2.0! See :ref:`upgrading`. fabric-2.6.0/sites/www/conf.py000066400000000000000000000014411400143053200162070ustar00rootroot00000000000000# Obtain shared config values import sys import os from os.path import abspath, join, dirname sys.path.append(abspath(join(dirname(__file__), ".."))) from shared_conf import * # Releases changelog extension extensions.append("releases") releases_document_name = ["changelog", "changelog-v1"] releases_github_path = "fabric/fabric" # Intersphinx for referencing API/usage docs extensions.append("sphinx.ext.intersphinx") # Default is 'local' building, but reference the public docs site when building # under RTD. 
target = join(dirname(__file__), "..", "docs", "_build") if on_rtd: target = "http://docs.fabfile.org/en/latest/" intersphinx_mapping.update({"docs": (target, None)}) # Sister-site links to API docs html_theme_options["extra_nav_links"] = {"API Docs": "http://docs.fabfile.org"} fabric-2.6.0/sites/www/contact.rst000066400000000000000000000031161400143053200170760ustar00rootroot00000000000000======= Contact ======= If you've scoured the :ref:`conceptual ` and :ref:`API ` documentation and still can't find an answer to your question, below are various support resources that should help. We do request that you do at least skim the documentation before posting tickets or mailing list questions, however! Mailing list ------------ The best way to get help with using Fabric is via the `fab-user mailing list `_ (currently hosted at ``nongnu.org``.) The Fabric developers do their best to reply promptly, and the list contains an active community of other Fabric users and contributors as well. Twitter ------- Fabric has an official Twitter account, `@pyfabric `_, which is used for announcements and occasional related news tidbits (e.g. "Hey, check out this neat article on Fabric!"). You may also want to follow the principal developer, `@bitprophet `_, for development updates and colorful commentary. .. _bugs: Bugs/ticket tracker ------------------- To file new bugs or search existing ones, you may visit Fabric's `Github Issues `_ page. This does require a (free, easy to set up) Github account. .. _irc: IRC --- We maintain a semi-official IRC channel at ``#fabric`` on Freenode (``irc://irc.freenode.net``) where the developers and other users may be found. As always with IRC, we can't promise immediate responses, but some folks keep logs of the channel and will try to get back to you when they can. 
fabric-2.6.0/sites/www/development.rst000066400000000000000000000040201400143053200177600ustar00rootroot00000000000000=========== Development =========== The Fabric development team is headed by `Jeff Forcier `_, aka ``bitprophet``. However, dozens of other developers pitch in by submitting patches and ideas via `GitHub issues and pull requests `_, :ref:`IRC ` or the `mailing list `_. Get the code ============ Please see the :ref:`source-code-checkouts` section of the :doc:`installing` page for details on how to obtain Fabric's source code. Contributing ============ There are a number of ways to get involved with Fabric: * **Use Fabric and send us feedback!** This is both the easiest and arguably the most important way to improve the project -- let us know how you currently use Fabric and how you want to use it. (Please do try to search the `ticket tracker`_ first, though, when submitting feature ideas.) * **Report bugs or submit feature requests.** We follow `contribution-guide.org `_'s guidelines, so please check them out before visiting the `ticket tracker`_. .. _ticket tracker: https://github.com/fabric/fabric/issues While we may not always reply promptly, we do try to make time eventually to inspect all contributions and either incorporate them or explain why we don't feel the change is a good fit. Support of older releases ========================= Major and minor releases do not usually mark the end of the previous line or lines of development: * Recent minor release branches typically continue to receive critical bugfixes, often extending back two or three release lines (so e.g. if 2.4 was the currently active release line, 2.3 and perhaps even 2.2 might get patches). * Depending on the nature of bugs found and the difficulty in backporting them, older release lines may also continue to get bugfixes -- but there's no guarantee of any kind. 
Thus, if a bug were found in 2.4 that affected 2.1 and could be easily applied, a new 2.1.x version *might* be released. fabric-2.6.0/sites/www/faq.rst000066400000000000000000000204361400143053200162160ustar00rootroot00000000000000========================================= Frequently Asked/Answered Questions (FAQ) ========================================= These are some of the most commonly encountered problems or frequently asked questions which we receive from users. They aren't intended as a substitute for reading the rest of the documentation, so please make sure you check it out if your question is not answered here. .. note:: Most API examples and links are for version 2 and up; FAQs specific to version 1 will typically be marked as such. .. warning:: Many questions about shell command execution and task behavior are answered on `Invoke's FAQ page `_ - please check there also! .. _remote-env-vars-dont-work: Explicitly set env variables are not being set correctly on the remote end! =========================================================================== If your attempts to set environment variables for things like `Connection.run ` appear to silently fail, you're almost certainly talking to an SSH server which is setting a highly restrictive `AcceptEnv `_. To fix, you can either modify the server's configuration to allow the env vars you're setting, or use the ``inline_ssh_env`` `~fabric.connection.Connection` parameter (or the :ref:`global config option ` of the same name) to force Fabric to send env vars prefixed before your command strings instead. The remote shell environment doesn't match interactive shells! ============================================================== You may find environment variables (or the behavior they trigger) differ interactively vs scripted via Fabric. 
For example, a program that's on your ``$PATH`` when you manually ``ssh`` in might not be visible when using `Connection.run `; or special per-program env vars such as those for Python, pip, Java etc are not taking effect; etc. The root cause of this is typically because the SSH server runs non-interactive commands via a very limited shell call: ``/path/to/shell -c "command"`` (for example, `OpenSSH `_). Most shells, when run this way, are not considered to be either **interactive** or **login** shells; and this then impacts which startup files get loaded. Users typically only modify shell files related to interactive operation (such as ``~/.bash_profile`` or ``/etc/zshrc``); such changes do not take effect when the SSH server is running one-off commands. To work around this, consult your shell's documentation to see if it offers any non-login, non-interactive config files; for example, ``zsh`` lets you configure ``/etc/zshrc`` or ``~/.zshenv`` for this purpose. .. note:: ``bash`` does not appear to offer standard non-login/non-interactive startup files, even in version 4. However, it may attempt to determine if it's being run by a remote-execution daemon and will apparently source ``~/.bashrc`` if so; check to see if this is the case on your target systems. .. note:: Another workaround for ``bash`` users is to reply on its ``$BASH_ENV`` functionality, which names a file path as the startup file to load: - configure your SSH server to ``AcceptEnv BASH_ENV``, so that you can actually set that env var for the remote session at the top level (most SSH servers disallow this method by default). - decide which file this should be, though if you're already modifying files like ``~/.bash_profile`` or ``~/.bashrc``, you may want to just point at that exact path. - set the Fabric configuration value ``run.env`` to aim at the above path, e.g. ``{"BASH_ENV": "~/.bash_profile"}``. .. _one-shell-per-command: My (``cd``/``workon``/``export``/etc) calls don't seem to work! 
=============================================================== While Fabric can be used for many shell-script-like tasks, there's a slightly unintuitive catch: each `~fabric.connection.Connection.run` or `~fabric.connection.Connection.sudo` call (or the ``run``/``sudo`` functions in v1) has its own distinct shell session. This is required in order for Fabric to reliably figure out, after your command has run, what its standard out/error and return codes were. Unfortunately, it means that code like the following doesn't behave as you might assume:: @task def deploy(c): c.run("cd /path/to/application") c.run("./update.sh") If that were a shell script, the second `~fabric.connection.Connection.run` call would have executed with a current working directory of ``/path/to/application/`` -- but because both commands are run in their own distinct session over SSH, it actually tries to execute ``$HOME/update.sh`` instead (since your remote home directory is the default working directory). A simple workaround is to make use of shell logic operations such as ``&&``, which link multiple expressions together (provided the left hand side executed without error) like so:: def deploy(c): c.run("cd /path/to/application && ./update.sh") .. TODO: reinsert mention of 'with cd():' if that is reimplemented .. note:: You might also get away with an absolute path and skip directory changing altogether:: def deploy(c): c.run("/path/to/application/update.sh") However, this requires that the command in question makes no assumptions about your current working directory! .. TODO: reinstate FAQ about 'su' / running as another user, when sudo grows that back. (Probably in Invoke tho.) Why do I sometimes see ``err: stdin: is not a tty``? ==================================================== See :ref:`Invoke's FAQ ` for this; even for Fabric v1, which is not based on Invoke, the answer is the same. .. _faq-daemonize: Why can't I run programs in the background with ``&``? It makes Fabric hang. 
============================================================================ Because SSH executes a new shell session on the remote end for each invocation of ``run`` or ``sudo`` (:ref:`see also `), backgrounded processes may prevent the calling shell from exiting until the processes stop running, which in turn prevents Fabric from continuing on with its own execution. The key to fixing this is to ensure that your process' standard pipes are all disassociated from the calling shell, which may be done in a number of ways (listed in order of robustness): * Use a pre-existing daemonization technique if one exists for the program at hand -- for example, calling an init script instead of directly invoking a server binary. * Or leverage a process manager such as ``supervisord``, ``upstart`` or ``systemd`` - such tools let you define what it means to "run" one of your background processes, then issue init-script-like start/stop/restart/status commands. They offer many advantages over classic init scripts as well. * Use ``tmux``, ``screen`` or ``dtach`` to fully detach the process from the running shell; these tools have the benefit of allowing you to reattach to the process later on if needed (though they are more ad-hoc than ``supervisord``-like tools). * Run the program under ``nohup`` or similar "in-shell" tools - note that this approach has seen limited success for most users. I'm sometimes incorrectly asked for a passphrase instead of a password. ======================================================================= Due to a bug of sorts in our SSH layer, it's not currently possible for Fabric to always accurately detect the type of authentication needed. We have to try and guess whether we're being asked for a private key passphrase or a remote server password, and in some cases our guess ends up being wrong. 
The most common such situation is where you, the local user, appear to have an SSH keychain agent running, but the remote server is not able to honor your SSH key, e.g. you haven't yet transferred the public key over or are using an incorrect username. In this situation, Fabric will prompt you with "Please enter passphrase for private key", but the text you enter is actually being sent to the remote end's password authentication. We hope to address this in future releases by contributing to the aforementioned SSH library. fabric-2.6.0/sites/www/index.rst000066400000000000000000000145041400143053200165550ustar00rootroot00000000000000Welcome to Fabric! ================== What is Fabric? --------------- Fabric is a high level Python (2.7, 3.4+) library designed to execute shell commands remotely over SSH, yielding useful Python objects in return: .. testsetup:: opener mock = MockRemote() # NOTE: hard to get trailing whitespace in a doctest/snippet block, so we # just leave the 'real' newline off here too. Whatever. mock.expect(out=b"Linux") .. testcleanup:: opener mock.stop() .. doctest:: opener >>> from fabric import Connection >>> result = Connection('web1.example.com').run('uname -s', hide=True) >>> msg = "Ran {0.command!r} on {0.connection.host}, got stdout:\n{0.stdout}" >>> print(msg.format(result)) Ran 'uname -s' on web1.example.com, got stdout: Linux It builds on top of `Invoke `_ (subprocess command execution and command-line features) and `Paramiko `_ (SSH protocol implementation), extending their APIs to complement one another and provide additional functionality. .. note:: Fabric users may also be interested in two *strictly optional* libraries which implement best-practice user-level code: `Invocations `_ (Invoke-only, locally-focused CLI tasks) and `Patchwork `_ (remote-friendly, typically shell-command-focused, utility functions). How is it used? 
--------------- Core use cases for Fabric include (but are not limited to): * Single commands on individual hosts: .. testsetup:: single-command from fabric import Connection mock = MockRemote() mock.expect(out=b"web1") .. testcleanup:: single-command mock.stop() .. doctest:: single-command >>> result = Connection('web1').run('hostname') web1 >>> result * Single commands across multiple hosts (via varying methodologies: serial, parallel, etc): .. testsetup:: multiple-hosts from fabric import Connection mock = MockRemote() mock.expect_sessions( Session(host='web1', cmd='hostname', out=b'web1\n'), Session(host='web2', cmd='hostname', out=b'web2\n'), ) .. testcleanup:: multiple-hosts mock.stop() .. doctest:: multiple-hosts >>> from fabric import SerialGroup >>> result = SerialGroup('web1', 'web2').run('hostname') web1 web2 >>> # Sorting for consistency...it's a dict! >>> sorted(result.items()) [(, ), ...] * Python code blocks (functions/methods) targeted at individual connections: .. testsetup:: tasks from fabric import Connection mock = MockRemote() mock.expect(commands=[ Command("uname -s", out=b"Linux\n"), Command("df -h / | tail -n1 | awk '{print $5}'", out=b'33%\n'), ]) .. testcleanup:: tasks mock.stop() .. doctest:: tasks >>> def disk_free(c): ... uname = c.run('uname -s', hide=True) ... if 'Linux' in uname.stdout: ... command = "df -h / | tail -n1 | awk '{print $5}'" ... return c.run(command, hide=True).stdout.strip() ... err = "No idea how to get disk space on {}!".format(uname) ... raise Exit(err) ... >>> print(disk_free(Connection('web1'))) 33% * Python code blocks on multiple hosts: .. 
testsetup:: tasks-on-multiple-hosts from fabric import Connection, SerialGroup mock = MockRemote() mock.expect_sessions( Session(host='web1', commands=[ Command("uname -s", out=b"Linux\n"), Command("df -h / | tail -n1 | awk '{print $5}'", out=b'33%\n'), ]), Session(host='web2', commands=[ Command("uname -s", out=b"Linux\n"), Command("df -h / | tail -n1 | awk '{print $5}'", out=b'17%\n'), ]), Session(host='db1', commands=[ Command("uname -s", out=b"Linux\n"), Command("df -h / | tail -n1 | awk '{print $5}'", out=b'2%\n'), ]), ) .. testcleanup:: tasks-on-multiple-hosts mock.stop() .. doctest:: tasks-on-multiple-hosts >>> # NOTE: Same code as above! >>> def disk_free(c): ... uname = c.run('uname -s', hide=True) ... if 'Linux' in uname.stdout: ... command = "df -h / | tail -n1 | awk '{print $5}'" ... return c.run(command, hide=True).stdout.strip() ... err = "No idea how to get disk space on {}!".format(uname) ... raise Exit(err) ... >>> for cxn in SerialGroup('web1', 'web2', 'db1'): ... print("{}: {}".format(cxn, disk_free(cxn))) : 33% : 17% : 2% In addition to these library-oriented use cases, Fabric makes it easy to integrate with Invoke's command-line task functionality, invoking via a ``fab`` binary stub: * Python functions, methods or entire objects can be used as CLI-addressable tasks, e.g. ``fab deploy``; * Tasks may indicate other tasks to be run before or after they themselves execute (pre- or post-tasks); * Tasks are parameterized via regular GNU-style arguments, e.g. ``fab deploy --env=prod -d``; * Multiple tasks may be given in a single CLI session, e.g. ``fab build deploy``; * Much more - all other Invoke functionality is supported - see `its documentation `_ for details. I'm a user of Fabric 1, how do I upgrade? ----------------------------------------- We've packaged modern Fabric in a manner that allows installation alongside Fabric 1, so you can upgrade at whatever pace your use case requires. 
There are multiple possible approaches -- see our :ref:`detailed upgrade documentation ` for details. What is this website? --------------------- ``www.fabfile.org`` provides project information for Fabric such as the changelog, contribution guidelines, development roadmap, news/blog, and so forth. Detailed conceptual and API documentation can be found at our code documentation site, `docs.fabfile.org `_. .. toctree:: :hidden: changelog changelog-v1 FAQs installing installing-1.x upgrading development troubleshooting Roadmap contact fabric-2.6.0/sites/www/installing-1.x.rst000066400000000000000000000056331400143053200202210ustar00rootroot00000000000000================ Installing (1.x) ================ .. note:: Installing Fabric 2.0 or above? Looking for non-PyPI downloads or source code checkout instructions? See :doc:`installing`. This document includes legacy notes on installing Fabric 1.x. Users are strongly encouraged to upgrade to 2.x when possible. Basic installation ================== Fabric is best installed via `pip `_; to ensure you get Fabric 1 instead of the new but incompatible Fabric 2, specify ``<2.0``:: $ pip install 'fabric<2.0' All advanced ``pip`` use cases work too, such as installing the latest copy of the ``v1`` development branch:: $ pip install -e 'git+https://github.com/fabric/fabric@v1#egg=fabric' Or cloning the Git repository and running:: $ git checkout v1 $ pip install -e . within it. Your operating system may also have a Fabric package available (though these are typically older and harder to support), typically called ``fabric`` or ``python-fabric``. E.g.:: $ sudo apt-get install fabric .. note:: Make sure to confirm which major version is currently packaged! 
Dependencies ============ In order for Fabric's installation to succeed, you will need four primary pieces of software: * the Python programming language; * the ``setuptools`` packaging/installation library; * the Python `Paramiko `_ SSH library; * and Paramiko's dependency, `Cryptography `_. and, if using parallel execution mode, * the `multiprocessing`_ library. Please read on for important details on each dependency -- there are a few gotchas. Python ------ Fabric requires `Python `_ version 2.5+. setuptools ---------- `Setuptools`_ comes with most Python installations by default; if yours doesn't, you'll need to grab it. In such situations it's typically packaged as ``python-setuptools``, ``py26-setuptools`` or similar. .. _setuptools: https://pypi.org/project/setuptools ``multiprocessing`` ------------------- An optional dependency, the ``multiprocessing`` library is included in Python's standard library in version 2.6 and higher. If you're using Python 2.5 and want to make use of Fabric's parallel execution features you'll need to install it manually; the recommended route, as usual, is via ``pip``. Please see the `multiprocessing PyPI page `_ for details. .. warning:: Early versions of Python 2.6 (in our testing, 2.6.0 through 2.6.2) ship with a buggy ``multiprocessing`` module that appears to cause Fabric to hang at the end of sessions involving large numbers of concurrent hosts. If you encounter this problem, either use ``env.pool_size`` / ``-z`` to limit the amount of concurrency, or upgrade to Python >=2.6.3. Python 2.5 is unaffected, as it requires the PyPI version of ``multiprocessing``, which is newer than that shipped with Python <2.6.3. fabric-2.6.0/sites/www/installing.rst000066400000000000000000000132031400143053200176050ustar00rootroot00000000000000========== Installing ========== .. note:: Users looking to install Fabric 1.x should see :doc:`installing-1.x`. However, :doc:`upgrading ` to 2.x is strongly recommended. 
Fabric is best installed via `pip `_:: $ pip install fabric All advanced ``pip`` use cases work too, such as:: $ pip install -e git+https://github.com/fabric/fabric Or cloning the Git repository and running:: $ pip install -e . within it. Your operating system may also have a Fabric package available (though these are typically older and harder to support), typically called ``fabric`` or ``python-fabric``. E.g.:: $ sudo apt-get install fabric .. _installing-as-fabric2: Installing modern Fabric as ``fabric2`` ======================================= Users who are migrating from Fabric 1 to Fabric 2+ may find it useful to have both versions installed side-by-side. The easiest way to do this is to use the handy ``fabric2`` PyPI entry:: $ pip install fabric2 This upload is generated from the normal Fabric repository, but is tweaked at build time so that it installs a ``fabric2`` package instead of a ``fabric`` one (and a ``fab2`` binary instead of a ``fab`` one.) The codebase is otherwise unchanged. Users working off of the Git repository can enable that same tweak with an environment variable, e.g.:: $ PACKAGE_AS_FABRIC2=yes pip install -e . .. note:: The value of the environment variable doesn't matter, as long as it is not empty. ``fabric`` and ``fabric2`` vs ``fabric3`` ----------------------------------------- Unfortunately, the ``fabric3`` entry on PyPI is an unauthorized fork of Fabric 1.x which we do not control. Once modern Fabric gets up to 3.x, 4.x etc, we'll likely continue distributing it via both ``fabric`` and ``fabric2`` for convenience; there will never be any official ``fabric3``, ``fabric4`` etc. In other words, ``fabric2`` is purely there to help users of 1.x cross the 2.0 "major rewrite" barrier; future major versions will *not* be large rewrites and will only have small sets of backward incompatibilities. 
Inability to ``pip install -e`` both versions --------------------------------------------- You may encounter issues if *both* versions of Fabric are installed via ``pip install -e``, due to how that functionality works (tl;dr it just adds the checkout directories to ``sys.path``, regardless of whether you wanted to "install" all packages within them - so Fabric 2+'s ``fabric/`` package still ends up visible to the import system alongside ``fabric2/``). Thus, you may only have one of the local copies of Fabric installed in 'editable' fashion at a time, and the other must be repeatedly reinstalled via ``pip install`` (no ``-e``) if you need to make edits to it. Order of installations ---------------------- Due to the same pip quirk mentioned above, if either of your Fabric versions are installed in 'editable' mode, you **must** install the 'editable' version first, and then install the 'static' version second. For example, if you're migrating from some public release of Fabric 1 to a checkout of modern Fabric:: $ PACKAGE_AS_FABRIC2=yes pip install -e /path/to/fabric2 $ pip install fabric==1.14.0 You may see some warnings on that second ``pip install`` (eg ``Not uninstalling fabric`` or ``Can't uninstall 'fabric'``) but as long as it exits cleanly and says something like ``Successfully installed fabric-1.14.0``, you should be okay. Double check with e.g. ``pip list`` and you should have entries for both ``fabric`` and ``fabric2``. Dependencies ============ In order for Fabric's installation to succeed, you will need the following: * the Python programming language, versions 2.7 or 3.4+; * the `Invoke `_ command-running and task-execution library; * and the `Paramiko `_ SSH library (as well as its own dependencies; see `its install docs `_.) 
Development dependencies ------------------------ If you are interested in doing development work on Fabric (or even just running the test suite), you'll need the libraries listed in the ``dev-requirements.txt`` (included in the source distribution.) Usually it's easy to simply ``pip install -r dev-requirements.txt``. .. _downloads: Downloads ========= To obtain a tar.gz or zip archive of the Fabric source code, you may visit `Fabric's PyPI page `_, which offers manual downloads in addition to being the entry point for ``pip``. .. _source-code-checkouts: Source code checkouts ===================== The Fabric developers manage the project's source code with the `Git `_ DVCS. To follow Fabric's development via Git instead of downloading official releases, you have the following options: * Clone the canonical repository straight from `the Fabric organization's repository on Github `_ (cloning instructions available on that page). * Make your own fork of the Github repository by making a Github account, visiting `fabric/fabric `_ and clicking the "fork" button. .. note:: If you've obtained the Fabric source via source control and plan on updating your checkout in the future, we highly suggest using ``pip install -e .`` (or ``python setup.py develop``) instead -- it will use symbolic links instead of file copies, ensuring that imports of the library or use of the command-line tool will always refer to your checkout. For information on the hows and whys of Fabric development, including which branches may be of interest and how you can help out, please see the :doc:`development` page. fabric-2.6.0/sites/www/roadmap.rst000066400000000000000000000042601400143053200170670ustar00rootroot00000000000000.. _roadmap: =================== Development roadmap =================== This document outlines Fabric's intended development path. 
Please make sure you're reading `the latest version `_ of this document, and also see the page about :ref:`upgrading ` if you are migrating from version 1 to versions 2 or above. Fabric 2 and above ================== Modern Fabric versions (2+) receive active feature and bugfix development: - **2.0**: Initial public release, arguably a technology preview and a packaging/upgrade trial. Intent is to act as a jolt for users of 1.x who aren't pinning their dependencies (sorry, folks!), enable installation via PyPI so users don't have to install via Git to start upgrading, and generally get everything above-board and iterating in classic semantic versioning fashion. - **2.1, 2.2, 2.3, etc**: Implement the most pressing "missing features", including features which were present in 1.0 (see :ref:`upgrading` for details on these) as well as any brand new features we've been wanting in 2.x for a while (though most of these will come via Invoke and/or Paramiko releases -- see note below for more). - **3.0, 4.0, etc**: Subsequent major releases will **not** be full-on rewrites as 2.0 was, but will be *small* (feature-release-sized) releases that just happen to contain one or more backwards incompatible API changes. These will be clearly marked in the changelog and reflected in the upgrading documentation. .. note:: Many features that you may use via Fabric will only need development in the libraries Fabric wraps -- `Invoke `_ and `Paramiko `_ -- and unless Fabric itself needs changes to match, you can often get new features by upgrading only one of the three. Make sure to check the other projects' changelogs periodically! Fabric 1.x ========== Fabric 1.x has reached a tipping point regarding internal tech debt, lack of testability & ability to make improvements without harming backwards compatibility. As such, the 1.x line now receives bugfixes only. We **strongly** encourage all users to :ref:`upgrade ` to Fabric 2.x. 
fabric-2.6.0/sites/www/troubleshooting.rst000066400000000000000000000047501400143053200206770ustar00rootroot00000000000000=============== Troubleshooting =============== Stuck? Having a problem? Here are the steps to try before you submit a bug report. * **Make sure you're on the latest version.** If you're not on the most recent version, your problem may have been solved already! Upgrading is always the best first step. * **Try older versions.** If you're already *on* the latest Fabric, try rolling back a few minor versions (e.g. if on 2.3, try Fabric 2.2 or 2.1) and see if the problem goes away. This will help the devs narrow down when the problem first arose in the commit log. * **Try switching up your Paramiko.** Fabric relies heavily on the Paramiko library for its SSH functionality, so try applying the above two steps to your Paramiko install as well. .. note:: Fabric versions sometimes have different Paramiko dependencies - so to try older Paramikos you may need to downgrade Fabric as well. * **Make sure Fabric is really the problem.** If your problem is in the behavior or output of a remote command, try recreating it without Fabric involved: * Find out the exact command Fabric is executing on your behalf: - In 2.x and up, activate command echoing via the ``echo=True`` keyword argument, the ``run.echo`` config setting, or the ``-e`` CLI option. - In 1.x, run Fabric with ``--show=debug`` and look for ``run:`` or ``sudo:`` lines. * Execute the command in an interactive remote shell first, to make sure it works for a regular human; this will catch issues such as errors in command construction. * If that doesn't find the issue, run the command over a non-shell SSH session, e.g. ``ssh yourserver "your command"``. Depending on your settings and Fabric version, you may want to use ``ssh -T`` (disable PTY) or ``-t`` (enable PTY) to most closely match how Fabric is executing the command. 
* **Enable Paramiko-level debug logging.** If your issue is in the lower level Paramiko library, it can help us to see the debug output Paramiko prints. At top level in your fabfile (or in an appropriate module, if not using a fabfile), add the following:: import logging logging.basicConfig(level=logging.DEBUG) This should start printing Paramiko's debug statements to your standard error stream. (Feel free to add more logging kwargs to ``basicConfig()`` such as ``filename='/path/to/a/file'`` if you like.) Then submit this info to anybody helping you on IRC or in your bug report. fabric-2.6.0/sites/www/upgrading.rst000066400000000000000000002236121400143053200174300ustar00rootroot00000000000000.. _upgrading: ================== Upgrading from 1.x ================== Modern Fabric (2+) represents a near-total reimplementation & reorganization of the software. It's been :ref:`broken in two `, cleaned up, made more explicit, and so forth. In some cases, upgrading requires only basic search & replace; in others, more work is needed. If you read this document carefully, it should guide you in the right direction until you're fully upgraded. If any functionality you're using in Fabric 1 isn't listed here, please file a ticket `on Github `_ and we'll update it ASAP. .. warning:: As of the 2.0 release line, Fabric 2 is **not** at 100% feature parity with 1.x! Some features have been explicitly dropped, but others simply have not been ported over yet, either due to time constraints or because said features need to be re-examined in a modern context. Please review the information below, including the :ref:`upgrade-specifics` section which contains a very detailed list, before filing bug reports! Also see :ref:`the roadmap ` for additional notes about release versioning. Why upgrade? ============ We'd like to call out, in no particular order, some specific improvements in modern Fabric that might make upgrading worth your time. .. 
note:: These are all listed in the rest of the doc too, so if you're already sold, just skip there. - Python 3 compatibility (specifically, we now support 2.7 and 3.4+); - Thread-safe - no more requirement on multiprocessing for concurrency; - API reorganized around `fabric.connection.Connection` objects instead of global module state; - Command-line parser overhauled to allow for regular GNU/POSIX style flags and options on a per-task basis (no more ``fab mytask:weird=custom,arg=format``); - Task organization is more explicit and flexible / has less 'magic'; - Tasks can declare other tasks to always be run before or after themselves; - Configuration massively expanded to allow for multiple config files & formats, env vars, per-user/project/module configs, and much more; - SSH config file loading enabled by default & has been fleshed out re: system/user/runtime file selection; - Shell command execution API consistent across local and remote method calls - no more differentiation between ``local`` and ``run`` (besides where the command runs, of course!); - Shell commands significantly more flexible re: interactive behavior, simultaneous capture & display (now applies to local subprocesses, not just remote), encoding control, and auto-responding; - Use of Paramiko's APIs for the SSH layer much more transparent - e.g. `fabric.connection.Connection` allows control over the kwargs given to `SSHClient.connect `; - Gateway/jump-host functionality offers a ``ProxyJump`` style 'native' (no proxy-command subprocesses) option, which can be nested infinitely; 'Sidegrading' to Invoke ======================= We linked to a note about this above, but to be explicit: modern Fabric is really a few separate libraries, and anything not strictly SSH or network related has been :ref:`split out into the Invoke project `. 
This means that if you're in the group of users leveraging Fabric solely for its task execution or ``local``, and never used ``run``, ``put`` or similar - **you don't need to use Fabric itself anymore** and can simply **'sidegrade' to Invoke instead**. You'll still want to read over this document to get a sense of how things have changed, but be aware that you can get away with ``pip install invoke`` and won't need Fabric, Paramiko, cryptography dependencies, or anything else. Using modern Fabric from within Invoke ====================================== We intend to enhance modern Fabric until it encompasses the bulk of Fabric 1's use cases, such that you can use ``fab`` and fabfiles on their own without caring too much about how it's built on top of Invoke. However, prior to that point -- and very useful on its own for intermediate-to-advanced users -- is the fact that modern Fabric is designed with library or direct API use in mind. **It's entirely possible, and in some cases preferable, to use Invoke for your CLI needs and Fabric as a pure API within your Invoke tasks.** In other words, you can eschew ``fab``/fabfiles entirely unless you find yourself strongly needing the conveniences it wraps around ad-hoc sessions, such as :option:`--hosts` and the like. Running both Fabric versions simultaneously =========================================== To help with gradual upgrades, modern Fabric may be installed under the name ``fabric2`` (in addition to being made available "normally" as versions 2.0+ of ``fabric``) and can live alongside installations of version 1.x. Thus, if you have a large codebase and don't want to make the jump to modern versions in one leap, it's possible to have both Fabric 1 (``fabric``, as you presumably had it installed previously) and modern Fabric (as ``fabric2``) resident in your Python environment simultaneously. .. 
note:: We strongly recommend that you eventually migrate all code using Fabric 1, to versions 2 or above, so that you can move back to installing and importing under the ``fabric`` name. ``fabric2`` as a distinct package and module is intended to be a stopgap, and there will not be any ``fabric3`` or above (not least because some of those names are already taken!) For details on how to obtain the ``fabric2`` version of the package, see :ref:`installing-as-fabric2`. .. _from-v1: Creating ``Connection`` and/or ``Config`` objects from v1 settings ------------------------------------------------------------------ A common tactic when upgrading piecemeal is to generate modern Fabric objects whose contents match the current Fabric 1 environment. Whereas Fabric 1 stores *all* configuration (including the "current host") in a single place -- the ``env`` object -- modern Fabric breaks things up into multiple (albeit composed) objects: `~fabric.connection.Connection` for per-connection parameters, and `~fabric.config.Config` for general settings and defaults. In most cases, you'll only need to generate a `~fabric.connection.Connection` object using the alternate class constructor `Connection.from_v1 `, which should be fed your appropriate local ``fabric.api.env`` object; see its API docs for details. A contrived example:: from fabric.api import env, run from fabric2 import Connection env.host_string = "admin@myserver" run("whoami") # v1 cxn = Connection.from_v1(env) cxn.run("whoami") # v2+ By default, this constructor calls another API member -- `Config.from_v1 ` -- internally on your behalf. Users who need tighter control over modern-style config options may opt to call that classmethod explicitly and hand their modified result into `Connection.from_v1 `, which will cause the latter to skip any implicit config creation. .. 
_v1-env-var-imports: Mapping of v1 ``env`` vars to modern API members ------------------------------------------------ The ``env`` vars and how they map to `~fabric.connection.Connection` arguments or `~fabric.config.Config` values (when fed into the ``.from_v1`` constructors described above) are listed below. .. list-table:: :header-rows: 1 * - v1 ``env`` var - v2+ usage (prefixed with the class it ends up in) * - ``always_use_pty`` - Config: ``run.pty``. * - ``command_timeout`` - Config: ``timeouts.command``; timeouts are now their own config subtree, whereas in v1 it was possible for the ambiguous ``timeout`` setting -- see below -- to work for either connect OR command timeouts. * - ``forward_agent`` - Config: ``connect_kwargs.forward_agent``. * - ``gateway`` - Config: ``gateway``. * - ``host_string`` - Connection: ``host`` kwarg (which can handle host-string like values, including user/port). * - ``key`` - **Not supported**: Fabric 1 performed extra processing on this (trying a bunch of key classes to instantiate) before handing it into Paramiko; modern Fabric prefers to just let you handle Paramiko-level parameters directly. If you're filling your Fabric 1 ``key`` data from a file, we recommend switching to ``key_filename`` instead, which is supported. If you're loading key data from some other source as a string, you should know what type of key your data is and manually instantiate it instead, then supply it to the ``connect_kwargs`` parameter. For example:: from io import StringIO # or 'from StringIO' on Python 2 from fabric.state import env from fabric2 import Connection from paramiko import RSAKey from somewhere import load_my_key_string pkey = RSAKey.from_private_key(StringIO(load_my_key_string())) cxn = Connection.from_v1(env, connect_kwargs={"pkey": pkey}) * - ``key_filename`` - Config: ``connect_kwargs.key_filename``. * - ``no_agent`` - Config: ``connect_kwargs.allow_agent`` (inverted). 
* - ``password`` - Config: ``connect_kwargs.password``, as well as ``sudo.password`` **if and only if** the env's ``sudo_password`` (see below) is unset. (This mimics how v1 uses this particular setting - in earlier versions there was no ``sudo_password`` at all.) * - ``port`` - Connection: ``port`` kwarg. Is casted to an integer due to Fabric 1's default being a string value (which is not valid in v2). .. note:: Since v1's ``port`` is used both for a default *and* to store the current connection state, v2 uses it to fill in the Connection only, and not the Config, on assumption that it will typically be the current connection state. * - ``ssh_config_path`` - Config: ``ssh_config_path``. * - ``sudo_password`` - Config: ``sudo.password``. * - ``sudo_prompt`` - Config: ``sudo.prompt``. * - ``timeout`` - Config: ``timeouts.connection``, for connection timeouts, or ``timeouts.command`` for command timeouts (see above). * - ``use_ssh_config`` - Config: ``load_ssh_configs``. * - ``user`` - Connection: ``user`` kwarg. * - ``warn_only`` - Config: ``run.warn`` .. _upgrade-specifics: Upgrade specifics ================= This is (intended to be) an exhaustive list of *all* Fabric 1.x functionality, as well as new-to-Invoke-or-Fabric-2 functionality not present in 1.x; it specifies whether upgrading is necessary, how to upgrade if so, and tracks features which haven't been implemented in modern versions yet. Most sections are broken down in table form, as follows: .. list-table:: * - Fabric 1 feature or behavior - Status, see below for breakdown - Migration notes, removal rationale, etc Below are the typical values for the 'status' column, though some of them are a bit loose - make sure to read the notes column in all cases! Also note that things are not ironclad - eg any 'removed' item has some chance of returning if enough users request it or use cases are made that workarounds are insufficient. 
- **Ported**: available already, possibly renamed or moved (frequently, moved into the `Invoke `_ codebase.) - **Pending**: would fit, but has not yet been ported, good candidate for a patch. *These entries link to the appropriate Github ticket* - please do not make new ones! - **Removed**: explicitly *not* ported (no longer fits with vision, had too poor a maintenance-to-value ratio, etc) and unlikely to be reinstated. Here's a quick local table of contents for navigation purposes: .. contents:: :local: .. _upgrading-general: General / conceptual -------------------- - Modern Fabric is fully Python 3 compatible; as a cost, Python 2.5 support (a longstanding feature of Fabric 1) has been dropped - in fact, we've dropped support for anything older than Python 2.7. - The CLI task-oriented workflow remains a primary design goal, but the library use case is no longer a second-class citizen; instead, the library functionality has been designed first, with the CLI/task features built on top of it. - Additionally, within the CLI use case, version 1 placed too much emphasis on 'lazy' interactive prompts for authentication secrets or even connection parameters, driven in part by a lack of strong configuration mechanisms. Over time it became clear this wasn't worth the tradeoffs of having confusing noninteractive behavior and difficult debugging/testing procedures. Modern Fabric takes an arguably cleaner approach (based on functionality added to v1 over time) where users are encouraged to leverage the configuration system and/or serve the user prompts for runtime secrets at the *start* of the process; if the system determines it's missing information partway through, it raises exceptions instead of prompting. - Invoke's design includes :ref:`explicit user-facing testing functionality `; if you didn't find a way to write tests for your Fabric-using code before, it should be much easier now. 
- We recommend trying to write tests early on; they will help clarify the upgrade process for you & also make the process safer! .. _upgrading-api: API organization ---------------- High level code flow and API member concerns. .. list-table:: :widths: 40 10 50 * - Import everything via ``fabric.api`` - Removed - All useful imports are now available at the top level, e.g. ``from fabric import Connection``. * - Configure connection parameters globally (via ``env.host_string``, ``env.host``, ``env.port``, ``env.user``) and call global methods which implicitly reference them (``run``/``sudo``/etc) - Removed - The primary API is now properly OOP: instantiate `fabric.connection.Connection` objects and call their methods. These objects encapsulate all connection state (user, host, gateway, etc) and have their own SSH client instances. .. seealso:: `Connection.from_v1 ` * - Emphasis on serialized "host strings" as method of setting user, host, port, etc - Ported/Removed - `fabric.connection.Connection` *can* accept a shorthand "host string"-like argument, but the primary API is now explicit user, host, port, etc keyword arguments. Additionally, many arguments/settings/etc that expected a host string in v1 will now expect a `fabric.connection.Connection` instance instead. * - Use of "roles" as global named lists of host strings - Ported - This need is now served by `fabric.group.Group` objects (which wrap some number of `fabric.connection.Connection` instances with "do a thing to all members" methods.) Users can create & organize these any way they want. See the line items for ``--roles`` (:ref:`upgrading-cli`), ``env.roles`` (:ref:`upgrading-env`) and ``@roles`` (:ref:`upgrading-tasks`) for the status of those specifics. .. _upgrading-tasks: Task functions & decorators --------------------------- .. note:: Nearly all task-related functionality is implemented in Invoke; for more details see its :ref:`execution ` and :ref:`namespaces ` documentation. .. 
list-table:: :widths: 40 10 50 * - By default, tasks are loaded from a ``fabfile.py`` which is sought up towards filesystem root from the user's current working directory - Ported - This behavior is basically identical today, with minor modifications and enhancements (such as tighter control over the load process, and API hooks for implementing custom loader logic - see :ref:`loading-collections`.) * - "Classic" style implicit task functions lacking a ``@task`` decorator - Removed - These were on the way out even in v1, and arbitrary task/namespace creation is more explicitly documented now, via Invoke's `~invoke.tasks.Task` and `~invoke.collection.Collection`. * - "New" style ``@task``-decorated, module-level task functions - Ported - Largely the same, though now with superpowers - `@task ` can still be used without any parentheses, but where v1 only had a single ``task_class`` argument, the new version (largely based on Invoke's) has a number of namespace and parser hints, as well as execution related options (such as those formerly served by ``@hosts`` and friends). * - Arbitrary task function arguments (i.e. ``def mytask(any, thing, at, all)``) - Ported - This gets its own line item because: tasks must now take a `~invoke.context.Context` (vanilla Invoke) or `fabric.connection.Connection` (Fabric) object as their first positional argument. The rest of the function signature is, as before, totally up to the user & will get automatically turned into CLI flags. This sacrifices a small bit of the "quick DSL" of v1 in exchange for a cleaner, easier to understand/debug, and more user-overrideable API structure. As a side effect, it lessens the distinction between "module of functions" and "class of methods"; users can more easily start with the former and migrate to the latter when their needs grow/change. 
* - Implicit task tree generation via import-crawling - Ported/Removed - Namespace construction is now more explicit; for example, imported modules in your ``fabfile.py`` are no longer auto-scanned and auto-added to the task tree. However, the root ``fabfile.py`` *is* automatically loaded (using `Collection.from_module `), preserving the simple/common case. See :ref:`task-namespaces` for details. We may reinstate (in an opt-in fashion) imported module scanning later, since the use of explicit namespace objects still allows users control over the tree that results. * - ``@hosts`` for determining the default host or list of hosts a given task uses - Ported - Reinstated as the ``hosts`` parameter of `@task `. Further, it can now handle dicts of `fabric.connection.Connection` kwargs in addition to simple host strings. * - ``@roles`` for determining the default list of group-of-host targets a given task uses - Pending - See :ref:`upgrading-api` for details on the overall 'roles' concept. When it returns, this will probably follow ``@hosts`` and become some ``@task`` argument. * - ``@serial``/``@parallel``/``@runs_once`` - Ported/`Pending `__ - Parallel execution is currently offered at the API level via `fabric.group.Group` subclasses such as `fabric.group.ThreadingGroup`; however, designating entire sessions and/or tasks to run in parallel (or to exempt from parallelism) has not been solved yet. The problem needs solving at a higher level than just SSH targets, so this links to an Invoke-level ticket. * - ``execute`` for calling named tasks from other tasks while honoring decorators and other execution mechanics (as opposed to calling them simply as functions) - `Pending `__ - This is one of the top "missing features" from the rewrite; link is to Invoke's tracker. 
* - ``Task`` class for programmatic creation of tasks (as opposed to using some function object and the ``@task`` decorator) - Ported - While not sharing many implementation details with v1, modern Fabric (via Invoke) has a publicly exposed `~invoke.tasks.Task` class, which alongside `~invoke.collection.Collection` allow full programmatic creation of task trees, no decorator needed. .. _upgrading-cli: CLI arguments, options and behavior ----------------------------------- .. list-table:: :widths: 40 10 50 * - Exposure of task arguments as custom colon/comma delimited CLI arguments, e.g. ``fab mytask:posarg,kwarg=val`` - Removed - CLI arguments are now proper GNU/POSIX-style long and short flags, including globbing shortflags together, space or equals signs to attach values, optional values, and much more. See :ref:`invoking-tasks`. * - Task definition names are mirrored directly on the command-line, e.g for task ``def journald_logs()``, command line argument is ``fab journald_logs`` - Removed - Tasks names now get converted from underscores to hyphens. Eg. task ``def journald_logs()`` now evaluates to ``fab journald-logs`` on the commandline. * - Ability to invoke multiple tasks in a single command line, e.g. ``fab task1 task2`` - Ported - Works great! * - ``python -m fabric`` as stand-in for ``fab`` - Ported - Ported in 2.2. * - ``-a``/``--no_agent`` for disabling automatic SSH agent key selection - Removed - To disable use of an agent permanently, set config value ``connect_kwargs.allow_agent`` to ``False``; to disable temporarily, unset the ``SSH_AUTH_SOCK`` env var. * - ``-A``/``--forward-agent`` for enabling agent forwarding to the remote end - Removed - The config and kwarg versions of this are ported, but there is currently no CLI flag. Usual "you can set the config value at runtime with a shell env variable" clause is in effect, so this *may* not get ported, depending. 
* - ``--abort-on-prompts`` to turn interactive prompts into exceptions (helps avoid 'hanging' sessions) - Removed - See the notes about interactive prompts going away in :ref:`upgrading-general`. Without mid-session prompts, there's no need for this option. * - ``-c``/``--config`` for specifying an alternate config file path - Ported - ``--config`` lives on, but the short flag is now ``-f`` (``-c`` now determines which collection module name is sought by the task loader.) * - ``--colorize-errors`` (and ``env.colorize_errors``) to enable ANSI coloring of error output - `Pending `__ - Very little color work has been done yet and this is one of the potentially missing pieces. We're unsure how often this was used in v1 so it's possible it won't show up again, but generally, we like using color as an additional output vector, so... * - ``-d``/``--display`` for showing info on a given command - Ported - This is now the more standard ``-h``/``--help``, and can be given in either "direction": ``fab -h mytask`` or ``fab mytask -h``. * - ``-D``/``--disable-known-hosts`` to turn off Paramiko's automatic loading of user-level ``known_hosts`` files - `Pending `__ - Not ported yet, probably will be. * - ``-e``/``--eagerly-disconnect`` (and ``env.eagerly_disconnect``) which tells the execution system to disconnect from hosts as soon as a task is done running - Ported/`Pending `__ - There's no explicit connection cache anymore, so eager disconnection should be less necessary. However, investigation and potential feature toggles are still pending. * - ``-f``/``--fabfile`` to select alternate fabfile location - Ported - This is now split up into ``-c``/``--collection`` and ``-r``/``--search-root``; see :ref:`loading-collections`. * - ``-g``/``--gateway`` (and ``env.gateway``) for selecting a global SSH gateway host string - `Pending `__ - One can set the global ``gateway`` config option via an environment variable, which at a glance would remove the need for a dedicated CLI option. 
However, this approach only allows setting string values, which in turn only get used for ``ProxyCommand`` style gatewaying, so it *doesn't* replace v1's ``--gateway`` (which took a host string and turned it into a ``ProxyJump`` style gateway). Thus, if enough users notice the lack, we'll consider a feature-add that largely mimics the v1 behavior: string becomes first argument to `fabric.connection.Connection` and that resulting object is then set as ``gateway``. * - ``--gss-auth``/``--gss-deleg``/``--gss-kex`` - Removed - These didn't seem used enough to be worth porting over, especially since they fall under the usual umbrella of "Paramiko-level connect passthrough" covered by the ``connect_kwargs`` config option. (Which, if necessary, can be set at runtime via shell environment variables, like any other config value.) * - ``--hide``/``--show`` for tweaking output display globally - Removed - This is configurable via the config system and env vars. * - ``-H``/``--hosts`` - Ported - Works basically the same as before - if given, is shorthand for executing any given tasks once per host. * - ``-i`` for SSH key filename selection - Ported - Works same as v1, including ability to give multiple times to build a list of keys to try. * - ``-I``/``--initial-password-prompt`` for requesting an initial pre-execution password prompt - Ported - It's now :option:`--prompt-for-login-password`, :ref:`--prompt-for-sudo-password ` or :option:`--prompt-for-passphrase`, depending on whether you were using the former to fill in passwords or key passphrases (or both.) * - ``--initial-sudo-password-prompt`` for requesting an initial pre-execution sudo password prompt - Ported - This is now :option:`--prompt-for-sudo-password`. Still a bit of a mouthful but still 4 characters shorter! 
* - ``-k``/``--no-keys`` which prevents Paramiko's automatic loading of key files such as ``~/.ssh/id_rsa`` - Removed - Use environment variables to set the ``connect_kwargs.look_for_keys`` config value to ``False``. * - ``--keepalive`` for setting network keepalive - `Pending `__ - Not ported yet. * - ``-l``/``--list`` for listing tasks, plus ``-F``/``--list-format`` for tweaking list display format - Ported - Now with bonus JSON list-format! Which incidentally replaces ``-F short``/``--shortlist``. * - ``--linewise`` for buffering output line by line instead of roughly byte by byte - Removed - This doesn't really fit with the way modern command execution code views the world, so it's gone. * - ``-n``/``--connection-attempts`` controlling multiple connect retries - `Pending `__ - Not ported yet. * - ``--no-pty`` to disable automatic PTY allocation in ``run``, etc - Ported - Is now ``-p``/``--pty`` as the default behavior was switched around. * - ``--password``/``--sudo-password`` for specifying login/sudo password values - Removed - This is typically not very secure to begin with, and there are now many other avenues for setting the related configuration values, so they're gone at least for now. * - ``-P``/``--parallel`` for activating global parallelism - `Pending `__ - See the notes around ``@parallel`` in :ref:`upgrading-tasks`. * - ``--port`` to set default SSH port - Removed - Our gut says this is best left up to the configuration system's env var layer, or use of the ``port`` kwarg on `fabric.connection.Connection`; however it may find its way back. * - ``-r``/``--reject-unknown-hosts`` to modify Paramiko known host behavior - `Pending `__ - Not ported yet. * - ``-R``/``--roles`` for global list-of-hosts target selection - `Pending `__ - As noted under :ref:`upgrading-api`, role lists are only partially applicable to the new API and we're still feeling out whether/how they would work at a global or CLI level. 
* - ``--set key=value`` for setting ``fabric.state.env`` vars at runtime - Removed - This is largely obviated by the new support for shell environment variables (just do ``INVOKE_KEY=value fab mytask`` or similar), though it's remotely possible a CLI flag method of setting config values will reappear later. * - ``-s``/``--shell`` to override default shell path - Removed - Use the configuration system for this. * - ``--shortlist`` for short/computer-friendly list output - Ported - See ``--list``/``--list-format`` - there's now a JSON format instead. No point reinventing the wheel. * - ``--skip-bad-hosts`` (and ``env.skip_bad_hosts``) to bypass problematic hosts - `Pending `__ - Not ported yet. * - ``--skip-unknown-tasks`` and ``env.skip_unknown_tasks`` for silently skipping past bogus task names on CLI invocation - Removed - This felt mostly like bloat to us and could require nontrivial parser changes to reimplement, so it's out for now. * - ``--ssh-config-path`` and ``env.ssh_config_path`` for selecting an SSH config file - Ported - This is now ``-S``/``--ssh-config``. * - ``--system-known-hosts`` to trigger loading systemwide ``known_hosts`` files - `Pending `__/Removed - This isn't super likely to come back as its own CLI flag but it may well return as a configuration value. * - ``-t``/``--timeout`` controlling connection timeout - Ported - It's now ``-t``/``--connect-timeout`` as ``--timeout`` was technically ambiguous re: connect vs command timeout. * - ``-T``/``--command-timeout`` - Ported - Implemented in Invoke and preserved in ``fab`` under the same name. * - ``-u``/``--user`` to set global default username - Removed - Most of the time, configuration (env vars for true runtime, or eg user/project level config files as appropriate) should be used for this, but it may return. * - ``-w``/``--warn-only`` to toggle warn-vs-abort behavior - Ported - Ported as-is, no changes. 
* - ``-x``/``--exclude-hosts`` (and ``env.exclude_hosts``) for excluding otherwise selected targets - `Pending `__ - Not ported yet, is pending an in depth rework of global (vs hand-instantiated) connection/group selection. * - ``-z``/``--pool-size`` for setting parallel-mode job queue pool size - Removed - There's no job queue anymore, or at least at present. Whatever replaces it (besides the already-implemented threading model) is likely to look pretty different. .. _upgrading-commands: Shell command execution (``local``/``run``/``sudo``) ---------------------------------------------------- General ~~~~~~~ Behaviors shared across either ``run``/``sudo``, or all of ``run``/``sudo``/``local``. Subsequent sections go into per-function differences. .. list-table:: :widths: 40 10 50 * - ``local`` and ``run``/``sudo`` have wildly differing APIs and implementations - Removed - All command execution is now unified; all three functions (now methods on `fabric.connection.Connection`, though ``local`` is also available as `invoke.run` for standalone use) have the same underlying protocol and logic (the `~invoke.runners.Runner` class hierarchy), with only low-level details like process creation and pipe consumption differing. For example, in v1 ``local`` required you to choose between displaying and capturing subprocess output; modern ``local`` is like ``run`` and does both at the same time. * - Prompt auto-response, via ``env.prompts`` and/or ``sudo``'s internals - Ported - The ``env.prompts`` functionality has been significantly fleshed out, into a framework of :ref:`Watchers ` which operate on any (local or remote!) running command's input and output streams. In addition, ``sudo`` has been rewritten to use that framework; while still useful enough to offer an implementation in core, it no longer does anything users cannot do themselves using public APIs. 
* - ``fabric.context_managers.cd``/``lcd`` (and ``prefix``) allow scoped mutation of executed commands - Ported/`Pending `__ - These are now methods on `~invoke.context.Context` (`Context.cd `, `Context.prefix `) but need work in its subclass `fabric.connection.Connection` (quite possibly including recreating ``lcd``) so that local vs remote state are separated. * - ``fabric.context_managers.shell_env`` and its specific expression ``path`` (plus ``env.shell_env``, ``env.path`` and ``env.path_behavior``), for modifying remote environment variables (locally, one would just modify `os.environ`.) - Ported - The context managers were the only way to set environment variables at any scope; in modern Fabric, subprocess shell environment is controllable per-call (directly in `fabric.connection.Connection.run` and siblings via an ``env`` kwarg) *and* across multiple calls (by manipulating the configuration system, statically or at runtime.) * - Controlling subprocess output & other activity display text by manipulating ``fabric.state.output`` (directly or via ``fabric.context_managers.hide``, ``show`` or ``quiet`` as well as the ``quiet`` kwarg to ``run``/``sudo``; plus ``utils.puts``/``fastprint``) - Ported/`Pending `__ - The core concept of "output levels" is gone, likely to be replaced in the near term by a logging module (stdlib or other) which the old output levels poorly reimplemented. Command execution methods like `~invoke.runners.Runner.run` retain a ``hide`` kwarg controlling which subprocess streams are copied to your terminal, and an ``echo`` kwarg controlling whether commands are printed before execution. All of these also honor the configuration system. 
* - ``timeout`` kwarg and the ``CommandTimeout`` exception raised when said command-runtime timeout was violated - Ported - Primarily lives at the Invoke layer now, but applies to all command execution, local or remote; see the ``timeout`` argument to `~invoke.runners.Runner.run` and its related configuration value and CLI flag. * - ``pty`` kwarg and ``env.always_use_pty``, controlling whether commands run in a pseudo-terminal or are invoked directly - Ported - This has been thoroughly ported (and its behavior often improved) including preservation of the ``pty`` kwarg and updating the config value to be simply ``run.pty``. However, a major change is that pty allocation is now ``False`` by default instead of ``True``. Fabric 0.x and 1.x already changed this value around; during Fabric 1's long lifetime it became clear that neither default works for all or even most users, so we opted to return the default to ``False`` as it's cleaner and less wasteful. * - ``combine_stderr`` (kwarg and ``env.combine_stderr``) controlling whether Paramiko weaves remote stdout and stderr into the stdout stream - Removed - This wasn't terrifically useful, and often caused conceptual problems in tandem with ``pty`` (as pseudo-terminals by their nature always combine the two streams.) We recommend users who really need both streams to be merged, either use shell redirection in their command, or set ``pty=True``. * - ``warn_only`` kwarg for preventing automatic abort on non-zero return codes - Ported - This is now just ``warn``, both kwarg and config value. It continues to default to ``False``. * - ``stdout`` and ``stderr`` kwargs for reassigning default stdout/err mirroring targets, which otherwise default to the appropriate `sys` members - Ported - These are now ``out_stream`` and ``err_stream`` but otherwise remain similar in nature. They are also accompanied by the new, rather obvious in hindsight ``in_stream``. 
* - ``capture_buffer_size`` arg & use of a ring buffer for storing captured stdout/stderr to limit total size - `Pending `__ - Existing `~invoke.runners.Runner` implementation uses regular lists for capture buffers, but we fully expect to upgrade this to a ring buffer or similar at some point. * - Return values are string-like objects with extra attributes like ``succeeded`` and ``return_code`` sprinkled on top - Ported - Return values are no longer string-a-likes with a semi-private API, but are full-fledged regular objects of type `~invoke.runners.Result`. They expose all of the same info as the old "attribute strings", and only really differ in that they don't pretend to be strings themselves. They do, however, still behave as booleans - just ones reflecting the exit code's relation to zero instead of whether there was any stdout. * - ``open_shell`` for obtaining interactive-friendly remote shell sessions (something that ``run`` historically was bad at) - Ported - Technically "removed", but only because the new version of ``run`` is vastly improved and can deal with interactive sessions at least as well as the old ``open_shell`` did, if not moreso. ``c.run("/my/favorite/shell", pty=True)`` should be all you need. ``run`` ~~~~~~~ .. list-table:: :widths: 40 10 50 * - ``shell`` / ``env.use_shell`` designating whether or not to wrap commands within an explicit call to e.g. ``/bin/sh -c "real command"``; plus their attendant options like ``shell_escape`` - Removed - Non-``sudo`` remote execution never truly required an explicit shell wrapper: the remote SSH daemon hands your command string off to the connecting user's login shell in almost all cases. Since wrapping is otherwise extremely error-prone and requires frustrating escaping rules, we dropped it for this use case. See the matching line items for ``local`` and ``sudo`` as their situations differ. 
(For now, because they all share the same underpinnings, `fabric.connection.Connection.run` does accept a ``shell`` kwarg - it just doesn't do anything with it.) ``sudo`` ~~~~~~~~ Unless otherwise noted, all common ``run``+``sudo`` args/functionality (e.g. ``pty``, ``warn_only`` etc) are covered above in the section on ``run``; the below are ``sudo`` specific. .. list-table:: :widths: 40 10 50 * - ``shell`` / ``env.use_shell`` designating whether or not to wrap commands within an explicit call to e.g. ``/bin/sh -c "real command"`` - `Pending `__/Removed - See the note above under ``run`` for details on shell wrapping as a general strategy; unfortunately for ``sudo``, some sort of manual wrapping is still necessary for nontrivial commands (i.e. anything using actual shell syntax as opposed to a single program's argv) due to how the command string is handed off to the ``sudo`` program. We hope to upgrade ``sudo`` soon so it can perform a common-best-case, no-escaping-required shell wrapping on your behalf; see the 'Pending' link. * - ``user`` argument (and ``env.sudo_user``) allowing invocation via ``sudo -u `` (instead of defaulting to root) - Ported - This is still here, and still called ``user``. * - ``group`` argument controlling the effective group of the sudo'd command - `Pending `__ - This has not been ported yet. ``local`` ~~~~~~~~~ See the 'general' notes at top of this section for most details about the new ``local``. A few specific extras are below. .. list-table:: :widths: 40 10 50 * - ``shell`` kwarg designating which shell to ask `subprocess.Popen` to use - Ported - Basically the same as in v1, though there are now situations where `os.execve` (or similar) is used instead of `subprocess.Popen`. Behavior is much the same: no shell wrapping (as in legacy ``run``), just informing the operating system what actual program to run. .. _upgrading-utility: Utilities --------- .. 
list-table:: :widths: 40 10 50 * - Error handling via ``abort`` and ``warn`` - Ported - The old functionality leaned too far in the "everything is a DSL" direction & didn't offer enough value to offset how it gets in the way of experienced Pythonistas. These functions have been removed in favor of "just raise an exception" (with one useful option being Invoke's `~invoke.exceptions.Exit`) as exception handling feels more Pythonic than thin wrappers around ``sys.exit`` or having to ``except SystemExit:`` and hope it was a `SystemExit` your own code raised! * - ANSI color helpers in ``fabric.colors`` allowed users to easily print ANSI colored text without a standalone library - Removed - There seemed no point to poorly replicating one of the many fine terminal-massaging libraries out there (such as those listed in the description of `#101 `_) in the rewrite, so we didn't. That said, it seems highly plausible we'll end up vendoring such a library in the future to offer internal color support, at which point "baked-in" color helpers would again be within easy reach. * - ``with char_buffered`` context manager for forcing a local stream to be character buffered - Ported - This is now `~invoke.terminals.character_buffered`. * - ``docs.unwrap_tasks`` for extracting docstrings from wrapped task functions - Ported - v1 required using a Fabric-specific 'unwrap_tasks' helper function somewhere in your Sphinx build pipeline; now you can instead just enable the new `invocations.autodoc `_ Sphinx mini-plugin in your extensions list; see link for details. * - ``network.normalize``, ``denormalize`` and ``parse_host_string``, ostensibly internals but sometimes exposed to users for dealing with host strings - Removed - As with other host-string-related tools, these are gone and serve no purpose. `fabric.connection.Connection` is now the primary API focus and has individual attributes for all "host string" components. 
* - ``utils.indent`` for indenting/wrapping text (uncommonly used) - Pending - Not ported yet; ideally we'll just vendor a third party lib in Invoke. * - ``reboot`` for rebooting and reconnecting to a remote system - Removed - No equivalent has been written for modern Fabric; now that the connection/client objects are made explicit, one can simply instantiate a new object with the same parameters (potentially with sufficient timeout parameters to get past the reboot, if one doesn't want to manually call something like `time.sleep`.) There is a small chance it will return if there appears to be enough need; if so, it's likely to be a more generic reconnection related `fabric.connection.Connection` method, where the user is responsible for issuing the restart shell command via ``sudo`` themselves. * - ``require`` for ensuring certain key(s) in ``env`` have values set, optionally by noting they can be ``provided_by=`` a list of setup tasks - Removed - This has not been ported, in part because the maintainers never used it themselves, and is unlikely to be directly reimplemented. However, its core use case of "require certain data to be available to run a given task" may return within the upcoming dependency framework. * - ``prompt`` for prompting the user & storing the entered data (optionally with validation) directly into ``env`` - Removed - Like ``require``, this seemed like a less-used feature (especially compared to its sibling ``confirm``) and was not ported. If it returns it's likely to be via ``invocations``, which is where ``confirm`` ended up. .. _upgrading-networking: Networking ---------- .. list-table:: :widths: 40 10 50 * - ``env.gateway`` for setting an SSH jump gateway - Ported - This is now the ``gateway`` kwarg to `fabric.connection.Connection`, and -- for the newly supported ``ProxyJump`` style gateways, which can be nested indefinitely! -- should be another `fabric.connection.Connection` object instead of a host string. 
(You may specify a runtime, non-SSH-config-driven ``ProxyCommand``-style string as the ``gateway`` kwarg instead, which will act just like a regular ``ProxyCommand``.) * - ``ssh_config``-driven ``ProxyCommand`` support - Ported - This continues to work as it did in v1. * - ``with remote_tunnel(...):`` port forwarding - Ported - This is now `fabric.connection.Connection.forward_local`, since it's used to *forward* a *local* port to the remote end. (Newly added is the logical inverse, `fabric.connection.Connection.forward_remote`.) * - ``NetworkError`` raised on some network related errors - Removed - In v1 this was simply a (partially implemented) stepping-back from the original "just sys.exit on any error!" behavior. Modern Fabric is significantly more exception-friendly; situations that would raise ``NetworkError`` in v1 now simply become the real underlying exceptions, typically from Paramiko or the stdlib. * - ``env.keepalive`` for setting network keepalive value - `Pending `__ - Not ported yet. * - ``env.connection_attempts`` for setting connection retries - `Pending `__ - Not ported yet. * - ``env.timeout`` for controlling connection (and sometimes command execution) timeout - Ported - Connection timeout is now controllable both via the configuration system (as ``timeouts.connect``) and a direct kwarg on `fabric.connection.Connection`. Command execution timeout is its own setting now, ``timeouts.command`` and a ``timeout`` kwarg to ``run`` and friends. Authentication -------------- .. note:: Some ``env`` keys from v1 were simply passthroughs to Paramiko's `SSHClient.connect ` method. Modern Fabric gives you explicit control over the arguments it passes to that method, via the ``connect_kwargs`` :ref:`configuration ` subtree, and the below table will frequently refer you to that approach. .. list-table:: :widths: 40 10 50 * - ``env.key_filename`` - Ported - Use ``connect_kwargs``. * - ``env.password`` - Ported - Use ``connect_kwargs``. 
Also note that this used to perform double duty as connection *and* sudo password; the latter is now found in the ``sudo.password`` setting. * - ``env.gss_(auth|deleg|kex)`` - Ported - Use ``connect_kwargs``. * - ``env.key``, a string or file object holding private key data, whose specific type is auto-determined and instantiated for use as the ``pkey`` connect kwarg - Removed - This has been dropped as unnecessary (& bug-prone) obfuscation of Paramiko-level APIs; users should already know which type of key they're dealing with and instantiate a ``PKey`` subclass themselves, placing the result in ``connect_kwargs.pkey``. * - ``env.no_agent``, which is a renaming/inversion of Paramiko's ``allow_agent`` connect kwarg - Ported - Users who were setting this to ``True`` should now simply set ``connect_kwargs.allow_agent`` to ``False`` instead. * - ``env.no_keys``, similar to ``no_agent``, just an inversion of the ``look_for_keys`` connect kwarg - Ported - Use ``connect_kwargs.look_for_keys`` instead (setting it to ``False`` to disable Paramiko's default key-finding behavior.) * - ``env.passwords`` (and ``env.sudo_passwords``) stores connection/sudo passwords in a dict keyed by host strings - Ported/`Pending `__ - Each `fabric.connection.Connection` object may be configured with its own ``connect_kwargs`` given at instantiation time, allowing for per-host password configuration already. However, we expect users may want a simpler way to set configuration values that are turned into implicit `fabric.connection.Connection` objects automatically; such a feature is still pending. * - Configuring ``IdentityFile`` in one's ``ssh_config`` - Ported - Still honored, along with a bunch of newly honored ``ssh_config`` settings; see :ref:`ssh-config`. .. _upgrading-transfers: File transfer ------------- The below feature breakdown applies to the ``put`` and/or ``get`` "operation" functions from v1. .. 
list-table:: :widths: 40 10 50 * - Transferring individual files owned by the local and remote user - Ported - Basic file transfer in either direction works and is offered as `fabric.connection.Connection.get`/`fabric.connection.Connection.put` (though the code is split out into a separate-responsibility class, `fabric.transfer.Transfer`.) The signature of these methods has been cleaned up compared to v1, though their positional-argument essence (``get(remote, local)`` and ``put(local, remote)``) remains the same. * - Omit the 'destination' argument for implicit 'relative to local context' behavior (e.g. ``put("local.txt")`` implicitly uploading to remote ``$HOME/local.txt``.) - Ported - You should probably still be explicit, because this is Python. * - Use either file paths *or* file-like objects on either side of the transfer operation (e.g. uploading a ``StringIO`` instead of an on-disk file) - Ported - This was a useful enough and simple enough trick to keep around. * - Preservation of source file mode at destination (e.g. ensuring an executable bit that would otherwise be dropped by the destination's umask, is re-added.) - Ported - Not only was this ported, but it is now the default behavior. It may be disabled via kwarg if desired. * - Bundled ``sudo`` operations as part of file transfer - Removed - This was one of the absolute buggiest parts of v1 and never truly did anything users could not do themselves with a followup call to ``sudo``, so we opted not to port it. Should enough users pine for its loss, we *may* reconsider, but if we do it will be with a serious eye towards simplification and/or an approach not involving intermediate files. * - Recursive multi-file transfer (e.g. 
``put(a_directory)`` uploads entire directory and all its contents) - Removed - This was *another* one of the buggiest parts of v1, and over time it became clear that its maintenance burden far outweighed the fact that it was poorly reinventing ``rsync`` and/or the use of archival file tools like ye olde ``tar``+``gzip``. For one potential workaround, see the ``rsync`` function in `patchwork `_. * - Remote file path tilde expansion - Removed - This behavior is ultimately unnecessary (one can simply leave the tilde off for the same result) and had a few pernicious bugs of its own, so it's gone. * - Naming downloaded files after some aspect of the remote destination, to avoid overwriting during multi-server actions - Ported - Added back (to `fabric.transfer.Transfer.get`) in Fabric 2.6. .. _upgrading-configuration: Configuration ------------- In general, configuration has been massively improved over the old ``fabricrc`` files; most config logic comes from :ref:`Invoke's configuration system `, which offers a full-fledged configuration hierarchy (in-code config, multiple config file locations, environment variables, CLI flags, and more) and multiple file formats. Nearly all configuration avenues in Fabric 1 become, in modern Fabric, manipulation of whatever part of the config hierarchy is most appropriate for your needs. Modern versions of Fabric only make minor modifications to (or parameterizations of) Invoke's setup; see :ref:`our locally-specific config doc page ` for details. .. note:: Make sure to look elsewhere in this document for details on any given v1 ``env`` setting, as many have moved outside the configuration system into object or method keyword arguments. .. list-table:: :widths: 40 10 50 * - Modifying ``fabric.(api.)env`` directly - Ported - To effect truly global-scale config changes, use config files, task-collection-level config data, or the invoking shell's environment variables. 
* - Making locally scoped ``fabric.env`` changes via ``with settings(...):`` or its decorator equivalent, ``@with_settings`` - Ported/Pending - Most of the use cases surrounding ``settings`` are now served by the fact that `fabric.connection.Connection` objects keep per-host/connection state - the pattern of switching the implicit global context around was a design antipattern which is now gone. The remaining such use cases have been turned into context-manager methods of `fabric.connection.Connection` (or its parent class), or have such methods pending. * - SSH config file loading (off by default, limited to ``~/.ssh/config`` only unless configured to a different, single path) - Ported - Much improved: SSH config file loading is **on** by default (which :ref:`can be changed `), multiple sources are loaded and merged just like OpenSSH, and more besides; see :ref:`ssh-config`. In addition, we've added support for some ``ssh_config`` directives which were ignored by v1, such as ``ConnectTimeout`` and ``ProxyCommand``, and going forwards we intend to support as much of ``ssh_config`` as is reasonably possible. .. _upgrading-contrib: ``contrib`` ----------- The old ``contrib`` module represented "best practice" functions that did not, themselves, require core support from the rest of Fabric but were built using the same primitives available to users. In modern Fabric, that responsibility has been removed from the core library into other standalone libraries which have their own identity & release process, typically either `invocations `_ (local-oriented code that does not use SSH) or `patchwork `_ (primarily remote-oriented code, though anything not explicitly dealing with both ends of the connection will work just as well locally.) Those libraries are still a work in progress, not least because we still need to identify the best way to bridge the gap between them (as many operations are not intrinsically local-or-remote but can work on either end.) 
Since they are by definition built on the core APIs available to all users, they currently get less development focus; users can always implement their own versions without sacrificing much (something less true for the core libraries.) We expect to put more work into curating these collections once the core APIs have settled down. Details about what happened to each individual chunk of ``fabric.contrib`` are in the below table: .. list-table:: :widths: 40 10 50 * - ``console.confirm`` for easy bool-returning confirmation prompts - Ported - Moved to ``invocations.console.confirm``, with minor signature tweaks. * - ``django.*``, supporting integration with a local Django project re: importing and using Django models and other code - Removed - We aren't even sure if this is useful a decade after it was written, given how much Django has surely changed since then. If you're reading and are sad that this is gone, let us know! * - ``files.*`` (e.g. ``exists``, ``append``, ``contains`` etc) for interrogating and modifying remote files - Ported/Pending - Many of the more useful functions in this file have been ported to ``patchwork.files`` but are still in an essentially alpha state. Others, such as ``is_link``, ``comment``/``uncomment``, etc have not been ported yet. If they are, they are likely to end up in the same place. * - ``project.rsync_project`` for rsyncing the entire host project remotely - Ported - Now ``patchwork.transfers.rsync``, with some modifications. * - ``project.upload_project`` for uploading host project via archive file and scp - Removed - This did not seem worth porting; the overall pattern of "copy my local bits remotely" is already arguably an antipattern (vs repeatable deploys of artifacts, or at least remote checkout of a VCS tag) and if one is going down that road anyways, rsync is a much smarter choice. .. 
_upgrading-env: ``fabric.env`` reference ------------------------ Many/most of the members in v1's ``fabric.env`` are covered in the above per-topic sections; any that are *not* covered elsewhere, live here. All are explicitly noted as ``env.`` for ease of searching in your browser or viewer. A small handful of env vars were never publicly documented & were thus implicitly private; those are not represented here. .. list-table:: :widths: 40 10 50 * - ``env.abort_exception`` for setting which exception is used to abort - Removed - Aborting as a concept is gone, just raise whatever exception seems most reasonable to surface to an end user, or use `~invoke.exceptions.Exit`. See also :ref:`upgrading-utility`. * - ``env.all_hosts`` and ``env.tasks`` listing execution targets - Ported/`Pending `__ - Fabric's `~invoke.executor.Executor` subclass stores references to all CLI parsing results (including the value of :option:`--hosts`, the tasks requested and their args, etc) and the intent is for users to have access to that information. However, the details for that API (e.g. exposing the executor via a task's `~invoke.context.Context`/`fabric.connection.Connection`) are still in flux. * - ``env.command`` noting currently executing task name (in hindsight, quite the misnomer...) - Ported/`Pending `__ - See the notes for ``env.all_hosts`` above - same applies here re: user visibility into CLI parsing results. * - ``env.command_prefixes`` for visibility into (arguably also mutation of) the shell command prefixes to be applied to ``run``/``sudo`` - Ported - This is now `~invoke.context.Context.command_prefixes`. * - ``env.cwd`` noting current intended working directory - Ported - This is now `~invoke.context.Context.command_cwds` (a list, not a single string, to more properly model the intended contextmanager-driven use case.) Note that remote-vs-local context for this data isn't yet set up; see the notes about ``with cd`` under :ref:`upgrading-commands`. 
* - ``env.dedupe_hosts`` controlling whether duplicate hosts in merged host lists get deduplicated or not - `Pending `__ - Not ported yet, will probably get tackled as part of roles/host lists overhaul. * - ``env.echo_stdin`` (undocumented) for turning off the default echoing of standard input - Ported - Is now a config option under the ``run`` tree, with much the same behavior. * - ``env.local_user`` for read-only access to the discovered local username - Removed - We're not entirely sure why v1 felt this was worth caching in the config; if you need this info, just import and call `fabric.util.get_local_user`. * - ``env.output_prefix`` determining whether or not line-by-line host-string prefixes are displayed - `Pending `__ - Differentiating parallel stdout/err is still a work in progress; we may end up reusing line-by-line logging and prefixing (ideally via actual logging) or we may try for something cleaner such as streaming to per-connection log files. * - ``env.prompts`` controlling prompt auto-response - Ported - Prompt auto-response is now publicly implemented as the `~invoke.watchers.StreamWatcher` and `~invoke.watchers.Responder` class hierarchy, instances of which can be handed to ``run`` via kwarg or stored globally in the config as ``run.watchers``. * - ``env.real_fabfile`` storing read-only fabfile path which was loaded by the CLI machinery - Ported - The loaded task `~invoke.collection.Collection` is stored on both the top level `~invoke.program.Program` object as well as the `~invoke.executor.Executor` which calls tasks; and `~invoke.collection.Collection` has a ``loaded_from`` attribute with this information. * - ``env.remote_interrupt`` controlling how interrupts (i.e. 
a local `KeyboardInterrupt` are caught, forwarded or other - Ported/Removed - Invoke's interrupt capture behavior is currently "always just send the interrupt character to the subprocess and continue", allowing subprocesses to handle ``^C`` however they need to, which is an improvement over Fabric 1 and roughly equivalent to setting ``env.remote_interrupt = True``. Allowing users to change this behavior via config is not yet implemented, and may not be, depending on whether anybody needs it - it was added as an option in v1 for backwards compat reasons. It is also technically possible to change interrupt behavior by subclassing and overriding `invoke.runners.Runner.send_interrupt`. * - ``env.roles``, ``env.roledefs`` and ``env.effective_roles`` controlling/exposing what roles are available or currently in play - `Pending `__ - As noted in :ref:`upgrading-api`, roles as a concept were ported to `fabric.group.Group`, but there's no central clearinghouse in which to store them. We *may* delegate this to userland forever, but seems likely a common-best-practice option (such as creating `Groups ` from some configuration subtree and storing them as a `~invoke.context.Context` attribute) will appear in early 2.x. * - ``env.ok_ret_codes`` for overriding the default "0 good, non-0 bad" error detection for subprocess commands - `Pending `__ - Not ported yet, but should involve some presumably minor updates to `invoke.runners.Runner.generate_result` and `~invoke.runners.Result`. * - ``env.sudo_prefix`` determining the sudo binary name + its flags used when creating ``sudo`` command strings - `Pending `__ - Sudo command construction does not currently look at the config for anything but the actual sudo prompt. * - ``env.sudo_prompt`` for setting the prompt string handed to ``sudo`` (and then expected in return for auto-replying with a configured password) - Ported - Is now ``sudo.prompt`` in the configuration system. 
* - ``env.use_exceptions_for`` to note which actions raise exceptions - Removed - As with most other functionality surrounding Fabric 1's "jump straight to `sys.exit`" design antipattern, this is gone - modern Fabric will not be hiding any exceptions from user-level code. * - ``env.use_ssh_config`` to enable off-by-default SSH config loading - Ported - SSH config loading is now on by default, but an option remains to disable it. See :ref:`upgrading-configuration` for more. * - ``env.version`` exposing current Fabric version number - Removed - Just ``import fabric`` and reference ``fabric.__version__`` (string) or ``fabric.__version_info__`` (tuple). Example upgrade process ======================= This section goes over upgrading a small but nontrivial Fabric 1 fabfile to work with modern Fabric. It's not meant to be exhaustive, merely illustrative; for a full list of how to upgrade individual features or concepts, see :ref:`upgrade-specifics`. Sample original fabfile ----------------------- Here's a (slightly modified to concur with 'modern' Fabric 1 best practices) copy of Fabric 1's final tutorial snippet, which we will use as our test case for upgrading:: from fabric.api import abort, env, local, run, settings, task from fabric.contrib.console import confirm env.hosts = ["my-server"] @task def test(): with settings(warn_only=True): result = local("./manage.py test my_app", capture=True) if result.failed and not confirm("Tests failed. 
Continue anyway?"): abort("Aborting at user request.") @task def commit(): local("git add -p && git commit") @task def push(): local("git push") @task def prepare_deploy(): test() commit() push() @task def deploy(): code_dir = "/srv/django/myproject" with settings(warn_only=True): if run("test -d {}".format(code_dir)).failed: cmd = "git clone user@vcshost:/path/to/repo/.git {}" run(cmd.format(code_dir)) with cd(code_dir): run("git pull") run("touch app.wsgi") We'll port this directly, meaning the result will still be ``fabfile.py``, though we'd like to note that writing your code in a more library-oriented fashion - even just as functions not wrapped in ``@task`` - can make testing and reusing code easier. Imports ------- In modern Fabric, we don't need to import nearly as many functions, due to the emphasis on object methods instead of global functions. We only need the following: - `~invoke.exceptions.Exit`, a friendlier way of requesting a `sys.exit`; - `@task `, as before, but coming from Invoke as it's not SSH-specific; - ``confirm``, which now comes from the Invocations library (also not SSH-specific; though Invocations is one of the descendants of ``fabric.contrib``, which no longer exists); :: from fabric import task from invoke import Exit from invocations.console import confirm Host list --------- The idea of a predefined *global* host list is gone; there is currently no direct replacement. In general, users can set up their own execution context, creating explicit `fabric.connection.Connection` and/or `fabric.group.Group` objects as needed; core Fabric is in the process of building convenience helpers on top of this, but "create your own Connections" will always be there as a backstop. Speaking of convenience helpers: most of the functionality of ``fab --hosts`` and ``@hosts`` has been ported over -- the former directly (see :option:`--hosts`), the latter as a `@task ` keyword argument. 
Thus, for now our example will be turning the global ``env.hosts`` into a lightweight module-level variable declaration, intended for use in the subsequent calls to ``@task``:: my_hosts = ["my-server"] .. note:: This is an area under active development, so feedback is welcomed. .. TODO: - pre-task example - true baked-in default example (requires some sort of config hook) Test task --------- The first task in the fabfile uses a good spread of the API. We'll outline the changes here (though again, all details are in :ref:`upgrade-specifics`): - Declaring a function as a task is nearly the same as before: use a ``@task`` decorator (which, in modern Fabric, can take more optional keyword arguments than its predecessor, including some which replace some of v1's decorators). - ``@task``-wrapped functions must now take an explicit initial context argument, whose value will be a `fabric.connection.Connection` object at runtime. - The use of ``with settings(warn_only=True)`` can be replaced by a simple kwarg to the ``local`` call. - That ``local`` call is now a method call on the `fabric.connection.Connection`, `fabric.connection.Connection.local`. - ``capture`` is no longer a useful argument; we can now capture and display at the same time, locally or remotely. If you don't actually *want* a local subprocess to mirror its stdout/err while it runs, you can simply say ``hide=True`` (or ``hide="stdout"`` or etc.) - Result objects are pretty similar between versions; modern Fabric's results no longer pretend to "be" strings, but instead act more like booleans, acting truthy if the command exited cleanly, and falsey otherwise. In terms of attributes exhibited, most of the same info is available, and more besides. - ``abort`` is gone; you should use whatever exceptions you feel are appropriate, or `~invoke.exceptions.Exit` for a `sys.exit` equivalent. (Or just call `sys.exit` if you want a no-questions-asked immediate exit that even our CLI machinery won't touch.) 
The result:: @task def test(c): result = c.local("./manage.py test my_app", warn=True) if not result and not confirm("Tests failed. Continue anyway?"): raise Exit("Aborting at user request.") Other simple tasks ------------------ The next two tasks are simple one-liners, and you've already seen what replaced the global ``local`` function:: @task def commit(c): c.local("git add -p && git commit") @task def push(c): c.local("git push") Calling tasks from other tasks ------------------------------ This is another area that is in flux at the Invoke level, but for now, we can simply call the other tasks as functions, just as was done in v1. The main difference is that we want to pass along our context object to preserve the configuration context (such as loaded config files or CLI flags):: @task def prepare_deploy(c): test(c) commit(c) push(c) Actual remote steps ------------------- Note that up to this point, nothing truly Fabric-related has been in play - `fabric.connection.Connection.local` is just a rebinding of `Context.run `, Invoke's local subprocess execution method. Now we get to the actual deploy step, which invokes `fabric.connection.Connection.run` instead, executing remotely (on whichever host the `fabric.connection.Connection` has been bound to). ``with cd`` is not fully implemented for the remote side of things, but we expect it will be soon. For now we fall back to command chaining with ``&&``. And, notably, now that we care about selecting host targets, we refer to our earlier definition of a default host list -- ``my_hosts`` -- when declaring the default host list for this task. 
:: @task(hosts=my_hosts) def deploy(c): code_dir = "/srv/django/myproject" if not c.run("test -d {}".format(code_dir), warn=True): cmd = "git clone user@vcshost:/path/to/repo/.git {}" c.run(cmd.format(code_dir)) c.run("cd {} && git pull".format(code_dir)) c.run("cd {} && touch app.wsgi".format(code_dir)) The whole thing --------------- Now we have the entire, upgraded fabfile that will work with modern Fabric:: from invoke import Exit from invocations.console import confirm from fabric import task my_hosts = ["my-server"] @task def test(c): result = c.local("./manage.py test my_app", warn=True) if not result and not confirm("Tests failed. Continue anyway?"): raise Exit("Aborting at user request.") @task def commit(c): c.local("git add -p && git commit") @task def push(c): c.local("git push") @task def prepare_deploy(c): test(c) commit(c) push(c) @task(hosts=my_hosts) def deploy(c): code_dir = "/srv/django/myproject" if not c.run("test -d {}".format(code_dir), warn=True): cmd = "git clone user@vcshost:/path/to/repo/.git {}" c.run(cmd.format(code_dir)) c.run("cd {} && git pull".format(code_dir)) c.run("cd {} && touch app.wsgi".format(code_dir)) fabric-2.6.0/tasks.py000066400000000000000000000117111400143053200144350ustar00rootroot00000000000000from functools import partial from os import environ, getcwd import sys from invocations import travis from invocations.checks import blacken from invocations.docs import docs, www, sites, watch_docs from invocations.pytest import test, integration as integration_, coverage from invocations.packaging import release from invocations.util import tmpdir from invoke import Collection, task from invoke.util import LOG_FORMAT # Neuter the normal release.publish task to prevent accidents, then reinstate # it as a custom task that does dual fabric-xxx and fabric2-xxx releases. 
# TODO: tweak this once release.all_ actually works right...sigh # TODO: if possible, try phrasing as a custom build that builds x2, and then # convince the vanilla publish() to use that custom build instead of its local # build? # NOTE: this skips the dual_wheels, alt_python bits the upstream task has, # which are at the moment purely for Invoke's sake (as it must publish explicit # py2 vs py3 wheels due to some vendored dependencies) @task def publish( c, sdist=True, wheel=False, index=None, sign=False, dry_run=False, directory=None, check_desc=False, ): # TODO: better pattern for merging kwargs + config config = c.config.get("packaging", {}) index = config.get("index", index) sign = config.get("sign", sign) check_desc = config.get("check_desc", check_desc) # Initial sanity check, if needed. Will die usefully. # TODO: this could also get factored out harder in invocations. shrug. it's # like 3 lines total... if check_desc: c.run("python setup.py check -r -s") with tmpdir(skip_cleanup=dry_run, explicit=directory) as directory: # Doesn't reeeeally need to be a partial, but if we start having to add # a kwarg to one call or the other, it's nice builder = partial( release.build, c, sdist=sdist, wheel=wheel, directory=directory ) # Vanilla build builder() # Fabric 2 build environ["PACKAGE_AS_FABRIC2"] = "yes" builder() # Upload release.upload(c, directory, index, sign, dry_run) @task def sanity_test_from_v1(c): """ Run some very quick in-process sanity tests on a dual fabric1-v-2 env. Assumes Fabric 2+ is already installed as 'fabric2'. """ # This cannot, by definition, work under Python 3 as Fabric 1 is not Python # 3 compatible. PYTHON = environ.get("TRAVIS_PYTHON_VERSION", "") if PYTHON.startswith("3") or PYTHON == "pypy3": return c.run("pip install 'fabric<2'") # Make darn sure the two copies of fabric are coming from install root, not # local directory - which would result in 'fabric' always being v2! 
for serious in (getcwd(), ""): if serious in sys.path: # because why would .remove be idempotent?! sys.path.remove(serious) from fabric.api import env from fabric2 import Connection env.gateway = "some-gateway" env.no_agent = True env.password = "sikrit" env.user = "admin" env.host_string = "localghost" env.port = "2222" cxn = Connection.from_v1(env) config = cxn.config assert config.run.pty is True assert config.gateway == "some-gateway" assert config.connect_kwargs.password == "sikrit" assert config.sudo.password == "sikrit" assert cxn.host == "localghost" assert cxn.user == "admin" assert cxn.port == 2222 # TODO: as usual, this just wants a good pattern for "that other task, with a # tweaked default arg value" @task def integration( c, opts=None, pty=True, x=False, k=None, verbose=True, color=True, capture="no", module=None, ): return integration_(c, opts, pty, x, k, verbose, color, capture, module) # Better than nothing, since we haven't solved "pretend I have some other # task's signature" yet... publish.__doc__ = release.publish.__doc__ my_release = Collection( "release", release.build, release.status, publish, release.prepare ) ns = Collection( blacken, coverage, docs, integration, my_release, sites, test, travis, watch_docs, www, sanity_test_from_v1, ) ns.configure( { "tests": { # TODO: have pytest tasks honor these? "package": "fabric", "logformat": LOG_FORMAT, }, "packaging": { # NOTE: this is currently for identifying the source directory. # Should it get used for actual releasing, needs changing. "package": "fabric", "sign": True, "wheel": True, "check_desc": True, "changelog_file": "sites/www/changelog.rst", }, # TODO: perhaps move this into a tertiary, non automatically loaded, # conf file so that both this & the code under test can reference it? # Meh. 
"travis": { "sudo": {"user": "sudouser", "password": "mypass"}, "black": {"version": "18.6b4"}, }, } ) fabric-2.6.0/tests/000077500000000000000000000000001400143053200140775ustar00rootroot00000000000000fabric-2.6.0/tests/_support/000077500000000000000000000000001400143053200157525ustar00rootroot00000000000000fabric-2.6.0/tests/_support/config.yml000066400000000000000000000005021400143053200177370ustar00rootroot00000000000000# # Settings overrides for test-executed Invoke code. Test code typically tries # specifying this via the -f CLI flag or the runtime arguments to Config(). # run: # Disable all stdin mirroring by default. Otherwise, pytest's capture plugin # gets all upset. It looks difficult to change that, too. in_stream: false fabric-2.6.0/tests/_support/fabfile.py000066400000000000000000000035401400143053200177160ustar00rootroot00000000000000from invoke import Context, task as invtask from fabric import task, Connection @task def build(c): pass @task def deploy(c): pass @task def basic_run(c): c.run("nope") @task def expect_vanilla_Context(c): assert isinstance(c, Context) assert not isinstance(c, Connection) @task def expect_from_env(c): assert c.config.run.echo is True @task def expect_mutation_to_fail(c): # If user level config changes are preserved between parameterized per-host # task calls, this would assert on subsequent invocations... assert "foo" not in c.config # ... 
because of this: c.config.foo = "bar" @task def mutate(c): c.foo = "bar" @task def expect_mutation(c): assert c.foo == "bar" @task def expect_identity(c): assert c.config.connect_kwargs["key_filename"] == ["identity.key"] @task def expect_identities(c): assert c.config.connect_kwargs["key_filename"] == [ "identity.key", "identity2.key", ] @task def expect_connect_timeout(c): assert c.config.connect_kwargs["timeout"] == 5 @task def first(c): print("First!") @task def third(c): print("Third!") @task(pre=[first], post=[third]) def second(c, show_host=False): if show_host: print("Second: {}".format(c.host)) else: print("Second!") @task(hosts=["myhost"]) def hosts_are_myhost(c): c.run("nope") @task(hosts=["host1", "host2"]) def two_hosts(c): c.run("nope") @task(hosts=["someuser@host1:1234"]) def hosts_are_host_stringlike(c): c.run("nope") @task(hosts=["admin@host1", {"host": "host2"}]) def hosts_are_mixed_values(c): c.run("nope") @task(hosts=[{"host": "host1", "user": "admin"}, {"host": "host2"}]) def hosts_are_init_kwargs(c): c.run("nope") @invtask def vanilla_Task_works_ok(c): assert isinstance(c, Context) assert not isinstance(c, Connection) fabric-2.6.0/tests/_support/json_conf/000077500000000000000000000000001400143053200177305ustar00rootroot00000000000000fabric-2.6.0/tests/_support/json_conf/fabfile.py000066400000000000000000000001361400143053200216720ustar00rootroot00000000000000from fabric import task @task def expect_conf_value(c): assert c.it_came_from == "json" fabric-2.6.0/tests/_support/json_conf/fabric.json000066400000000000000000000000371400143053200220510ustar00rootroot00000000000000{ "it_came_from": "json" } fabric-2.6.0/tests/_support/prompting.py000066400000000000000000000001661400143053200203460ustar00rootroot00000000000000from fabric import task @task def expect_connect_kwarg(c, key, val): assert c.config.connect_kwargs[key] == val 
fabric-2.6.0/tests/_support/py_conf/000077500000000000000000000000001400143053200174075ustar00rootroot00000000000000fabric-2.6.0/tests/_support/py_conf/fabfile.py000066400000000000000000000001341400143053200213470ustar00rootroot00000000000000from fabric import task @task def expect_conf_value(c): assert c.it_came_from == "py" fabric-2.6.0/tests/_support/py_conf/fabric.py000066400000000000000000000000241400143053200212030ustar00rootroot00000000000000it_came_from = "py" fabric-2.6.0/tests/_support/runtime_fabfile.py000066400000000000000000000005121400143053200214550ustar00rootroot00000000000000from fabric import task @task def runtime_ssh_config(c): # NOTE: assumes it's run with host='runtime' + ssh_configs/runtime.conf # TODO: SSHConfig should really learn to turn certain things into ints # automatically... assert c.ssh_config["port"] == "666" assert c.port == 666 @task def dummy(c): pass fabric-2.6.0/tests/_support/ssh_config/000077500000000000000000000000001400143053200200745ustar00rootroot00000000000000fabric-2.6.0/tests/_support/ssh_config/both_proxies.conf000066400000000000000000000001231400143053200234440ustar00rootroot00000000000000Host runtime ProxyCommand "netcat I guess" ProxyJump winner@everything:777 fabric-2.6.0/tests/_support/ssh_config/overridden_hostname.conf000066400000000000000000000000631400143053200250010ustar00rootroot00000000000000Host aliasname Hostname realname Port 2222 fabric-2.6.0/tests/_support/ssh_config/proxyjump.conf000066400000000000000000000000611400143053200230150ustar00rootroot00000000000000Host runtime ProxyJump jumpuser@jumphost:373 fabric-2.6.0/tests/_support/ssh_config/proxyjump_multi.conf000066400000000000000000000001411400143053200242260ustar00rootroot00000000000000Host runtime ProxyJump jumpuser@jumphost:373,jumpuser2@jumphost2:872,jumpuser3@jumphost3:411 fabric-2.6.0/tests/_support/ssh_config/proxyjump_multi_recursive.conf000066400000000000000000000000641400143053200263210ustar00rootroot00000000000000Host *.tld ProxyJump 
bastion1.tld,bastion2.tld fabric-2.6.0/tests/_support/ssh_config/proxyjump_recursive.conf000066400000000000000000000000451400143053200251060ustar00rootroot00000000000000Host *.tld ProxyJump bastion.tld fabric-2.6.0/tests/_support/ssh_config/runtime.conf000066400000000000000000000002621400143053200224260ustar00rootroot00000000000000Host runtime User abaddon Port 666 ForwardAgent yes ProxyCommand "my gateway" ConnectTimeout 15 IdentityFile whatever.key IdentityFile some-other.key fabric-2.6.0/tests/_support/ssh_config/runtime_identity.conf000066400000000000000000000002651400143053200243420ustar00rootroot00000000000000Host runtime # Named 'backwards' to sanity test ordering (though that's truly a # Paramiko level thing.) IdentityFile ssh-config-B.key IdentityFile ssh-config-A.key fabric-2.6.0/tests/_support/ssh_config/system.conf000066400000000000000000000000631400143053200222660ustar00rootroot00000000000000Host system Port 123 Host shared Port 123 fabric-2.6.0/tests/_support/ssh_config/user.conf000066400000000000000000000000611400143053200217160ustar00rootroot00000000000000Host user Port 321 Host shared Port 321 fabric-2.6.0/tests/_support/yaml_conf/000077500000000000000000000000001400143053200177215ustar00rootroot00000000000000fabric-2.6.0/tests/_support/yaml_conf/fabfile.py000066400000000000000000000001361400143053200216630ustar00rootroot00000000000000from fabric import task @task def expect_conf_value(c): assert c.it_came_from == "yaml" fabric-2.6.0/tests/_support/yaml_conf/fabric.yaml000066400000000000000000000000231400143053200220260ustar00rootroot00000000000000it_came_from: yaml fabric-2.6.0/tests/_support/yml_conf/000077500000000000000000000000001400143053200175605ustar00rootroot00000000000000fabric-2.6.0/tests/_support/yml_conf/fabfile.py000066400000000000000000000007071400143053200215260ustar00rootroot00000000000000from fabric import task @task def expect_conf_value(c): assert c.it_came_from == "yml" @task def expect_conf_key_filename(c): expected = 
["private.key", "other.key"] got = c.connect_kwargs.key_filename assert got == expected, "{!r} != {!r}".format(got, expected) @task def expect_cli_key_filename(c): expected = ["cli.key"] got = c.connect_kwargs.key_filename assert got == expected, "{!r} != {!r}".format(got, expected) fabric-2.6.0/tests/_support/yml_conf/fabric.yml000066400000000000000000000001241400143053200215260ustar00rootroot00000000000000it_came_from: yml connect_kwargs: key_filename: - private.key - other.key fabric-2.6.0/tests/_util.py000066400000000000000000000037771400143053200156030ustar00rootroot00000000000000from contextlib import contextmanager import os import re import sys from invoke.vendor.lexicon import Lexicon from pytest_relaxed import trap from fabric.main import make_program support = os.path.join(os.path.abspath(os.path.dirname(__file__)), "_support") config_file = os.path.abspath(os.path.join(support, "config.yml")) # TODO: move invoke's support_path + load + etc somewhere importable? or into # pytest-relaxed, despite it not being strictly related to that feature set? # ugh @contextmanager def support_path(): sys.path.insert(0, support) try: yield finally: sys.path.pop(0) def load(name): with support_path(): imported = __import__(name) return imported # TODO: this could become a fixture in conftest.py, presumably, and just yield # stdout, allowing the tests themselves to assert more naturally @trap def expect(invocation, out, program=None, test="equals"): if program is None: program = make_program() program.run("fab {}".format(invocation), exit=False) output = sys.stdout.getvalue() if test == "equals": assert output == out elif test == "contains": assert out in output elif test == "regex": assert re.match(out, output) else: err = "Don't know how to expect that {} !" assert False, err.format(test) def faux_v1_env(): # Close enough to v1 _AttributeDict... 
# Contains a copy of enough of v1's defaults to prevent us having to do a # lot of extra .get()s...meh return Lexicon( always_use_pty=True, forward_agent=False, gateway=None, host_string="localghost", key_filename=None, no_agent=False, password=None, port=22, ssh_config_path=None, # Used in a handful of sanity tests, so it gets a 'real' value. eh. sudo_password="nope", sudo_prompt=None, timeout=None, use_ssh_config=False, user="localuser", warn_only=False, ) fabric-2.6.0/tests/config.py000066400000000000000000000305401400143053200157200ustar00rootroot00000000000000import errno from os.path import join, expanduser from paramiko.config import SSHConfig from invoke.vendor.lexicon import Lexicon from fabric import Config from fabric.util import get_local_user from mock import patch, call from _util import support, faux_v1_env class Config_: def defaults_to_merger_of_global_defaults(self): # I.e. our global_defaults + Invoke's global_defaults c = Config() # From invoke's global_defaults assert c.run.warn is False # From ours assert c.port == 22 def our_global_defaults_can_override_invokes(self): "our global_defaults can override Invoke's key-by-key" with patch.object( Config, "global_defaults", return_value={ "run": {"warn": "nope lol"}, # NOTE: Config requires these to be present to instantiate # happily "load_ssh_configs": True, "ssh_config_path": None, }, ): # If our global_defaults didn't win, this would still # resolve to False. assert Config().run.warn == "nope lol" def has_various_Fabric_specific_default_keys(self): c = Config() assert c.port == 22 assert c.user == get_local_user() assert c.forward_agent is False assert c.connect_kwargs == {} assert c.timeouts.connect is None assert c.ssh_config_path is None assert c.inline_ssh_env is False def overrides_some_Invoke_defaults(self): config = Config() # This value defaults to False in Invoke proper. 
assert config.run.replace_env is True assert config.tasks.collection_name == "fabfile" def uses_Fabric_prefix(self): # NOTE: see also the integration-esque tests in tests/main.py; this # just tests the underlying data/attribute driving the behavior. assert Config().prefix == "fabric" class from_v1: def setup(self): self.env = faux_v1_env() def _conf(self, **kwargs): self.env.update(kwargs) return Config.from_v1(self.env) def must_be_given_explicit_env_arg(self): config = Config.from_v1( env=Lexicon(self.env, sudo_password="sikrit") ) assert config.sudo.password == "sikrit" class additional_kwargs: def forwards_arbitrary_kwargs_to_init(self): config = Config.from_v1( self.env, # Vanilla Invoke overrides={"some": "value"}, # Fabric system_ssh_path="/what/ever", ) assert config.some == "value" assert config._system_ssh_path == "/what/ever" def subservient_to_runtime_overrides(self): env = self.env env.sudo_password = "from-v1" config = Config.from_v1( env, overrides={"sudo": {"password": "runtime"}} ) assert config.sudo.password == "runtime" def connect_kwargs_also_merged_with_imported_values(self): self.env["key_filename"] = "whatever" conf = Config.from_v1( self.env, overrides={"connect_kwargs": {"meh": "effort"}} ) assert conf.connect_kwargs["key_filename"] == "whatever" assert conf.connect_kwargs["meh"] == "effort" class var_mappings: def always_use_pty(self): # Testing both due to v1-didn't-use-None-default issues config = self._conf(always_use_pty=True) assert config.run.pty is True config = self._conf(always_use_pty=False) assert config.run.pty is False def forward_agent(self): config = self._conf(forward_agent=True) assert config.forward_agent is True def gateway(self): config = self._conf(gateway="bastion.host") assert config.gateway == "bastion.host" class key_filename: def base(self): config = self._conf(key_filename="/some/path") assert ( config.connect_kwargs["key_filename"] == "/some/path" ) def is_not_set_if_None(self): config = 
self._conf(key_filename=None) assert "key_filename" not in config.connect_kwargs def no_agent(self): config = self._conf() assert config.connect_kwargs.allow_agent is True config = self._conf(no_agent=True) assert config.connect_kwargs.allow_agent is False class password: def set_just_to_connect_kwargs_if_sudo_password_set(self): # NOTE: default faux env has sudo_password set already... config = self._conf(password="screaming-firehawks") passwd = config.connect_kwargs.password assert passwd == "screaming-firehawks" def set_to_both_password_fields_if_necessary(self): config = self._conf(password="sikrit", sudo_password=None) assert config.connect_kwargs.password == "sikrit" assert config.sudo.password == "sikrit" def ssh_config_path(self): self.env.ssh_config_path = "/where/ever" config = Config.from_v1(self.env, lazy=True) assert config.ssh_config_path == "/where/ever" def sudo_password(self): config = self._conf(sudo_password="sikrit") assert config.sudo.password == "sikrit" def sudo_prompt(self): config = self._conf(sudo_prompt="password???") assert config.sudo.prompt == "password???" def timeout(self): config = self._conf(timeout=15) assert config.timeouts.connect == 15 def use_ssh_config(self): # Testing both due to v1-didn't-use-None-default issues config = self._conf(use_ssh_config=True) assert config.load_ssh_configs is True config = self._conf(use_ssh_config=False) assert config.load_ssh_configs is False def warn_only(self): # Testing both due to v1-didn't-use-None-default issues config = self._conf(warn_only=True) assert config.run.warn is True config = self._conf(warn_only=False) assert config.run.warn is False class ssh_config_loading: "ssh_config loading" # NOTE: actual _behavior_ of loaded SSH configs is tested in Connection's # tests; these tests just prove that the loading itself works & the data is # correctly available. 
_system_path = join(support, "ssh_config", "system.conf") _user_path = join(support, "ssh_config", "user.conf") _runtime_path = join(support, "ssh_config", "runtime.conf") _empty_kwargs = dict( system_ssh_path="nope/nope/nope", user_ssh_path="nope/noway/nuhuh" ) def defaults_to_empty_sshconfig_obj_if_no_files_found(self): c = Config(**self._empty_kwargs) # TODO: Currently no great public API that lets us figure out if # one of these is 'empty' or not. So for now, expect an empty inner # SSHConfig._config from an un-.parse()d such object. (AFAIK, such # objects work fine re: .lookup, .get_hostnames etc.) assert type(c.base_ssh_config) is SSHConfig assert c.base_ssh_config._config == [] def object_can_be_given_explicitly_via_ssh_config_kwarg(self): sc = SSHConfig() assert Config(ssh_config=sc).base_ssh_config is sc @patch.object(Config, "_load_ssh_file") def when_config_obj_given_default_paths_are_not_sought(self, method): sc = SSHConfig() Config(ssh_config=sc) assert not method.called @patch.object(Config, "_load_ssh_file") def config_obj_prevents_loading_runtime_path_too(self, method): sc = SSHConfig() Config(ssh_config=sc, runtime_ssh_path=self._system_path) assert not method.called @patch.object(Config, "_load_ssh_file") def when_runtime_path_given_other_paths_are_not_sought(self, method): Config(runtime_ssh_path=self._runtime_path) method.assert_called_once_with(self._runtime_path) @patch.object(Config, "_load_ssh_file") def runtime_path_can_be_given_via_config_itself(self, method): Config(overrides={"ssh_config_path": self._runtime_path}) method.assert_called_once_with(self._runtime_path) def runtime_path_does_not_die_silently(self): try: Config(runtime_ssh_path="sure/thing/boss/whatever/you/say") except IOError as e: assert "No such file or directory" in str(e) assert e.errno == errno.ENOENT else: assert False, "Bad runtime path didn't raise IOError!" 
# TODO: skip on windows @patch.object(Config, "_load_ssh_file") def default_file_paths_match_openssh(self, method): Config() method.assert_has_calls( [call(expanduser("~/.ssh/config")), call("/etc/ssh/ssh_config")] ) def system_path_loads_ok(self): c = Config( **dict(self._empty_kwargs, system_ssh_path=self._system_path) ) names = c.base_ssh_config.get_hostnames() assert names == {"system", "shared", "*"} def user_path_loads_ok(self): c = Config(**dict(self._empty_kwargs, user_ssh_path=self._user_path)) names = c.base_ssh_config.get_hostnames() assert names == {"user", "shared", "*"} def both_paths_loaded_if_both_exist_with_user_winning(self): c = Config( user_ssh_path=self._user_path, system_ssh_path=self._system_path ) names = c.base_ssh_config.get_hostnames() expected = {"user", "system", "shared", "*"} assert names == expected # Expect the user value (321), not the system one (123) assert c.base_ssh_config.lookup("shared")["port"] == "321" @patch.object(Config, "_load_ssh_file") @patch("fabric.config.os.path.exists", lambda x: True) def runtime_path_subject_to_user_expansion(self, method): # TODO: other expansion types? no real need for abspath... tilded = "~/probably/not/real/tho" Config(runtime_ssh_path=tilded) method.assert_called_once_with(expanduser(tilded)) @patch.object(Config, "_load_ssh_file") def user_path_subject_to_user_expansion(self, method): # TODO: other expansion types? no real need for abspath... 
tilded = "~/probably/not/real/tho" Config(user_ssh_path=tilded) method.assert_any_call(expanduser(tilded)) class core_ssh_load_option_allows_skipping_ssh_config_loading: @patch.object(Config, "_load_ssh_file") def skips_default_paths(self, method): Config(overrides={"load_ssh_configs": False}) assert not method.called @patch.object(Config, "_load_ssh_file") def does_not_affect_explicit_object(self, method): sc = SSHConfig() c = Config(ssh_config=sc, overrides={"load_ssh_configs": False}) # Implicit loading still doesn't happen...sanity check assert not method.called # Real test: the obj we passed in is present as usual assert c.base_ssh_config is sc @patch.object(Config, "_load_ssh_file") def does_not_skip_loading_runtime_path(self, method): Config( runtime_ssh_path=self._runtime_path, overrides={"load_ssh_configs": False}, ) # Expect that loader method did still run (and, as usual, that # it did not load any other files) method.assert_called_once_with(self._runtime_path) class lazy_loading_and_explicit_methods: @patch.object(Config, "_load_ssh_file") def may_use_lazy_plus_explicit_methods_to_control_flow(self, method): c = Config(lazy=True) assert not method.called c.set_runtime_ssh_path(self._runtime_path) c.load_ssh_config() method.assert_called_once_with(self._runtime_path) fabric-2.6.0/tests/conftest.py000066400000000000000000000020621400143053200162760ustar00rootroot00000000000000# flake8: noqa from fabric.testing.fixtures import client, remote, sftp, sftp_objs, transfer from os.path import isfile, expanduser from pytest import fixture from mock import patch # TODO: does this want to end up in the public fixtures module too? @fixture(autouse=True) def no_user_ssh_config(): """ Cowardly refuse to ever load what looks like user SSH config paths. Prevents the invoking user's real config from gumming up test results or inflating test runtime (eg if it sets canonicalization on, which will incur DNS lookups for nearly all of this suite's bogus names). 
""" # An ugly, but effective, hack. I am not proud. I also don't see anything # that's >= as bulletproof and less ugly? # TODO: ideally this should expand to cover system config paths too, but # that's even less likely to be an issue. def no_config_for_you(path): if path == expanduser("~/.ssh/config"): return False return isfile(path) with patch("fabric.config.os.path.isfile", no_config_for_you): yield fabric-2.6.0/tests/connection.py000066400000000000000000001514351400143053200166210ustar00rootroot00000000000000from itertools import chain, repeat try: from invoke.vendor.six import b except ImportError: from six import b import errno from os.path import join import socket import time from mock import patch, Mock, call, ANY from paramiko.client import SSHClient, AutoAddPolicy from paramiko import SSHConfig import pytest # for mark from pytest import skip, param from pytest_relaxed import raises from invoke.vendor.lexicon import Lexicon from invoke.config import Config as InvokeConfig from invoke.exceptions import ThreadException from fabric import Config, Connection from fabric.exceptions import InvalidV1Env from fabric.util import get_local_user from _util import support, faux_v1_env # Remote is woven in as a config default, so must be patched there remote_path = "fabric.config.Remote" def _select_result(obj): """ Return iterator/generator suitable for mocking a select.select() call. Specifically one that has a single initial return value of ``obj``, and then empty results thereafter. If ``obj`` is an exception, it will be used as the sole initial ``side_effect`` (as opposed to a return value among tuples). """ # select.select() returns three N-tuples. Have it just act like a single # read event happened, then quiet after. So chain a single-item iterable to # a repeat(). (Mock has no built-in way to do this apparently.) 
initial = [(obj,), tuple(), tuple()] if isinstance(obj, Exception) or ( isinstance(obj, type) and issubclass(obj, Exception) ): initial = obj return chain([initial], repeat([tuple(), tuple(), tuple()])) class Connection_: class basic_attributes: def is_connected_defaults_to_False(self): assert Connection("host").is_connected is False def client_defaults_to_a_new_SSHClient(self): c = Connection("host").client assert isinstance(c, SSHClient) assert c.get_transport() is None class known_hosts_behavior: def defaults_to_auto_add(self): # TODO: change Paramiko API so this isn't a private access # TODO: maybe just merge with the __init__ test that is similar assert isinstance(Connection("host").client._policy, AutoAddPolicy) class init: "__init__" class host: @raises(TypeError) def is_required(self): Connection() def is_exposed_as_attribute(self): assert Connection("host").host == "host" # buffalo buffalo def may_contain_user_shorthand(self): c = Connection("user@host") assert c.host == "host" assert c.user == "user" def may_contain_port_shorthand(self): c = Connection("host:123") assert c.host == "host" assert c.port == 123 def may_contain_user_and_port_shorthand(self): c = Connection("user@host:123") assert c.host == "host" assert c.user == "user" assert c.port == 123 def ipv6_addresses_work_ok_but_avoid_port_shorthand(self): for addr in ("2001:DB8:0:0:0:0:0:1", "2001:DB8::1", "::1"): c = Connection(addr, port=123) assert c.user == get_local_user() assert c.host == addr assert c.port == 123 c2 = Connection("somebody@{}".format(addr), port=123) assert c2.user == "somebody" assert c2.host == addr assert c2.port == 123 class user: def defaults_to_local_user_with_no_config(self): # Tautology-tastic! 
assert Connection("host").user == get_local_user() def accepts_config_user_option(self): config = Config(overrides={"user": "nobody"}) assert Connection("host", config=config).user == "nobody" def may_be_given_as_kwarg(self): assert Connection("host", user="somebody").user == "somebody" @raises(ValueError) def errors_when_given_as_both_kwarg_and_shorthand(self): Connection("user@host", user="otheruser") def kwarg_wins_over_config(self): config = Config(overrides={"user": "nobody"}) cxn = Connection("host", user="somebody", config=config) assert cxn.user == "somebody" def shorthand_wins_over_config(self): config = Config(overrides={"user": "nobody"}) cxn = Connection("somebody@host", config=config) assert cxn.user == "somebody" class port: def defaults_to_22_because_yup(self): assert Connection("host").port == 22 def accepts_configuration_port(self): config = Config(overrides={"port": 2222}) assert Connection("host", config=config).port == 2222 def may_be_given_as_kwarg(self): assert Connection("host", port=2202).port == 2202 @raises(ValueError) def errors_when_given_as_both_kwarg_and_shorthand(self): Connection("host:123", port=321) def kwarg_wins_over_config(self): config = Config(overrides={"port": 2222}) cxn = Connection("host", port=123, config=config) assert cxn.port == 123 def shorthand_wins_over_config(self): config = Config(overrides={"port": 2222}) cxn = Connection("host:123", config=config) assert cxn.port == 123 class forward_agent: def defaults_to_False(self): assert Connection("host").forward_agent is False def accepts_configuration_value(self): config = Config(overrides={"forward_agent": True}) assert Connection("host", config=config).forward_agent is True def may_be_given_as_kwarg(self): cxn = Connection("host", forward_agent=True) assert cxn.forward_agent is True def kwarg_wins_over_config(self): config = Config(overrides={"forward_agent": True}) cxn = Connection("host", forward_agent=False, config=config) assert cxn.forward_agent is False class 
connect_timeout: def defaults_to_None(self): assert Connection("host").connect_timeout is None def accepts_configuration_value(self): config = Config(overrides={"timeouts": {"connect": 10}}) assert Connection("host", config=config).connect_timeout == 10 def may_be_given_as_kwarg(self): cxn = Connection("host", connect_timeout=15) assert cxn.connect_timeout == 15 def kwarg_wins_over_config(self): config = Config(overrides={"timeouts": {"connect": 20}}) cxn = Connection("host", connect_timeout=100, config=config) assert cxn.connect_timeout == 100 class config: # NOTE: behavior local to Config itself is tested in its own test # module; below is solely about Connection's config kwarg and its # handling of that value def is_not_required(self): assert Connection("host").config.__class__ == Config def can_be_specified(self): c = Config(overrides={"user": "me", "custom": "option"}) config = Connection("host", config=c).config assert c is config assert config["user"] == "me" assert config["custom"] == "option" def if_given_an_invoke_Config_we_upgrade_to_our_own_Config(self): # Scenario: user has Fabric-level data present at vanilla # Invoke config level, and is then creating Connection objects # with those vanilla invoke Configs. # (Could also _not_ have any Fabric-level data, but then that's # just a base case...) # TODO: adjust this if we ever switch to all our settings being # namespaced... 
vanilla = InvokeConfig(overrides={"forward_agent": True}) cxn = Connection("host", config=vanilla) assert cxn.forward_agent is True # not False, which is default class gateway: def is_optional_and_defaults_to_None(self): c = Connection(host="host") assert c.gateway is None def takes_a_Connection(self): c = Connection("host", gateway=Connection("otherhost")) assert isinstance(c.gateway, Connection) assert c.gateway.host == "otherhost" def takes_a_string(self): c = Connection("host", gateway="meh") assert c.gateway == "meh" def accepts_configuration_value(self): gw = Connection("jumpbox") config = Config(overrides={"gateway": gw}) # TODO: the fact that they will be eq, but _not_ necessarily be # the same object, could be problematic in some cases... cxn = Connection("host", config=config) assert cxn.gateway == gw class initializes_client: @patch("fabric.connection.SSHClient") def instantiates_empty_SSHClient(self, Client): Connection("host") Client.assert_called_once_with() @patch("fabric.connection.AutoAddPolicy") def sets_missing_host_key_policy(self, Policy, client): # TODO: should make the policy configurable early on sentinel = Mock() Policy.return_value = sentinel Connection("host") set_policy = client.set_missing_host_key_policy set_policy.assert_called_once_with(sentinel) def is_made_available_as_client_attr(self, client): # NOTE: client is SSHClient.return_value assert Connection("host").client is client class ssh_config: def _runtime_config(self, overrides=None, basename="runtime"): confname = "{}.conf".format(basename) runtime_path = join(support, "ssh_config", confname) if overrides is None: overrides = {} return Config( runtime_ssh_path=runtime_path, overrides=overrides ) def _runtime_cxn(self, **kwargs): config = self._runtime_config(**kwargs) return Connection("runtime", config=config) def effectively_blank_when_no_loaded_config(self): c = Config(ssh_config=SSHConfig()) cxn = Connection("host", config=c) # NOTE: paramiko always injects this even if you 
look up a host # that has no rules, even wildcard ones. assert cxn.ssh_config == {"hostname": "host"} def shows_result_of_lookup_when_loaded_config(self): conf = self._runtime_cxn().ssh_config expected = { "connecttimeout": "15", "forwardagent": "yes", "hostname": "runtime", "identityfile": ["whatever.key", "some-other.key"], "port": "666", "proxycommand": "my gateway", "user": "abaddon", } assert conf == expected class hostname: def original_host_always_set(self): cxn = Connection("somehost") assert cxn.original_host == "somehost" assert cxn.host == "somehost" def hostname_directive_overrides_host_attr(self): # TODO: not 100% convinced this is the absolute most # obvious API for 'translation' of given hostname to # ssh-configured hostname, but it feels okay for now. path = join( support, "ssh_config", "overridden_hostname.conf" ) config = Config(runtime_ssh_path=path) cxn = Connection("aliasname", config=config) assert cxn.host == "realname" assert cxn.original_host == "aliasname" assert cxn.port == 2222 class user: def wins_over_default(self): assert self._runtime_cxn().user == "abaddon" def wins_over_configuration(self): cxn = self._runtime_cxn(overrides={"user": "baal"}) assert cxn.user == "abaddon" def loses_to_explicit(self): # Would be 'abaddon', as above config = self._runtime_config() cxn = Connection("runtime", config=config, user="set") assert cxn.user == "set" class port: def wins_over_default(self): assert self._runtime_cxn().port == 666 def wins_over_configuration(self): cxn = self._runtime_cxn(overrides={"port": 777}) assert cxn.port == 666 def loses_to_explicit(self): config = self._runtime_config() # Would be 666, as above cxn = Connection("runtime", config=config, port=777) assert cxn.port == 777 class forward_agent: def wins_over_default(self): assert self._runtime_cxn().forward_agent is True def wins_over_configuration(self): # Of course, this "config override" is also the same as the # default. Meh. 
cxn = self._runtime_cxn(overrides={"forward_agent": False}) assert cxn.forward_agent is True def loses_to_explicit(self): # Would be True, as above config = self._runtime_config() cxn = Connection( "runtime", config=config, forward_agent=False ) assert cxn.forward_agent is False class proxy_command: def wins_over_default(self): assert self._runtime_cxn().gateway == "my gateway" def wins_over_configuration(self): cxn = self._runtime_cxn(overrides={"gateway": "meh gw"}) assert cxn.gateway == "my gateway" def loses_to_explicit(self): # Would be "my gateway", as above config = self._runtime_config() cxn = Connection( "runtime", config=config, gateway="other gateway" ) assert cxn.gateway == "other gateway" def explicit_False_turns_off_feature(self): # This isn't as necessary for things like user/port, which # _may not_ be None in the end - this setting could be. config = self._runtime_config() cxn = Connection("runtime", config=config, gateway=False) assert cxn.gateway is False class proxy_jump: def setup(self): self._expected_gw = Connection("jumpuser@jumphost:373") def wins_over_default(self): cxn = self._runtime_cxn(basename="proxyjump") assert cxn.gateway == self._expected_gw def wins_over_configuration(self): cxn = self._runtime_cxn( basename="proxyjump", overrides={"gateway": "meh gw"} ) assert cxn.gateway == self._expected_gw def loses_to_explicit(self): # Would be a Connection equal to self._expected_gw, as # above config = self._runtime_config(basename="proxyjump") cxn = Connection( "runtime", config=config, gateway="other gateway" ) assert cxn.gateway == "other gateway" def explicit_False_turns_off_feature(self): config = self._runtime_config(basename="proxyjump") cxn = Connection("runtime", config=config, gateway=False) assert cxn.gateway is False def wins_over_proxycommand(self): cxn = self._runtime_cxn(basename="both_proxies") assert cxn.gateway == Connection("winner@everything:777") def multi_hop_works_ok(self): cxn = 
self._runtime_cxn(basename="proxyjump_multi") innermost = cxn.gateway.gateway.gateway middle = cxn.gateway.gateway outermost = cxn.gateway assert innermost == Connection("jumpuser3@jumphost3:411") assert middle == Connection("jumpuser2@jumphost2:872") assert outermost == Connection("jumpuser@jumphost:373") def wildcards_do_not_trigger_recursion(self): # When #1850 is present, this will RecursionError. conf = self._runtime_config(basename="proxyjump_recursive") cxn = Connection("runtime.tld", config=conf) assert cxn.gateway == Connection("bastion.tld") assert cxn.gateway.gateway is None def multihop_plus_wildcards_still_no_recursion(self): conf = self._runtime_config( basename="proxyjump_multi_recursive" ) cxn = Connection("runtime.tld", config=conf) outer = cxn.gateway inner = cxn.gateway.gateway assert outer == Connection("bastion1.tld") assert inner == Connection("bastion2.tld") assert inner.gateway is None def gateway_Connections_get_parent_connection_configs(self): conf = self._runtime_config( basename="proxyjump", overrides={"some_random_option": "a-value"}, ) cxn = Connection("runtime", config=conf) # Sanity assert cxn.config is conf assert cxn.gateway == self._expected_gw # Real check assert cxn.gateway.config.some_random_option == "a-value" # Prove copy not reference # TODO: would we ever WANT a reference? can't imagine... assert cxn.gateway.config is not conf class connect_timeout: def wins_over_default(self): assert self._runtime_cxn().connect_timeout == 15 def wins_over_configuration(self): cxn = self._runtime_cxn( overrides={"timeouts": {"connect": 17}} ) assert cxn.connect_timeout == 15 def loses_to_explicit(self): config = self._runtime_config() cxn = Connection( "runtime", config=config, connect_timeout=23 ) assert cxn.connect_timeout == 23 class identity_file: # NOTE: ssh_config value gets merged w/ (instead of overridden # by) config and kwarg values; that is tested in the tests for # open(). 
def basic_loading_of_value(self): # By default, key_filename will be empty, and the data from # the runtime ssh config will be all that appears. value = self._runtime_cxn().connect_kwargs["key_filename"] assert value == ["whatever.key", "some-other.key"] class connect_kwargs: def defaults_to_empty_dict(self): assert Connection("host").connect_kwargs == {} def may_be_given_explicitly(self): cxn = Connection("host", connect_kwargs={"foo": "bar"}) assert cxn.connect_kwargs == {"foo": "bar"} def may_be_configured(self): c = Config(overrides={"connect_kwargs": {"origin": "config"}}) cxn = Connection("host", config=c) assert cxn.connect_kwargs == {"origin": "config"} def kwarg_wins_over_config(self): # TODO: should this be more of a merge-down? c = Config(overrides={"connect_kwargs": {"origin": "config"}}) cxn = Connection( "host", connect_kwargs={"origin": "kwarg"}, config=c ) assert cxn.connect_kwargs == {"origin": "kwarg"} class inline_ssh_env: def defaults_to_config_value(self): assert Connection("host").inline_ssh_env is False config = Config({"inline_ssh_env": True}) assert Connection("host", config=config).inline_ssh_env is True def may_be_given(self): assert Connection("host").inline_ssh_env is False cxn = Connection("host", inline_ssh_env=True) assert cxn.inline_ssh_env is True class from_v1: def setup(self): self.env = faux_v1_env() def _cxn(self, **kwargs): self.env.update(kwargs) return Connection.from_v1(self.env) def must_be_given_explicit_env_arg(self): cxn = Connection.from_v1(self.env) assert cxn.host == "localghost" class obtaining_config: @patch("fabric.connection.Config.from_v1") def defaults_to_calling_Config_from_v1(self, Config_from_v1): Connection.from_v1(self.env) Config_from_v1.assert_called_once_with(self.env) @patch("fabric.connection.Config.from_v1") def may_be_given_config_explicitly(self, Config_from_v1): # Arguably a dupe of regular Connection constructor behavior, # but whatever. 
Connection.from_v1(env=self.env, config=Config()) assert not Config_from_v1.called class additional_kwargs: # I.e. as opposed to what happens to the 'env' kwarg... def forwards_arbitrary_kwargs_to_init(self): cxn = Connection.from_v1( self.env, connect_kwargs={"foo": "bar"}, inline_ssh_env=True, connect_timeout=15, ) assert cxn.connect_kwargs["foo"] == "bar" assert cxn.inline_ssh_env is True assert cxn.connect_timeout == 15 def conflicting_kwargs_win_over_v1_env_values(self): env = Lexicon(self.env) cxn = Connection.from_v1( env, host="not-localghost", port=2222, user="remoteuser" ) assert cxn.host == "not-localghost" assert cxn.user == "remoteuser" assert cxn.port == 2222 class var_mappings: def host_string(self): cxn = self._cxn() # default is 'localghost' assert cxn.host == "localghost" @raises(InvalidV1Env) def None_host_string_errors_usefully(self): self._cxn(host_string=None) def user(self): cxn = self._cxn(user="space") assert cxn.user == "space" class port: def basic(self): cxn = self._cxn(port=2222) assert cxn.port == 2222 def casted_to_int(self): cxn = self._cxn(port="2222") assert cxn.port == 2222 def not_supplied_if_given_in_host_string(self): cxn = self._cxn(host_string="localghost:3737", port=2222) assert cxn.port == 3737 class string_representation: "string representations" def str_displays_repr(self): c = Connection("meh") assert str(c) == "" def displays_core_params(self): c = Connection(user="me", host="there", port=123) template = "" assert repr(c) == template def omits_default_param_values(self): c = Connection("justhost") assert repr(c) == "" def param_comparison_uses_config(self): conf = Config(overrides={"user": "zerocool"}) c = Connection( user="zerocool", host="myhost", port=123, config=conf ) template = "" assert repr(c) == template def proxyjump_gateway_shows_type(self): c = Connection(host="myhost", gateway=Connection("jump")) template = "" assert repr(c) == template def proxycommand_gateway_shows_type(self): c = 
Connection(host="myhost", gateway="netcat is cool") template = "" assert repr(c) == template class comparison_and_hashing: def comparison_uses_host_user_and_port(self): # Just host assert Connection("host") == Connection("host") # Host + user c1 = Connection("host", user="foo") c2 = Connection("host", user="foo") assert c1 == c2 # Host + user + port c1 = Connection("host", user="foo", port=123) c2 = Connection("host", user="foo", port=123) assert c1 == c2 def comparison_to_non_Connections_is_False(self): assert Connection("host") != 15 def hashing_works(self): assert hash(Connection("host")) == hash(Connection("host")) def sorting_works(self): # Hostname... assert Connection("a-host") < Connection("b-host") # User... assert Connection("a-host", user="a-user") < Connection( "a-host", user="b-user" ) # then port... assert Connection("a-host", port=1) < Connection("a-host", port=2) class open: def has_no_required_args_and_returns_None(self, client): assert Connection("host").open() is None def calls_SSHClient_connect(self, client): "calls paramiko.SSHClient.connect() with correct args" Connection("host").open() client.connect.assert_called_with( username=get_local_user(), hostname="host", port=22 ) def passes_through_connect_kwargs(self, client): Connection("host", connect_kwargs={"foobar": "bizbaz"}).open() client.connect.assert_called_with( username=get_local_user(), hostname="host", port=22, foobar="bizbaz", ) def refuses_to_overwrite_connect_kwargs_with_others(self, client): for key, value, kwargs in ( # Core connection args should definitely not get overwritten! # NOTE: recall that these keys are the SSHClient.connect() # kwarg names, NOT our own config/kwarg names! ("hostname", "nothost", {}), ("port", 17, {}), ("username", "zerocool", {}), # These might arguably still be allowed to work, but let's head # off confusion anyways. 
("timeout", 100, {"connect_timeout": 25}), ): try: Connection( "host", connect_kwargs={key: value}, **kwargs ).open() except ValueError as e: err = "Refusing to be ambiguous: connect() kwarg '{}' was given both via regular arg and via connect_kwargs!" # noqa assert str(e) == err.format(key) else: assert False, "Did not raise ValueError!" def connect_kwargs_protection_not_tripped_by_defaults(self, client): Connection("host", connect_kwargs={"timeout": 300}).open() client.connect.assert_called_with( username=get_local_user(), hostname="host", port=22, timeout=300, ) def submits_connect_timeout(self, client): Connection("host", connect_timeout=27).open() client.connect.assert_called_with( username=get_local_user(), hostname="host", port=22, timeout=27 ) def is_connected_True_when_successful(self, client): c = Connection("host") c.open() assert c.is_connected is True def short_circuits_if_already_connected(self, client): cxn = Connection("host") # First call will set self.transport to fixture's mock cxn.open() # Second call will check .is_connected which will see active==True, # and short circuit cxn.open() assert client.connect.call_count == 1 def is_connected_still_False_when_connect_fails(self, client): client.connect.side_effect = socket.error cxn = Connection("host") try: cxn.open() except socket.error: pass assert cxn.is_connected is False def uses_configured_user_host_and_port(self, client): Connection(user="myuser", host="myhost", port=9001).open() client.connect.assert_called_once_with( username="myuser", hostname="myhost", port=9001 ) # NOTE: does more involved stuff so can't use "client" fixture @patch("fabric.connection.SSHClient") def uses_gateway_channel_as_sock_for_SSHClient_connect(self, Client): "uses Connection gateway as 'sock' arg to SSHClient.connect" # Setup mock_gw = Mock() mock_main = Mock() Client.side_effect = [mock_gw, mock_main] gw = Connection("otherhost") gw.open = Mock(wraps=gw.open) main = Connection("host", gateway=gw) main.open() # 
Expect gateway is also open()'d gw.open.assert_called_once_with() # Expect direct-tcpip channel open on 1st client open_channel = mock_gw.get_transport.return_value.open_channel kwargs = open_channel.call_args[1] assert kwargs["kind"] == "direct-tcpip" assert kwargs["dest_addr"], "host" == 22 # Expect result of that channel open as sock arg to connect() sock_arg = mock_main.connect.call_args[1]["sock"] assert sock_arg is open_channel.return_value @patch("fabric.connection.ProxyCommand") def uses_proxycommand_as_sock_for_Client_connect(self, moxy, client): "uses ProxyCommand from gateway as 'sock' arg to SSHClient.connect" # Setup main = Connection("host", gateway="net catty %h %p") main.open() # Expect ProxyCommand instantiation moxy.assert_called_once_with("net catty host 22") # Expect result of that as sock arg to connect() sock_arg = client.connect.call_args[1]["sock"] assert sock_arg is moxy.return_value # TODO: all the various connect-time options such as agent forwarding, # host acceptance policies, how to auth, etc etc. These are all aspects # of a given session and not necessarily the same for entire lifetime # of a Connection object, should it ever disconnect/reconnect. # TODO: though some/all of those things might want to be set to # defaults at initialization time... class connect_kwargs_key_filename: "connect_kwargs(key_filename=...)" # TODO: it'd be nice to truly separate CLI from regular (non override # level) invoke config; as it is, invoke config comes first in expected # outputs since otherwise there's no way for --identity to "come # first". 
@pytest.mark.parametrize( "ssh, invoke, kwarg, expected", [ param( True, True, True, [ "configured.key", "kwarg.key", "ssh-config-B.key", "ssh-config-A.key", ], id="All sources", ), param(False, False, False, [], id="No sources"), param( True, False, False, ["ssh-config-B.key", "ssh-config-A.key"], id="ssh_config only", ), param( False, True, False, ["configured.key"], id="Invoke-level config only", ), param( False, False, True, ["kwarg.key"], id="Connection kwarg only", ), param( True, True, False, ["configured.key", "ssh-config-B.key", "ssh-config-A.key"], id="ssh_config + invoke config, no kwarg", ), param( True, False, True, ["kwarg.key", "ssh-config-B.key", "ssh-config-A.key"], id="ssh_config + kwarg, no Invoke-level config", ), param( False, True, True, ["configured.key", "kwarg.key"], id="Invoke-level config + kwarg, no ssh_config", ), ], ) def merges_sources(self, client, ssh, invoke, kwarg, expected): config_kwargs = {} if ssh: # SSH config with 2x IdentityFile directives. config_kwargs["runtime_ssh_path"] = join( support, "ssh_config", "runtime_identity.conf" ) if invoke: # Use overrides config level to mimic --identity use NOTE: (the # fact that --identity is an override, and thus overrides eg # invoke config file values is part of invoke's config test # suite) config_kwargs["overrides"] = { "connect_kwargs": {"key_filename": ["configured.key"]} } conf = Config(**config_kwargs) connect_kwargs = {} if kwarg: # Stitch in connect_kwargs value connect_kwargs = {"key_filename": ["kwarg.key"]} # Tie in all sources that were configured & open() Connection( "runtime", config=conf, connect_kwargs=connect_kwargs ).open() # Ensure we got the expected list of keys kwargs = client.connect.call_args[1] if expected: assert kwargs["key_filename"] == expected else: # No key filenames -> it's not even passed in as connect_kwargs # is gonna be a blank dict assert "key_filename" not in kwargs class close: def has_no_required_args_and_returns_None(self, client): c = 
Connection("host") c.open() assert c.close() is None def calls_SSHClient_close(self, client): "calls paramiko.SSHClient.close()" c = Connection("host") c.open() c.close() client.close.assert_called_with() @patch("fabric.connection.AgentRequestHandler") def calls_agent_handler_close_if_enabled(self, Handler, client): c = Connection("host", forward_agent=True) c.create_session() c.close() # NOTE: this will need to change if, for w/e reason, we ever want # to run multiple handlers at once Handler.return_value.close.assert_called_once_with() def short_circuits_if_not_connected(self, client): c = Connection("host") # Won't trigger close() on client because it'll already think it's # closed (due to no .transport & the behavior of .is_connected) c.close() assert not client.close.called def class_works_as_a_closing_contextmanager(self, client): with Connection("host") as c: c.open() client.close.assert_called_once_with() class create_session: def calls_open_for_you(self, client): c = Connection("host") c.open = Mock() c.transport = Mock() # so create_session no asplode c.create_session() assert c.open.called @patch("fabric.connection.AgentRequestHandler") def activates_paramiko_agent_forwarding_if_configured( self, Handler, client ): c = Connection("host", forward_agent=True) chan = c.create_session() Handler.assert_called_once_with(chan) class run: # NOTE: most actual run related tests live in the runners module's # tests. Here we are just testing the outer interface a bit. 
@patch(remote_path) def calls_open_for_you(self, Remote, client): c = Connection("host") c.open = Mock() c.run("command") assert c.open.called @patch(remote_path) def passes_inline_env_to_Remote(self, Remote, client): Connection("host").run("command") assert Remote.call_args[1]["inline_env"] is False Connection("host", inline_ssh_env=True).run("command") assert Remote.call_args[1]["inline_env"] is True @patch(remote_path) def calls_Remote_run_with_command_and_kwargs_and_returns_its_result( self, Remote, client ): remote = Remote.return_value sentinel = object() remote.run.return_value = sentinel c = Connection("host") r1 = c.run("command") r2 = c.run("command", warn=True, hide="stderr") # NOTE: somehow, .call_args & the methods built on it (like # .assert_called_with()) stopped working, apparently triggered by # our code...somehow...after commit (roughly) 80906c7. # And yet, .call_args_list and its brethren work fine. Wha? Remote.assert_any_call(c, inline_env=False) remote.run.assert_has_calls( [call("command"), call("command", warn=True, hide="stderr")] ) for r in (r1, r2): assert r is sentinel class local: # NOTE: most tests for this functionality live in Invoke's runner # tests. @patch("invoke.config.Local") def calls_invoke_Local_run(self, Local): Connection("host").local("foo") # NOTE: yet another casualty of the bizarre mock issues assert call().run("foo") in Local.mock_calls class sudo: @patch(remote_path) def calls_open_for_you(self, Remote, client): c = Connection("host") c.open = Mock() c.sudo("command") assert c.open.called @patch(remote_path) def passes_inline_env_to_Remote(self, Remote, client): Connection("host").sudo("command") assert Remote.call_args[1]["inline_env"] is False Connection("host", inline_ssh_env=True).sudo("command") assert Remote.call_args[1]["inline_env"] is True @patch(remote_path) def basic_invocation(self, Remote, client): # Technically duplicates Invoke-level tests, but ensures things # still work correctly at our level. 
cxn = Connection("host") cxn.sudo("foo") cmd = "sudo -S -p '{}' foo".format(cxn.config.sudo.prompt) # NOTE: this is another spot where Mock.call_args is inexplicably # None despite call_args_list being populated. WTF. (Also, # Remote.return_value is two different Mocks now, despite Remote's # own Mock having the same ID here and in code under test. WTF!!) expected = [ call(cxn, inline_env=False), call().run(cmd, watchers=ANY), ] assert Remote.mock_calls == expected # NOTE: we used to have a "sudo return value is literally the same # return value from Remote.run()" sanity check here, which is # completely impossible now thanks to the above issue. def per_host_password_works_as_expected(self): # TODO: needs clearly defined "per-host" config API, if a distinct # one is necessary besides "the config obj handed in when # instantiating the Connection". # E.g. generate a Connection pulling in a sudo.password value from # what would be a generic conf file or similar, *and* one more # specific to that particular Connection (perhaps simply the # 'override' level?), w/ test asserting the more-specific value is # what's submitted. skip() class sftp: def returns_result_of_client_open_sftp(self, client): "returns result of client.open_sftp()" sentinel = object() client.open_sftp.return_value = sentinel assert Connection("host").sftp() == sentinel client.open_sftp.assert_called_with() def lazily_caches_result(self, client): sentinel1, sentinel2 = object(), object() client.open_sftp.side_effect = [sentinel1, sentinel2] cxn = Connection("host") first = cxn.sftp() # TODO: why aren't we just asserting about calls of open_sftp??? err = "{0!r} wasn't the sentinel object()!" 
assert first is sentinel1, err.format(first) second = cxn.sftp() assert second is sentinel1, err.format(second) class get: @patch("fabric.connection.Transfer") def calls_Transfer_get(self, Transfer): "calls Transfer.get()" c = Connection("host") c.get("meh") Transfer.assert_called_with(c) Transfer.return_value.get.assert_called_with("meh") class put: @patch("fabric.connection.Transfer") def calls_Transfer_put(self, Transfer): "calls Transfer.put()" c = Connection("host") c.put("meh") Transfer.assert_called_with(c) Transfer.return_value.put.assert_called_with("meh") class forward_local: @patch("fabric.tunnels.select") @patch("fabric.tunnels.socket.socket") @patch("fabric.connection.SSHClient") def _forward_local(self, kwargs, Client, mocket, select): # Tease out bits of kwargs for use in the mocking/expecting. # But leave it alone for raw passthru to the API call itself. # TODO: unhappy with how much this apes the real code & its sig... local_port = kwargs["local_port"] remote_port = kwargs.get("remote_port", local_port) local_host = kwargs.get("local_host", "localhost") remote_host = kwargs.get("remote_host", "localhost") # These aren't part of the real sig, but this is easier than trying # to reconcile the mock decorators + optional-value kwargs. meh. 
tunnel_exception = kwargs.pop("tunnel_exception", None) listener_exception = kwargs.pop("listener_exception", False) # Mock setup client = Client.return_value listener_sock = Mock(name="listener_sock") if listener_exception: listener_sock.bind.side_effect = listener_exception data = b("Some data") tunnel_sock = Mock(name="tunnel_sock", recv=lambda n: data) local_addr = Mock() transport = client.get_transport.return_value channel = transport.open_channel.return_value # socket.socket is only called once directly mocket.return_value = listener_sock # The 2nd socket is obtained via an accept() (which should only # fire once & raise EAGAIN after) listener_sock.accept.side_effect = chain( [(tunnel_sock, local_addr)], repeat(socket.error(errno.EAGAIN, "nothing yet")), ) obj = tunnel_sock if tunnel_exception is None else tunnel_exception select.select.side_effect = _select_result(obj) with Connection("host").forward_local(**kwargs): # Make sure we give listener thread enough time to boot up :( # Otherwise we might assert before it does things. (NOTE: # doesn't need to be much, even at 0.01s, 0/100 trials failed # (vs 45/100 with no sleep) time.sleep(0.015) assert client.connect.call_args[1]["hostname"] == "host" listener_sock.setsockopt.assert_called_once_with( socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 ) listener_sock.setblocking.assert_called_once_with(0) listener_sock.bind.assert_called_once_with( (local_host, local_port) ) if not listener_exception: listener_sock.listen.assert_called_once_with(1) transport.open_channel.assert_called_once_with( "direct-tcpip", (remote_host, remote_port), local_addr ) # Local write to tunnel_sock is implied by its mocked-out # recv() call above... 
# NOTE: don't assert if explodey; we want to mimic "the only # error that occurred was within the thread" behavior being # tested by thread-exception-handling tests if not (tunnel_exception or listener_exception): channel.sendall.assert_called_once_with(data) # Shutdown, with another sleep because threads. time.sleep(0.015) if not listener_exception: tunnel_sock.close.assert_called_once_with() channel.close.assert_called_once_with() listener_sock.close.assert_called_once_with() def forwards_local_port_to_remote_end(self): self._forward_local({"local_port": 1234}) def distinct_remote_port(self): self._forward_local({"local_port": 1234, "remote_port": 4321}) def non_localhost_listener(self): self._forward_local( {"local_port": 1234, "local_host": "nearby_local_host"} ) def non_remote_localhost_connection(self): self._forward_local( {"local_port": 1234, "remote_host": "nearby_remote_host"} ) def _thread_error(self, which): class Sentinel(Exception): pass try: self._forward_local( { "local_port": 1234, "{}_exception".format(which): Sentinel, } ) except ThreadException as e: # NOTE: ensures that we're getting what we expected and not # some deeper, test-bug related error assert len(e.exceptions) == 1 inner = e.exceptions[0] err = "Expected wrapped exception to be Sentinel, was {}" assert inner.type is Sentinel, err.format(inner.type.__name__) else: # no exception happened :( implies the thread went boom but # nobody noticed err = "Failed to get ThreadException on {} error" assert False, err.format(which) def tunnel_errors_bubble_up(self): self._thread_error("tunnel") def tunnel_manager_errors_bubble_up(self): self._thread_error("listener") # TODO: these require additional refactoring of _forward_local to be # more like the decorators in _util def multiple_tunnels_can_be_open_at_once(self): skip() class forward_remote: @patch("fabric.connection.socket.socket") @patch("fabric.tunnels.select") @patch("fabric.connection.SSHClient") def _forward_remote(self, kwargs, Client, 
select, mocket): # TODO: unhappy with how much this duplicates of the code under # test, re: sig/default vals # Set up parameter values/defaults remote_port = kwargs["remote_port"] remote_host = kwargs.get("remote_host", "127.0.0.1") local_port = kwargs.get("local_port", remote_port) local_host = kwargs.get("local_host", "localhost") # Mock/etc setup, anything that can be prepped before the forward # occurs (which is most things) tun_socket = mocket.return_value cxn = Connection("host") # Channel that will yield data when read from chan = Mock() chan.recv.return_value = "data" # And make select() yield it as being ready once, when called select.select.side_effect = _select_result(chan) with cxn.forward_remote(**kwargs): # At this point Connection.open() has run and generated a # Transport mock for us (because SSHClient is mocked). Let's # first make sure we asked it for the port forward... # NOTE: this feels like it's too limited/tautological a test, # until you realize that it's functionally impossible to mock # out everything required for Paramiko's inner guts to run # _parse_channel_open() and suchlike :( call = cxn.transport.request_port_forward.call_args_list[0] assert call[1]["address"] == remote_host assert call[1]["port"] == remote_port # Pretend the Transport called our callback with mock Channel call[1]["handler"](chan, tuple(), tuple()) # Then have to sleep a bit to make sure we give the tunnel # created by that callback to spin up; otherwise ~5% of the # time we exit the contextmanager so fast, the tunnel's "you're # done!" flag is set before it even gets a chance to select() # once. time.sleep(0.01) # And make sure we hooked up to the local socket OK tup = (local_host, local_port) tun_socket.connect.assert_called_once_with(tup) # Expect that our socket got written to by the tunnel (due to the # above-setup select() and channel mocking). Need to do this after # tunnel shutdown or we risk thread ordering issues. 
tun_socket.sendall.assert_called_once_with("data") # Ensure we closed down the mock socket mocket.return_value.close.assert_called_once_with() # And that the transport canceled the port forward on the remote # end. assert cxn.transport.cancel_port_forward.call_count == 1 def forwards_remote_port_to_local_end(self): self._forward_remote({"remote_port": 1234}) def distinct_local_port(self): self._forward_remote({"remote_port": 1234, "local_port": 4321}) def non_localhost_connections(self): self._forward_remote( {"remote_port": 1234, "local_host": "nearby_local_host"} ) def remote_non_localhost_listener(self): self._forward_remote( {"remote_port": 1234, "remote_host": "192.168.1.254"} ) # TODO: these require additional refactoring of _forward_remote to be # more like the decorators in _util def multiple_tunnels_can_be_open_at_once(self): skip() def tunnel_errors_bubble_up(self): skip() def listener_errors_bubble_up(self): skip() fabric-2.6.0/tests/executor.py000066400000000000000000000125061400143053200163130ustar00rootroot00000000000000from invoke import Collection, Context, Call, Task as InvokeTask from invoke.parser import ParseResult, ParserContext, Argument from fabric import Executor, Task, Connection from fabric.executor import ConnectionCall from fabric.exceptions import NothingToDo from mock import Mock from pytest import skip, raises # noqa def _get_executor(hosts_flag=None, hosts_kwarg=None, post=None, remainder=""): post_tasks = [] if post is not None: post_tasks.append(post) hosts = Argument(name="hosts") if hosts_flag is not None: hosts.value = hosts_flag core_args = ParseResult([ParserContext(args=[hosts])]) core_args.remainder = remainder body = Mock(pre=[], post=[]) task = Task(body, post=post_tasks, hosts=hosts_kwarg) coll = Collection(mytask=task) return body, Executor(coll, core=core_args) def _execute(**kwargs): invocation = kwargs.pop("invocation", ["mytask"]) task, executor = _get_executor(**kwargs) executor.execute(*invocation) return task 
class Executor_: class expand_calls: class hosts_flag_empty: def no_parameterization_is_done(self): task = _execute() assert task.call_count == 1 assert isinstance(task.call_args[0][0], Context) class hosts_flag_set: def parameterization_per_host(self): task = _execute(hosts_flag="host1,host2,host3") assert task.call_count == 3 assert isinstance(task.call_args[0][0], Connection) def post_tasks_happen_once_only(self): post = Mock() task = _execute( hosts_flag="host1,host2,host3", post=Task(post) ) assert task.call_count == 3 assert post.call_count == 1 class hosts_attribute_on_task_objects: def parameterization_per_host(self): task = _execute(hosts_kwarg=["host1", "host2", "host3"]) assert task.call_count == 3 assert isinstance(task.call_args[0][0], Connection) def post_tasks_happen_once_only(self): post = Mock() task = _execute( hosts_kwarg=["host1", "host2", "host3"], post=Task(post) ) assert task.call_count == 3 assert post.call_count == 1 def may_give_Connection_kwargs_as_values(self): task = _execute( hosts_kwarg=[ {"host": "host1"}, {"host": "host2", "user": "doge"}, ] ) assert task.call_count == 2 expected = [ Connection("host1"), Connection("host2", user="doge"), ] assert [x[0][0] for x in task.call_args_list] == expected class Invoke_task_objects_without_hosts_attribute_still_work: def execution_happens_normally_without_parameterization(self): body = Mock(pre=[], post=[]) coll = Collection(mytask=InvokeTask(body)) hosts = Argument(name="hosts") core_args = ParseResult([ParserContext(args=[hosts])]) # When #1824 present, this just blows up because no .hosts attr Executor(coll, core=core_args).execute("mytask") assert body.call_count == 1 def hosts_flag_still_triggers_parameterization(self): body = Mock(pre=[], post=[]) coll = Collection(mytask=InvokeTask(body)) hosts = Argument(name="hosts") hosts.value = "host1,host2,host3" core_args = ParseResult([ParserContext(args=[hosts])]) Executor(coll, core=core_args).execute("mytask") assert body.call_count == 3 
class hosts_flag_vs_attributes: def flag_wins(self): task = _execute( hosts_flag="via-flag", hosts_kwarg=["via-kwarg"] ) assert task.call_count == 1 assert task.call_args[0][0] == Connection(host="via-flag") class remainder: def raises_NothingToDo_when_no_hosts(self): with raises(NothingToDo): _execute(remainder="whatever") def creates_anonymous_call_per_host(self): # TODO: annoying to do w/o mucking around w/ our Executor class # more, and that stuff wants to change semi soon anyways when # we grow past --hosts; punting. skip() class dedupe: def deduplication_not_performed(self): task = _execute(invocation=["mytask", "mytask"]) assert task.call_count == 2 # not 1 class parameterize: def always_generates_ConnectionCall_with_host_attr(self): task, executor = _get_executor(hosts_flag="host1,host2,host3") calls = executor.expand_calls(calls=[Call(task)]) assert len(calls) == 3 assert all(isinstance(x, ConnectionCall) for x in calls) assert [x.init_kwargs["host"] for x in calls] == [ "host1", "host2", "host3", ] fabric-2.6.0/tests/group.py000066400000000000000000000261271400143053200156150ustar00rootroot00000000000000from mock import Mock, patch, call from pytest import mark, raises from fabric import Connection, Group, SerialGroup, ThreadingGroup, GroupResult from fabric.group import thread_worker from fabric.exceptions import GroupException RUNNER_METHODS = ("run", "sudo") TRANSFER_METHODS = ("put", "get") ALL_METHODS = RUNNER_METHODS + TRANSFER_METHODS runner_args = ("command",) runner_kwargs = dict(hide=True, warn=True) transfer_args = tuple() transfer_kwargs = dict(local="yokel", remote="goat") ARGS_BY_METHOD = dict( run=runner_args, sudo=runner_args, put=transfer_args, get=transfer_args ) KWARGS_BY_METHOD = dict( run=runner_kwargs, sudo=runner_kwargs, put=transfer_kwargs, get=transfer_kwargs, ) class Group_: class init: "__init__" def may_be_empty(self): assert len(Group()) == 0 def takes_splat_arg_of_host_strings(self): g = Group("foo", "bar") assert g[0].host 
== "foo" assert g[1].host == "bar" def takes_splat_kwargs_and_passes_them_to_Connections(self): g = Group("foo", "bar", user="admin", forward_agent=True) assert g[0].host == "foo" assert g[0].user == "admin" assert g[0].forward_agent is True assert g[1].host == "bar" assert g[1].user == "admin" assert g[1].forward_agent is True class from_connections: def inits_from_iterable_of_Connections(self): g = Group.from_connections((Connection("foo"), Connection("bar"))) assert len(g) == 2 assert g[1].host == "bar" def acts_like_an_iterable_of_Connections(self): g = Group("foo", "bar", "biz") assert g[0].host == "foo" assert g[-1].host == "biz" assert len(g) == 3 for c in g: assert isinstance(c, Connection) @mark.parametrize("method", ALL_METHODS) def abstract_methods_not_implemented(self, method): group = Group() with raises(NotImplementedError): getattr(group, method)() class close_and_contextmanager_behavior: def close_closes_all_member_connections(self): cxns = [Mock(name=x) for x in ("foo", "bar", "biz")] g = Group.from_connections(cxns) g.close() for c in cxns: c.close.assert_called_once_with() def contextmanager_behavior_works_like_Connection(self): cxns = [Mock(name=x) for x in ("foo", "bar", "biz")] g = Group.from_connections(cxns) with g as my_g: assert my_g is g for c in cxns: c.close.assert_called_once_with() class get: class local_defaults_to_host_interpolated_path: def when_no_arg_or_kwarg_given(self): g = Group("host1", "host2") g._do = Mock() g.get(remote="whatever") g._do.assert_called_with( "get", remote="whatever", local="{host}/" ) def not_when_arg_given(self): g = Group("host1", "host2") g._do = Mock() g.get("whatever", "lol") # No local kwarg passed. 
g._do.assert_called_with("get", "whatever", "lol") def not_when_kwarg_given(self): g = Group("host1", "host2") g._do = Mock() g.get(remote="whatever", local="lol") # Doesn't stomp given local arg g._do.assert_called_with("get", remote="whatever", local="lol") def _make_serial_tester(method, cxns, index, args, kwargs): args = args[:] kwargs = kwargs.copy() def tester(*a, **k): # Don't care about doing anything with our own args. car, cdr = index, index + 1 predecessors = cxns[:car] successors = cxns[cdr:] for predecessor in predecessors: getattr(predecessor, method).assert_called_with(*args, **kwargs) for successor in successors: assert not getattr(successor, method).called return tester class SerialGroup_: @mark.parametrize("method", ALL_METHODS) def executes_arguments_on_contents_run_serially(self, method): "executes arguments on contents' run() serially" cxns = [Connection(x) for x in ("host1", "host2", "host3")] args = ARGS_BY_METHOD[method] kwargs = KWARGS_BY_METHOD[method] for index, cxn in enumerate(cxns): side_effect = _make_serial_tester( method, cxns, index, args, kwargs ) setattr(cxn, method, Mock(side_effect=side_effect)) g = SerialGroup.from_connections(cxns) getattr(g, method)(*args, **kwargs) # Sanity check, e.g. in case none of them were actually run for cxn in cxns: getattr(cxn, method).assert_called_with(*args, **kwargs) @mark.parametrize("method", ALL_METHODS) def errors_in_execution_capture_and_continue_til_end(self, method): cxns = [Mock(name=x) for x in ("host1", "host2", "host3")] class OhNoz(Exception): pass onoz = OhNoz() getattr(cxns[1], method).side_effect = onoz g = SerialGroup.from_connections(cxns) try: getattr(g, method)("whatever", hide=True) except GroupException as e: result = e.result else: assert False, "Did not raise GroupException!" 
succeeded = { cxns[0]: getattr(cxns[0], method).return_value, cxns[2]: getattr(cxns[2], method).return_value, } failed = {cxns[1]: onoz} expected = succeeded.copy() expected.update(failed) assert result == expected assert result.succeeded == succeeded assert result.failed == failed @mark.parametrize("method", ALL_METHODS) def returns_results_mapping(self, method): cxns = [Mock(name=x) for x in ("host1", "host2", "host3")] g = SerialGroup.from_connections(cxns) result = getattr(g, method)("whatever", hide=True) assert isinstance(result, GroupResult) expected = {x: getattr(x, method).return_value for x in cxns} assert result == expected assert result.succeeded == expected assert result.failed == {} class ThreadingGroup_: def setup(self): self.cxns = [Connection(x) for x in ("host1", "host2", "host3")] @mark.parametrize("method", ALL_METHODS) @patch("fabric.group.Queue") @patch("fabric.group.ExceptionHandlingThread") def executes_arguments_on_contents_run_via_threading( self, Thread, Queue, method ): queue = Queue.return_value g = ThreadingGroup.from_connections(self.cxns) # Make sure .exception() doesn't yield truthy Mocks. Otherwise we # end up with 'exceptions' that cause errors due to all being the # same. Thread.return_value.exception.return_value = None args = ARGS_BY_METHOD[method] kwargs = KWARGS_BY_METHOD[method] getattr(g, method)(*args, **kwargs) # Testing that threads were used the way we expect is mediocre but # I honestly can't think of another good way to assert "threading # was used & concurrency occurred"... 
instantiations = [ call( target=thread_worker, kwargs=dict( cxn=cxn, queue=queue, method=method, args=args, kwargs=kwargs, ), ) for cxn in self.cxns ] Thread.assert_has_calls(instantiations, any_order=True) # These ought to work as by default a Mock.return_value is a # singleton mock object expected = len(self.cxns) for name, got in ( ("start", Thread.return_value.start.call_count), ("join", Thread.return_value.join.call_count), ): err = ( "Expected {} calls to ExceptionHandlingThread.{}, got {}" ) # noqa err = err.format(expected, name, got) assert expected, got == err @mark.parametrize("method", ALL_METHODS) @patch("fabric.group.Queue") def queue_used_to_return_results(self, Queue, method): # Regular, explicit, mocks for Connections cxns = [Mock(host=x) for x in ("host1", "host2", "host3")] # Set up Queue with enough behavior to work / assert queue = Queue.return_value # Ending w/ a True will terminate a while-not-empty loop queue.empty.side_effect = (False, False, False, True) fakes = [(x, getattr(x, method).return_value) for x in cxns] queue.get.side_effect = fakes[:] # Execute & inspect results g = ThreadingGroup.from_connections(cxns) results = getattr(g, method)( *ARGS_BY_METHOD[method], **KWARGS_BY_METHOD[method] ) expected = {x: getattr(x, method).return_value for x in cxns} assert results == expected # Make sure queue was used as expected within worker & # ThreadingGroup.run() puts = [call(x) for x in fakes] queue.put.assert_has_calls(puts, any_order=True) assert queue.empty.called gets = [call(block=False) for _ in cxns] queue.get.assert_has_calls(gets) @mark.parametrize("method", ALL_METHODS) def bubbles_up_errors_within_threads(self, method): # TODO: I feel like this is the first spot where a raw # ThreadException might need tweaks, at least presentation-wise, # since we're no longer dealing with truly background threads (IO # workers and tunnels), but "middle-ground" threads the user is # kind of expecting (and which they might expect to encounter # 
failures). cxns = [Mock(host=x) for x in ("host1", "host2", "host3")] class OhNoz(Exception): pass onoz = OhNoz() getattr(cxns[1], method).side_effect = onoz g = ThreadingGroup.from_connections(cxns) try: getattr(g, method)( *ARGS_BY_METHOD[method], **KWARGS_BY_METHOD[method] ) except GroupException as e: result = e.result else: assert False, "Did not raise GroupException!" succeeded = { cxns[0]: getattr(cxns[0], method).return_value, cxns[2]: getattr(cxns[2], method).return_value, } failed = {cxns[1]: onoz} expected = succeeded.copy() expected.update(failed) assert result == expected assert result.succeeded == succeeded assert result.failed == failed @mark.parametrize("method", ALL_METHODS) def returns_results_mapping(self, method): cxns = [Mock(name=x) for x in ("host1", "host2", "host3")] g = ThreadingGroup.from_connections(cxns) result = getattr(g, method)("whatever", hide=True) assert isinstance(result, GroupResult) expected = {x: getattr(x, method).return_value for x in cxns} assert result == expected assert result.succeeded == expected assert result.failed == {} fabric-2.6.0/tests/init.py000066400000000000000000000021201400143053200154070ustar00rootroot00000000000000import fabric from fabric import _version, connection, runners, group, tasks, executor class init: "__init__" def version_and_version_info(self): for name in ("__version_info__", "__version__"): assert getattr(_version, name) == getattr(fabric, name) def Connection(self): assert fabric.Connection is connection.Connection def Remote(self): assert fabric.Remote is runners.Remote def Result(self): assert fabric.Result is runners.Result def Config(self): assert fabric.Config is connection.Config def Group(self): assert fabric.Group is group.Group def SerialGroup(self): assert fabric.SerialGroup is group.SerialGroup def ThreadingGroup(self): assert fabric.ThreadingGroup is group.ThreadingGroup def GroupResult(self): assert fabric.GroupResult is group.GroupResult def task(self): assert fabric.task is 
tasks.task def Task(self): assert fabric.Task is tasks.Task def Executor(self): assert fabric.Executor is executor.Executor fabric-2.6.0/tests/main.py000066400000000000000000000331051400143053200153770ustar00rootroot00000000000000""" Tests concerned with the ``fab`` tool & how it overrides Invoke defaults. """ import os import sys import re from invoke import run from invoke.util import cd from mock import patch import pytest # because WHY would you expose @skip normally? -_- from pytest_relaxed import raises from fabric.config import Config from fabric.main import make_program from fabric.exceptions import NothingToDo from fabric.testing.base import Session from _util import expect, support, config_file, trap # Designate a runtime config file intended for the test environment; it does # things like automatically mute stdin so test harnesses that care about stdin # don't get upset. # NOTE: this requires the test environment to have Invoke 1.1.0 or above; for # now this is fine as we don't do a big serious matrix, we typically use Invoke # master to allow testing in-dev changes. # TODO: if that _changes_ then we may have to rethink this so that it goes back # to being testable on Invoke >=1.0 instead of >=1.1... 
os.environ["INVOKE_RUNTIME_CONFIG"] = config_file class Fab_: class core_program_behavior: def version_output_contains_our_name_plus_deps(self): expect( "--version", r""" Fabric .+ Paramiko .+ Invoke .+ """.strip(), test="regex", ) def help_output_says_fab(self): expect("--help", "Usage: fab", test="contains") def exposes_hosts_flag_in_help(self): expect("--help", "-H STRING, --hosts=STRING", test="contains") def executes_remainder_as_anonymous_task(self, remote): remote.expect(host="myhost", cmd="whoami") make_program().run("fab -H myhost -- whoami", exit=False) def uses_FABRIC_env_prefix(self, environ): environ["FABRIC_RUN_ECHO"] = "1" with cd(support): make_program().run("fab expect-from-env") def basic_pre_and_post_tasks_still_work(self): with cd(support): # Sanity expect("first", "First!\n") expect("third", "Third!\n") # Real test expect("second", "First!\nSecond!\nThird!\n") class filenames: def loads_fabfile_not_tasks(self): "Loads fabfile.py, not tasks.py" with cd(support): expect( "--list", """ Available tasks: basic-run build deploy expect-connect-timeout expect-from-env expect-identities expect-identity expect-mutation expect-mutation-to-fail expect-vanilla-Context first hosts-are-host-stringlike hosts-are-init-kwargs hosts-are-mixed-values hosts-are-myhost mutate second third two-hosts vanilla-Task-works-ok """.lstrip(), ) def loads_fabric_config_files_not_invoke_ones(self): for type_ in ("yaml", "yml", "json", "py"): with cd(os.path.join(support, "{}_conf".format(type_))): # This task, in each subdir, expects data present in a # fabric. nearby to show up in the config. 
make_program().run("fab expect-conf-value") class runtime_ssh_config_path: def _run( self, flag="-S", file_="ssh_config/runtime.conf", tasks="runtime-ssh-config", ): with cd(support): # Relies on asserts within the task, which will bubble up as # it's executed in-process cmd = "fab -c runtime_fabfile {} {} -H runtime {}" make_program().run(cmd.format(flag, file_, tasks)) def capital_F_flag_specifies_runtime_ssh_config_file(self): self._run(flag="-S") def long_form_flag_also_works(self): self._run(flag="--ssh-config") @raises(IOError) def IOErrors_if_given_missing_file(self): self._run(file_="nope/nothere.conf") @patch.object(Config, "_load_ssh_file") def config_only_loaded_once_per_session(self, method): # Task that doesn't make assertions about the config (since the # _actual_ config it gets is empty as we had to mock out the loader # method...sigh) self._run(tasks="dummy dummy") # Called only once (initial __init__) with runtime conf, instead of # that plus a few more pairs of calls against the default files # (which is what happens when clone() isn't preserving the # already-parsed/loaded SSHConfig) method.assert_called_once_with("ssh_config/runtime.conf") class hosts_flag_parameterizes_tasks: # NOTE: many of these just rely on MockRemote's builtin # "channel.exec_command called with given command string" asserts. 
def single_string_is_single_host_and_single_exec(self, remote): remote.expect(host="myhost", cmd="nope") # In addition to just testing a base case, this checks for a really # dumb bug where one appends to, instead of replacing, the task # list during parameterization/expansion XD with cd(support): make_program().run("fab -H myhost basic-run") def comma_separated_string_is_multiple_hosts(self, remote): remote.expect_sessions( Session("host1", cmd="nope"), Session("host2", cmd="nope") ) with cd(support): make_program().run("fab -H host1,host2 basic-run") def multiple_hosts_works_with_remainder_too(self, remote): remote.expect_sessions( Session("host1", cmd="whoami"), Session("host2", cmd="whoami") ) make_program().run("fab -H host1,host2 -- whoami") def host_string_shorthand_is_passed_through(self, remote): remote.expect(host="host1", port=1234, user="someuser") make_program().run("fab -H someuser@host1:1234 -- whoami") # NOTE: no mocking because no actual run() under test, only # parameterization # TODO: avoiding for now because implementing this requires more work # at the Invoke level re: deciding when to _not_ pass in the # session-global config object (Executor's self.config). At the moment, # our threading-concurrency API is oriented around Group, and we're not # using it for --hosts, so it's not broken...yet. @pytest.mark.skip def config_mutation_not_preserved(self): with cd(support): make_program().run( "fab -H host1,host2 expect-mutation-to-fail" ) @trap def pre_post_tasks_are_not_parameterized_across_hosts(self): with cd(support): make_program().run( "fab -H hostA,hostB,hostC second --show-host" ) output = sys.stdout.getvalue() # Expect pre once, 3x main, post once, as opposed to e.g. both # pre and main task expected = """ First! Second: hostA Second: hostB Second: hostC Third! 
""".lstrip() assert output == expected class hosts_task_arg_parameterizes_tasks: # NOTE: many of these just rely on MockRemote's builtin # "channel.exec_command called with given command string" asserts. def single_string_is_single_exec(self, remote): remote.expect(host="myhost", cmd="nope") with cd(support): make_program().run("fab hosts-are-myhost") def multiple_strings_is_multiple_host_args(self, remote): remote.expect_sessions( Session("host1", cmd="nope"), Session("host2", cmd="nope") ) with cd(support): make_program().run("fab two-hosts") def host_string_shorthand_works_ok(self, remote): remote.expect(host="host1", port=1234, user="someuser") with cd(support): make_program().run("fab hosts-are-host-stringlike") def may_give_Connection_init_kwarg_dicts(self, remote): remote.expect_sessions( Session("host1", user="admin", cmd="nope"), Session("host2", cmd="nope"), ) with cd(support): make_program().run("fab hosts-are-init-kwargs") def may_give_mixed_value_types(self, remote): remote.expect_sessions( Session("host1", user="admin", cmd="nope"), Session("host2", cmd="nope"), ) with cd(support): make_program().run("fab hosts-are-mixed-values") class no_hosts_flag_or_task_arg: def calls_task_once_with_invoke_context(self): with cd(support): make_program().run("fab expect-vanilla-Context") def vanilla_Invoke_task_works_too(self): with cd(support): make_program().run("fab vanilla-Task-works-ok") @raises(NothingToDo) def generates_exception_if_combined_with_remainder(self): make_program().run("fab -- nope") def invokelike_multitask_invocation_preserves_config_mutation(self): # Mostly a guard against Executor subclass tweaks breaking Invoke # behavior added in pyinvoke/invoke#309 with cd(support): make_program().run("fab mutate expect-mutation") class connect_timeout: def dash_t_supplies_default_connect_timeout(self): with cd(support): make_program().run("fab -t 5 expect-connect-timeout") def double_dash_connect_timeout_also_works(self): with cd(support): 
make_program().run( "fab --connect-timeout 5 expect-connect-timeout" ) class runtime_identity_file: def dash_i_supplies_default_connect_kwarg_key_filename(self): # NOTE: the expect-identity task in tests/_support/fabfile.py # performs asserts about its context's .connect_kwargs value, # relying on other tests to prove connect_kwargs makes its way into # that context. with cd(support): make_program().run("fab -i identity.key expect-identity") def double_dash_identity_also_works(self): with cd(support): make_program().run( "fab --identity identity.key expect-identity" ) def may_be_given_multiple_times(self): with cd(support): make_program().run( "fab -i identity.key -i identity2.key expect-identities" ) class secrets_prompts: @patch("fabric.main.getpass.getpass") def _expect_prompt(self, getpass, flag, key, value, prompt): getpass.return_value = value with cd(support): # Expect that the given key was found in the context. cmd = "fab -c prompting {} expect-connect-kwarg --key {} --val {}" # noqa make_program().run(cmd.format(flag, key, value)) # Then we also expect that getpass was called w/ expected prompt getpass.assert_called_once_with(prompt) def password_prompt_updates_connect_kwargs(self): self._expect_prompt( flag="--prompt-for-login-password", key="password", value="mypassword", prompt="Enter login password for use with SSH auth: ", ) def passphrase_prompt_updates_connect_kwargs(self): self._expect_prompt( flag="--prompt-for-passphrase", key="passphrase", value="mypassphrase", prompt="Enter passphrase for use unlocking SSH keys: ", ) class configuration_updating_and_merging: def key_filename_can_be_set_via_non_override_config_levels(self): # Proves/protects against #1762, where eg key_filenames gets # 'reset' to an empty list. Arbitrarily uses the 'yml' level of # test fixtures, which has a fabric.yml w/ a # connect_kwargs.key_filename value of [private.key, other.key]. 
with cd(os.path.join(support, "yml_conf")): make_program().run("fab expect-conf-key-filename") def cli_identity_still_overrides_when_non_empty(self): with cd(os.path.join(support, "yml_conf")): make_program().run("fab -i cli.key expect-cli-key-filename") class completion: # NOTE: most completion tests are in Invoke too; this is just an # irritating corner case driven by Fabric's 'remainder' functionality. @trap def complete_flag_does_not_trigger_remainder_only_behavior(self): # When bug present, 'fab --complete -- fab' fails to load any # collections because it thinks it's in remainder-only, # work-without-a-collection mode. with cd(support): make_program().run("fab --complete -- fab", exit=False) # Cherry-picked sanity checks looking for tasks from fixture # fabfile output = sys.stdout.getvalue() for name in ("build", "deploy", "expect-from-env"): assert name in output class main: "__main__" def python_dash_m_acts_like_fab(self, capsys): # Rehash of version output test, but using 'python -m fabric' expected_output = r""" Fabric .+ Paramiko .+ Invoke .+ """.strip() output = run("python -m fabric --version", hide=True, in_stream=False) assert re.match(expected_output, output.stdout) fabric-2.6.0/tests/runners.py000066400000000000000000000117701400143053200161530ustar00rootroot00000000000000try: from invoke.vendor.six import StringIO except ImportError: from six import StringIO from mock import Mock from pytest import skip # noqa from invoke import pty_size, Result from fabric import Config, Connection, Remote # On most systems this will explode if actually executed as a shell command; # this lets us detect holes in our network mocking. CMD = "nope" # TODO: see TODO in tests/main.py above _run_fab(), this is the same thing. 
def _Connection(*args, **kwargs): kwargs["config"] = Config({"run": {"in_stream": False}}) return Connection(*args, **kwargs) def _runner(): return Remote(context=_Connection("host")) class Remote_: def needs_handle_on_a_Connection(self): c = _Connection("host") assert Remote(context=c).context is c class run: def calls_expected_paramiko_bits(self, remote): # remote mocking makes generic sanity checks like "were # get_transport and open_session called", but we also want to make # sure that exec_command got run with our arg to run(). remote.expect(cmd=CMD) _runner().run(CMD) def writes_remote_streams_to_local_streams(self, remote): remote.expect(out=b"hello yes this is dog") fakeout = StringIO() _runner().run(CMD, out_stream=fakeout) assert fakeout.getvalue() == "hello yes this is dog" def pty_True_uses_paramiko_get_pty(self, remote): chan = remote.expect() _runner().run(CMD, pty=True) cols, rows = pty_size() chan.get_pty.assert_called_with(width=cols, height=rows) def return_value_is_Result_subclass_exposing_cxn_used(self, remote): c = _Connection("host") result = Remote(context=c).run(CMD) assert isinstance(result, Result) # Mild sanity test for other Result superclass bits assert result.ok is True assert result.exited == 0 # Test the attr our own subclass adds assert result.connection is c def channel_is_closed_normally(self, remote): chan = remote.expect() # I.e. Remote.stop() closes the channel automatically _runner().run(CMD) chan.close.assert_called_once_with() def channel_is_closed_on_body_exceptions(self, remote): chan = remote.expect() # I.e. Remote.stop() is called within a try/finally. # Technically is just testing invoke.Runner, but meh. class Oops(Exception): pass class _OopsRemote(Remote): def wait(self): raise Oops() r = _OopsRemote(context=_Connection("host")) try: r.run(CMD) except Oops: chan.close.assert_called_once_with() else: assert False, "Runner failed to raise exception!" def channel_close_skipped_when_channel_not_even_made(self): # I.e. 
if obtaining self.channel doesn't even happen (i.e. if # Connection.create_session() dies), we need to account for that # case... class Oops(Exception): pass def oops(): raise Oops cxn = _Connection("host") cxn.create_session = oops # When bug present, this will result in AttributeError because # Remote has no 'channel' try: Remote(context=cxn).run(CMD) except Oops: pass else: assert False, "Weird, Oops never got raised..." # TODO: how much of Invoke's tests re: the upper level run() (re: # things like returning Result, behavior of Result, etc) to # duplicate here? Ideally none or very few core ones. # TODO: only test guts of our stuff, Invoke's Runner tests should # handle all the normal shit like stdout/err print and capture. # Implies we want a way to import & run those tests ourselves, though, # with the Runner instead being a Remote. Or do we just replicate the # basics? # TODO: all other run() tests from fab1... class start: def sends_env_to_paramiko_update_environment_by_default(self, remote): chan = remote.expect() _runner().run(CMD, env={"FOO": "bar"}) chan.update_environment.assert_called_once_with({"FOO": "bar"}) def uses_export_prefixing_when_inline_env_is_True(self, remote): chan = remote.expect( cmd="export DEBUG=1 PATH=/opt/bin && {}".format(CMD) ) r = Remote(context=_Connection("host"), inline_env=True) r.run(CMD, env={"PATH": "/opt/bin", "DEBUG": "1"}) assert not chan.update_environment.called def kill_closes_the_channel(self): runner = _runner() runner.channel = Mock() runner.kill() runner.channel.close.assert_called_once_with() fabric-2.6.0/tests/task.py000066400000000000000000000104251400143053200154150ustar00rootroot00000000000000# NOTE: named task.py, not tasks.py, to avoid some occasional pytest weirdness from mock import Mock from pytest import skip # noqa import fabric from fabric.tasks import ConnectionCall class Task_: def accepts_Invoke_level_init_kwargs(self): # Arbitrarily selected list of invoke-level kwargs... 
def body(c, parts): "I am a docstring" pass t = fabric.Task( body=body, name="dadbod", aliases=["heavenly", "check", "shop"], default=True, help={"parts": "See: the sum of"}, iterable=["parts"], ) assert t.body is body assert t.__doc__ == "I am a docstring" assert t.name == "dadbod" assert "heavenly" in t.aliases assert t.is_default assert "parts" in t.help assert "parts" in t.iterable def allows_hosts_kwarg(self): # NOTE: most tests are below, in @task tests assert fabric.Task(Mock(), hosts=["user@host"]).hosts == ["user@host"] class task_: def accepts_Invoke_level_kwargs(self): # Arbitrarily selected list of invoke-level kwargs... def body(c, parts): "I am a docstring" pass # Faux @task() t = fabric.task( name="dadbod", aliases=["heavenly", "check", "shop"], default=True, help={"parts": "See: the sum of"}, iterable=["parts"], )(body) assert t.body is body assert t.__doc__ == "I am a docstring" assert t.name == "dadbod" assert "heavenly" in t.aliases assert t.is_default assert "parts" in t.help assert "parts" in t.iterable def returns_Fabric_level_Task_instance(self): assert isinstance(fabric.task(Mock()), fabric.Task) def does_not_touch_klass_kwarg_if_explicitly_given(self): # Otherwise sub-subclassers would be screwed, yea? class SubFabTask(fabric.Task): pass assert isinstance(fabric.task(klass=SubFabTask)(Mock()), SubFabTask) class hosts_kwarg: # NOTE: these don't currently test anything besides "the value given is # attached as .hosts" but they guard against regressions and ensures # things work as documented, even if Executor is what really cares. 
def _run(self, hosts): @fabric.task(hosts=hosts) def mytask(c): pass assert mytask.hosts == hosts def values_may_be_connection_first_posarg_strings(self): self._run(["host1", "user@host2", "host3:2222"]) def values_may_be_Connection_constructor_kwarg_dicts(self): self._run( [ {"host": "host1"}, {"host": "host2", "user": "user"}, {"host": "host3", "port": 2222}, ] ) def values_may_be_mixed(self): self._run([{"host": "host1"}, "user@host2"]) def _dummy(c): pass class ConnectionCall_: class init: "__init__" def inherits_regular_kwargs(self): t = fabric.Task(_dummy) call = ConnectionCall( task=t, called_as="meh", args=["5"], kwargs={"kwarg": "val"}, init_kwargs={}, # whatever ) assert call.task is t assert call.called_as == "meh" assert call.args == ["5"] assert call.kwargs["kwarg"] == "val" def extends_with_init_kwargs_kwarg(self): call = ConnectionCall( task=fabric.Task(_dummy), init_kwargs={"host": "server", "port": 2222}, ) assert call.init_kwargs["port"] == 2222 class str: "___str__" def includes_init_kwargs_host_value(self): call = ConnectionCall( fabric.Task(body=_dummy), init_kwargs=dict(host="host", user="user"), ) # TODO: worth using some subset of real Connection repr() in here? # For now, just stick with hostname. 
expected = ( "" ) # noqa assert str(call) == expected fabric-2.6.0/tests/transfer.py000066400000000000000000000277541400143053200163140ustar00rootroot00000000000000try: from invoke.vendor.six import StringIO except ImportError: from six import StringIO from mock import Mock, call, patch from pytest_relaxed import raises from pytest import skip # noqa from paramiko import SFTPAttributes from fabric import Connection from fabric.transfer import Transfer # TODO: pull in all edge/corner case tests from fabric v1 class Transfer_: class init: "__init__" def requires_connection(self): # Transfer() -> explodes try: Transfer() except TypeError: pass else: assert False, "Did not raise ArgumentError" # Transfer(Connection()) -> happy, exposes an attribute cxn = Connection("host") assert Transfer(cxn).connection is cxn class is_remote_dir: def returns_bool_of_stat_ISDIR_flag(self, sftp_objs): xfer, sftp = sftp_objs # Default mocked st_mode is file-like (first octal digit is 1) assert xfer.is_remote_dir("whatever") is False # Set mode directory-ish (first octal digit is 4) sftp.stat.return_value.st_mode = 0o41777 assert xfer.is_remote_dir("whatever") is True def returns_False_if_stat_raises_IOError(self, sftp_objs): xfer, sftp = sftp_objs sftp.stat.side_effect = IOError assert xfer.is_remote_dir("whatever") is False class get: class basics: def accepts_single_remote_path_posarg(self, sftp_objs): transfer, client = sftp_objs transfer.get("file") client.get.assert_called_with( localpath="/local/file", remotepath="/remote/file" ) def accepts_local_and_remote_kwargs(self, sftp_objs): transfer, client = sftp_objs transfer.get(remote="path1", local="path2") client.get.assert_called_with( remotepath="/remote/path1", localpath="/local/path2" ) def returns_rich_Result_object(self, sftp_objs): transfer, client = sftp_objs cxn = Connection("host") result = Transfer(cxn).get("file") assert result.orig_remote == "file" assert result.remote == "/remote/file" assert result.orig_local is None 
assert result.local == "/local/file" assert result.connection is cxn # TODO: timing info # TODO: bytes-transferred info class path_arg_edge_cases: def local_None_uses_remote_filename(self, transfer): assert transfer.get("file").local == "/local/file" def local_empty_string_uses_remote_filename(self, transfer): assert transfer.get("file", local="").local == "/local/file" @raises(TypeError) def remote_arg_is_required(self, transfer): transfer.get() @raises(ValueError) def remote_arg_cannot_be_None(self, transfer): transfer.get(None) @raises(ValueError) def remote_arg_cannot_be_empty_string(self, transfer): transfer.get("") class local_arg_interpolation: def connection_params(self, transfer): result = transfer.get("somefile", "{user}@{host}-{port}") expected = "/local/{}@host-22".format(transfer.connection.user) assert result.local == expected def connection_params_as_dir(self, transfer): result = transfer.get("somefile", "{host}/") assert result.local == "/local/host/somefile" def remote_path_posixpath_bits(self, transfer): result = transfer.get( "parent/mid/leaf", "foo/{dirname}/bar/{basename}" ) # Recall that test harness sets remote apparent cwd as # /remote/, thus dirname is /remote/parent/mid assert result.local == "/local/foo/remote/parent/mid/bar/leaf" class file_like_local_paths: "file-like local paths" def _get_to_stringio(self, sftp_objs): transfer, client = sftp_objs fd = StringIO() result = transfer.get("file", local=fd) # Note: getfo, not get client.getfo.assert_called_with( remotepath="/remote/file", fl=fd ) return result, fd def remote_path_to_local_StringIO(self, sftp_objs): self._get_to_stringio(sftp_objs) def result_contains_fd_for_local_path(self, sftp_objs): result, fd = self._get_to_stringio(sftp_objs) assert result.remote == "/remote/file" assert result.local is fd class mode_concerns: def setup(self): self.attrs = SFTPAttributes() self.attrs.st_mode = 0o100644 def preserves_remote_mode_by_default(self, sftp): transfer, client, mock_os = sftp # 
Attributes obj reflecting a realistic 'extended' octal mode client.stat.return_value = self.attrs transfer.get("file", local="meh") # Expect os.chmod to be called with the scrubbed/shifted # version of same. mock_os.chmod.assert_called_with("/local/meh", 0o644) def allows_disabling_remote_mode_preservation(self, sftp): transfer, client, mock_os = sftp client.stat.return_value = self.attrs transfer.get("file", local="meh", preserve_mode=False) assert not mock_os.chmod.called class local_directory_creation: @patch("fabric.transfer.Path") def without_trailing_slash_means_leaf_file(self, Path, sftp_objs): transfer, client = sftp_objs transfer.get(remote="file", local="top/middle/leaf") client.get.assert_called_with( localpath="/local/top/middle/leaf", remotepath="/remote/file", ) Path.assert_called_with("top/middle") Path.return_value.mkdir.assert_called_with( parents=True, exist_ok=True ) @patch("fabric.transfer.Path") def with_trailing_slash_means_mkdir_entire_arg( self, Path, sftp_objs ): transfer, client = sftp_objs transfer.get(remote="file", local="top/middle/leaf/") client.get.assert_called_with( localpath="/local/top/middle/leaf/file", remotepath="/remote/file", ) Path.assert_called_with("top/middle/leaf/") Path.return_value.mkdir.assert_called_with( parents=True, exist_ok=True ) class put: class basics: def accepts_single_local_path_posarg(self, sftp_objs): transfer, client = sftp_objs transfer.put("file") client.put.assert_called_with( localpath="/local/file", remotepath="/remote/file" ) def accepts_local_and_remote_kwargs(self, sftp_objs): transfer, sftp = sftp_objs # NOTE: default mock stat is file-ish, so path won't be munged transfer.put(local="path2", remote="path1") sftp.put.assert_called_with( localpath="/local/path2", remotepath="/remote/path1" ) def returns_rich_Result_object(self, transfer): cxn = Connection("host") result = Transfer(cxn).put("file") assert result.orig_remote is None assert result.remote == "/remote/file" assert result.orig_local == 
"file" assert result.local == "/local/file" assert result.connection is cxn # TODO: timing info # TODO: bytes-transferred info class remote_end_is_directory: def appends_local_file_basename(self, sftp_objs): xfer, sftp = sftp_objs sftp.stat.return_value.st_mode = 0o41777 xfer.put(local="file.txt", remote="/dir/path/") sftp.stat.assert_called_once_with("/dir/path/") sftp.put.assert_called_with( localpath="/local/file.txt", remotepath="/dir/path/file.txt", ) class file_like_local_objects: def name_attribute_present_appends_like_basename( self, sftp_objs ): xfer, sftp = sftp_objs sftp.stat.return_value.st_mode = 0o41777 local = StringIO("sup\n") local.name = "sup.txt" xfer.put(local, remote="/dir/path") sftp.putfo.assert_called_with( fl=local, remotepath="/dir/path/sup.txt" ) @raises(ValueError) def no_name_attribute_raises_ValueError(self, sftp_objs): xfer, sftp = sftp_objs sftp.stat.return_value.st_mode = 0o41777 local = StringIO("sup\n") xfer.put(local, remote="/dir/path") class path_arg_edge_cases: def remote_None_uses_local_filename(self, transfer): assert transfer.put("file").remote == "/remote/file" def remote_empty_string_uses_local_filename(self, transfer): assert transfer.put("file", remote="").remote == "/remote/file" @raises(ValueError) def remote_cant_be_empty_if_local_file_like(self, transfer): transfer.put(StringIO()) @raises(TypeError) def local_arg_is_required(self, transfer): transfer.put() @raises(ValueError) def local_arg_cannot_be_None(self, transfer): transfer.put(None) @raises(ValueError) def local_arg_cannot_be_empty_string(self, transfer): transfer.put("") class file_like_local_paths: "file-like local paths" def _put_from_stringio(self, sftp_objs): transfer, client = sftp_objs fd = StringIO() result = transfer.put(fd, remote="file") # Note: putfo, not put client.putfo.assert_called_with( remotepath="/remote/file", fl=fd ) return result, fd def remote_path_from_local_StringIO(self, sftp_objs): self._put_from_stringio(sftp_objs) def 
local_FLOs_are_rewound_before_putting(self, transfer): fd = Mock() fd.tell.return_value = 17 transfer.put(fd, remote="file") seek_calls = fd.seek.call_args_list assert seek_calls, [call(0) == call(17)] def result_contains_fd_for_local_path(self, sftp_objs): result, fd = self._put_from_stringio(sftp_objs) assert result.remote == "/remote/file" assert result.local is fd class mode_concerns: def preserves_local_mode_by_default(self, sftp): transfer, client, mock_os = sftp # This is a realistic stat for 0o644 mock_os.stat.return_value.st_mode = 33188 transfer.put("file") client.chmod.assert_called_with("/remote/file", 0o644) def allows_disabling_local_mode_preservation(self, sftp_objs): transfer, client = sftp_objs transfer.put("file", preserve_mode=False) assert not client.chmod.called fabric-2.6.0/tests/util.py000066400000000000000000000013361400143053200154310ustar00rootroot00000000000000""" Tests testing the fabric.util module, not utils for the tests! """ from mock import patch from fabric.util import get_local_user # Basically implementation tests, because it's not feasible to do a "real" test # on random platforms (where we have no idea what the actual invoking user is) class get_local_user_: @patch("getpass.getuser") def defaults_to_getpass_getuser(self, getuser): "defaults to getpass.getuser" get_local_user() getuser.assert_called_once_with() @patch("getpass.getuser", side_effect=KeyError) def KeyError_means_SaaS_and_thus_None(self, getuser): assert get_local_user() is None # TODO: test for ImportError+win32 once appveyor is set up as w/ invoke