pax_global_header00006660000000000000000000000064143216310040014504gustar00rootroot0000000000000052 comment=0f45469309d355db7939236d0f3f1d3414de898f aioftp-0.21.4/000077500000000000000000000000001432163100400130525ustar00rootroot00000000000000aioftp-0.21.4/.coveragerc000066400000000000000000000000671432163100400151760ustar00rootroot00000000000000[report] show_missing = True omit = aioftp/__main__.py aioftp-0.21.4/.github/000077500000000000000000000000001432163100400144125ustar00rootroot00000000000000aioftp-0.21.4/.github/workflows/000077500000000000000000000000001432163100400164475ustar00rootroot00000000000000aioftp-0.21.4/.github/workflows/ci.yml000066400000000000000000000020671432163100400175720ustar00rootroot00000000000000name: build on: [push, pull_request] jobs: lint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 with: python-version: '3.x' - run: | pip install flake8 flake8 aioftp tests tests: needs: lint runs-on: ubuntu-latest strategy: matrix: python-version: ['3.7', '3.8', '3.9', '3.10'] steps: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - run: | pip install -e ./[tests] pytest - uses: codecov/codecov-action@v2 with: fail_ci_if_error: true verbose: true deploy: needs: tests runs-on: ubuntu-latest if: github.ref == 'refs/heads/master' steps: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 - uses: casperdcl/deploy-pypi@v2 with: password: ${{ secrets.PYPI_TOKEN }} build: true skip_existing: true aioftp-0.21.4/.gitignore000066400000000000000000000002341432163100400150410ustar00rootroot00000000000000docs/_build /build/ /dist/ /.coverage __pycache__ *.pyc *.egg-info .idea /.python-version /.eggs /tags /*.py .vscode .mypy_cache .pytest_cache coverage.xml aioftp-0.21.4/MANIFEST.in000066400000000000000000000002031432163100400146030ustar00rootroot00000000000000include README.rst include license.txt include history.rst include doc-requirements.txt include nose.cfg recursive-include tests * aioftp-0.21.4/README.rst000066400000000000000000000121731432163100400145450ustar00rootroot00000000000000.. aioftp documentation master file, created by sphinx-quickstart on Fri Apr 17 16:21:03 2015. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. aioftp ====== .. image:: https://github.com/aio-libs/aioftp/actions/workflows/ci.yml/badge.svg?branch=master :target: https://github.com/aio-libs/aioftp/actions/workflows/ci.yml :alt: Github actions ci for master branch .. image:: https://codecov.io/gh/aio-libs/aioftp/branch/master/graph/badge.svg :target: https://codecov.io/gh/aio-libs/aioftp .. image:: https://img.shields.io/pypi/v/aioftp.svg :target: https://pypi.python.org/pypi/aioftp .. image:: https://img.shields.io/pypi/pyversions/aioftp.svg :target: https://pypi.python.org/pypi/aioftp .. image:: https://pepy.tech/badge/aioftp/month :target: https://pypi.python.org/pypi/aioftp ftp client/server for asyncio (http://aioftp.readthedocs.org) .. _GitHub: https://github.com/aio-libs/aioftp Features -------- - Simple. - Extensible. - Client socks proxy via `siosocks `_ (`pip install aioftp[socks]`). Goals ----- - Minimum usable core. - Do not use deprecated or overridden commands and features (if possible). - Very high level api. 
The client uses these commands: USER, PASS, ACCT, PWD, CWD, CDUP, MKD, RMD, MLSD, MLST, RNFR, RNTO, DELE, STOR, APPE, RETR, TYPE, PASV, ABOR, QUIT, REST, LIST (as a fallback)

The server supports these commands: USER, PASS, QUIT, PWD, CWD, CDUP, MKD, RMD, MLSD, LIST (not recommended, since it has no standard format), MLST, RNFR, RNTO, DELE, STOR, RETR, TYPE ("I" and "A"), PASV, ABOR, APPE, REST

These subsets are enough for 99% of tasks, but if you need something more, you can easily extend the current set of commands.

Server benchmark
----------------

Compared with `pyftpdlib <https://github.com/giampaolo/pyftpdlib>`_ and checked with its ftpbench script.

aioftp 0.8.0

::

    STOR (client -> server)                      284.95 MB/sec
    RETR (server -> client)                      408.44 MB/sec
    200 concurrent clients (connect, login)        0.18 secs
    STOR (1 file with 200 idle clients)          287.52 MB/sec
    RETR (1 file with 200 idle clients)          382.05 MB/sec
    200 concurrent clients (RETR 10.0M file)      13.33 secs
    200 concurrent clients (STOR 10.0M file)      12.56 secs
    200 concurrent clients (QUIT)                  0.03 secs

pyftpdlib 1.5.2

::

    STOR (client -> server)                     1235.56 MB/sec
    RETR (server -> client)                     3960.21 MB/sec
    200 concurrent clients (connect, login)        0.06 secs
    STOR (1 file with 200 idle clients)         1208.58 MB/sec
    RETR (1 file with 200 idle clients)         3496.03 MB/sec
    200 concurrent clients (RETR 10.0M file)       0.55 secs
    200 concurrent clients (STOR 10.0M file)       1.46 secs
    200 concurrent clients (QUIT)                  0.02 secs

Dependencies
------------

- Python 3.7+

0.13.0 is the last version which supports Python 3.5.3+

0.16.1 is the last version which supports Python 3.6+

License
-------

aioftp is offered under the Apache 2 license.

Library installation
--------------------

::

    pip install aioftp

Getting started
---------------

Client example

**WARNING**

For all commands that use some sort of «stats» or «listing», ``aioftp`` first tries the ``MLSx``-family commands (since they have a structured, machine-readable format on all platforms). But old/lazy/nasty servers do not implement these commands. In that case ``aioftp`` falls back to the ``LIST`` command, which has no standard format and cannot be parsed in all cases. Take a look at the `FileZilla <https://github.com/FileZilla/FileZilla/blob/master/src/engine/directorylistingparser.cpp>`_ «directory listing» parser code. So, before creating a new issue, make sure this is not your case (you can check it with logs). In any case, you can provide your own ``LIST`` parser routine (see the client documentation).

.. code-block:: python

    import asyncio
    import aioftp


    async def get_mp3(host, port, login, password):
        async with aioftp.Client.context(host, port, login, password) as client:
            for path, info in (await client.list(recursive=True)):
                if info["type"] == "file" and path.suffix == ".mp3":
                    await client.download(path)


    async def main():
        tasks = [
            asyncio.create_task(get_mp3("server1.com", 21, "login", "password")),
            asyncio.create_task(get_mp3("server2.com", 21, "login", "password")),
            asyncio.create_task(get_mp3("server3.com", 21, "login", "password")),
        ]
        await asyncio.wait(tasks)


    asyncio.run(main())

Server example

.. code-block:: python

    import asyncio
    import aioftp


    async def main():
        user = aioftp.User()  # anonymous user, shares the current directory
        server = aioftp.Server([user], path_io_factory=aioftp.PathIO)
        await server.run()


    asyncio.run(main())

Or just use the simple server

..
code-block:: shell python -m aioftp --help aioftp-0.21.4/aioftp/000077500000000000000000000000001432163100400143345ustar00rootroot00000000000000aioftp-0.21.4/aioftp/__init__.py000066400000000000000000000006031432163100400164440ustar00rootroot00000000000000"""ftp client/server for asyncio""" # flake8: noqa from .client import * from .common import * from .errors import * from .pathio import * from .server import * __version__ = "0.21.4" version = tuple(map(int, __version__.split("."))) __all__ = ( client.__all__ + server.__all__ + errors.__all__ + common.__all__ + pathio.__all__ + ("version", "__version__") ) aioftp-0.21.4/aioftp/__main__.py000066400000000000000000000042061432163100400164300ustar00rootroot00000000000000"""Simple aioftp-based server with one user (anonymous or not)""" import argparse import asyncio import contextlib import logging import socket import aioftp parser = argparse.ArgumentParser( prog="aioftp", usage="%(prog)s [options]", description="Simple aioftp-based server with one user (anonymous or not)." ) parser.add_argument("--user", metavar="LOGIN", dest="login", help="user name to login") parser.add_argument("--pass", metavar="PASSWORD", dest="password", help="password to login") parser.add_argument("-d", metavar="DIRECTORY", dest="home", help="the directory to share (default current directory)") parser.add_argument("-q", "--quiet", action="store_true", help="set logging level to 'ERROR' instead of 'INFO'") parser.add_argument("--memory", action="store_true", help="use memory storage") parser.add_argument("--host", default=None, help="host for binding [default: %(default)s]") parser.add_argument("--port", type=int, default=2121, help="port for binding [default: %(default)s]") parser.add_argument("--family", choices=("ipv4", "ipv6", "auto"), default="auto", help="Socket family [default: %(default)s]") args = parser.parse_args() print(f"aioftp v{aioftp.__version__}") if not args.quiet: logging.basicConfig( level=logging.INFO, format="%(asctime)s [%(name)s] %(message)s", datefmt="[%H:%M:%S]:", ) if args.memory: user = aioftp.User(args.login, args.password, base_path="/") path_io_factory = aioftp.MemoryPathIO else: if args.home: user = aioftp.User(args.login, args.password, base_path=args.home) else: user = aioftp.User(args.login, args.password) path_io_factory = aioftp.PathIO family = { "ipv4": socket.AF_INET, "ipv6": socket.AF_INET6, "auto": socket.AF_UNSPEC, }[args.family] async def main(): server = aioftp.Server([user], path_io_factory=path_io_factory) await server.run(args.host, args.port, family=family) with contextlib.suppress(KeyboardInterrupt): asyncio.run(main()) aioftp-0.21.4/aioftp/client.py000066400000000000000000001203231432163100400161650ustar00rootroot00000000000000import asyncio import calendar import collections import contextlib import datetime import logging import pathlib import re from functools import partial from . import errors, pathio from .common import ( DEFAULT_ACCOUNT, DEFAULT_BLOCK_SIZE, DEFAULT_PASSWORD, DEFAULT_PORT, DEFAULT_USER, END_OF_LINE, HALF_OF_YEAR_IN_SECONDS, TWO_YEARS_IN_SECONDS, AsyncListerMixin, StreamThrottle, ThrottleStreamIO, async_enterable, setlocale, wrap_with_container, ) try: from siosocks.io.asyncio import open_connection except ImportError: from asyncio import open_connection __all__ = ( "BaseClient", "Client", "DataConnectionThrottleStreamIO", "Code", ) logger = logging.getLogger(__name__) class Code(str): """ Representation of server status code. 
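
Since it is a :py:class:`str` subclass, masks can be checked via
:py:meth:`aioftp.Code.matches`, e.g. for a hypothetical reply::

    >>> Code("220").matches("2xx")
    True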
""" def matches(self, mask): """ :param mask: Template for comparision. If mask symbol is not digit then it passes. :type mask: :py:class:`str` :: >>> Code("123").matches("1") True >>> Code("123").matches("1x3") True """ return all(map(lambda m, c: not m.isdigit() or m == c, mask, self)) class DataConnectionThrottleStreamIO(ThrottleStreamIO): """ Add `finish` method to :py:class:`aioftp.ThrottleStreamIO`, which is specific for data connection. This requires `client`. :param client: client class, which have :py:meth:`aioftp.Client.command` :type client: :py:class:`aioftp.BaseClient` :param *args: positional arguments passed to :py:class:`aioftp.ThrottleStreamIO` :param **kwargs: keyword arguments passed to :py:class:`aioftp.ThrottleStreamIO` """ def __init__(self, client, *args, **kwargs): super().__init__(*args, **kwargs) self.client = client async def finish(self, expected_codes="2xx", wait_codes="1xx"): """ :py:func:`asyncio.coroutine` Close connection and wait for `expected_codes` response from server passing `wait_codes`. :param expected_codes: tuple of expected codes or expected code :type expected_codes: :py:class:`tuple` of :py:class:`str` or :py:class:`str` :param wait_codes: tuple of wait codes or wait code :type wait_codes: :py:class:`tuple` of :py:class:`str` or :py:class:`str` """ self.close() await self.client.command(None, expected_codes, wait_codes) async def __aexit__(self, exc_type, exc, tb): if exc is None: await self.finish() else: self.close() class BaseClient: def __init__(self, *, socket_timeout=None, connection_timeout=None, read_speed_limit=None, write_speed_limit=None, path_timeout=None, path_io_factory=pathio.PathIO, encoding="utf-8", ssl=None, parse_list_line_custom=None, parse_list_line_custom_first=True, passive_commands=("epsv", "pasv"), **siosocks_asyncio_kwargs): self.socket_timeout = socket_timeout self.connection_timeout = connection_timeout self.throttle = StreamThrottle.from_limits( read_speed_limit, write_speed_limit, ) self.path_timeout = path_timeout self.path_io = path_io_factory(timeout=path_timeout) self.encoding = encoding self.stream = None self.ssl = ssl self.parse_list_line_custom = parse_list_line_custom self.parse_list_line_custom_first = parse_list_line_custom_first self._passive_commands = passive_commands self._open_connection = partial(open_connection, ssl=self.ssl, **siosocks_asyncio_kwargs) async def connect(self, host, port=DEFAULT_PORT): self.server_host = host self.server_port = port reader, writer = await asyncio.wait_for( self._open_connection(host, port), self.connection_timeout, ) self.stream = ThrottleStreamIO( reader, writer, throttles={"_": self.throttle}, timeout=self.socket_timeout, ) def close(self): """ Close connection. """ if self.stream is not None: self.stream.close() async def parse_line(self): """ :py:func:`asyncio.coroutine` Parsing server response line. :return: (code, line) :rtype: (:py:class:`aioftp.Code`, :py:class:`str`) :raises ConnectionResetError: if received data is empty (this means, that connection is closed) :raises asyncio.TimeoutError: if there where no data for `timeout` period """ line = await self.stream.readline() if not line: self.stream.close() raise ConnectionResetError s = line.decode(encoding=self.encoding).rstrip() logger.debug(s) return Code(s[:3]), s[3:] async def parse_response(self): """ :py:func:`asyncio.coroutine` Parsing full server response (all lines). 
:return: (code, lines) :rtype: (:py:class:`aioftp.Code`, :py:class:`list` of :py:class:`str`) :raises aioftp.StatusCodeError: if received code does not matches all already received codes """ code, rest = await self.parse_line() info = [rest] curr_code = code while rest.startswith("-") or not curr_code.isdigit(): curr_code, rest = await self.parse_line() if curr_code.isdigit(): info.append(rest) if curr_code != code: raise errors.StatusCodeError(code, curr_code, info) else: info.append(curr_code + rest) return code, info def check_codes(self, expected_codes, received_code, info): """ Checks if any of expected matches received. :param expected_codes: tuple of expected codes :type expected_codes: :py:class:`tuple` :param received_code: received code for matching :type received_code: :py:class:`aioftp.Code` :param info: list of response lines from server :type info: :py:class:`list` :raises aioftp.StatusCodeError: if received code does not matches any expected code """ if not any(map(received_code.matches, expected_codes)): raise errors.StatusCodeError(expected_codes, received_code, info) async def command(self, command=None, expected_codes=(), wait_codes=(), censor_after=None): """ :py:func:`asyncio.coroutine` Basic command logic. 1. Send command if not omitted. 2. Yield response until no wait code matches. 3. Check code for expected. :param command: command line :type command: :py:class:`str` :param expected_codes: tuple of expected codes or expected code :type expected_codes: :py:class:`tuple` of :py:class:`str` or :py:class:`str` :param wait_codes: tuple of wait codes or wait code :type wait_codes: :py:class:`tuple` of :py:class:`str` or :py:class:`str` :param censor_after: index after which the line should be censored when logging :type censor_after: :py:class:`None` or :py:class:`int` """ expected_codes = wrap_with_container(expected_codes) wait_codes = wrap_with_container(wait_codes) if command: if censor_after: # Censor the user's command raw = command[:censor_after] stars = "*" * len(command[censor_after:]) logger.debug("%s%s", raw, stars) else: logger.debug(command) message = command + END_OF_LINE await self.stream.write(message.encode(encoding=self.encoding)) if expected_codes or wait_codes: code, info = await self.parse_response() while any(map(code.matches, wait_codes)): code, info = await self.parse_response() if expected_codes: self.check_codes(expected_codes, code, info) return code, info @staticmethod def parse_epsv_response(s): """ Parsing `EPSV` (`message (|||port|)`) response. :param s: response line :type s: :py:class:`str` :return: (ip, port) :rtype: (:py:class:`None`, :py:class:`int`) """ matches = tuple(re.finditer(r"\((.)\1\1\d+\1\)", s)) s = matches[-1].group() port = int(s[4:-2]) return None, port @staticmethod def parse_pasv_response(s): """ Parsing `PASV` server response. :param s: response line :type s: :py:class:`str` :return: (ip, port) :rtype: (:py:class:`str`, :py:class:`int`) """ sub, *_ = re.findall(r"[^(]*\(([^)]*)", s) nums = tuple(map(int, sub.split(","))) ip = ".".join(map(str, nums[:4])) port = (nums[4] << 8) | nums[5] return ip, port @staticmethod def parse_directory_response(s): """ Parsing directory server response. 
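
For example, a typical ``257`` reply payload::

    >>> BaseClient.parse_directory_response(' "/foo/bar" created')
    PurePosixPath('/foo/bar')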
:param s: response line :type s: :py:class:`str` :rtype: :py:class:`pathlib.PurePosixPath` """ seq_quotes = 0 start = False directory = "" for ch in s: if not start: if ch == "\"": start = True else: if ch == "\"": seq_quotes += 1 else: if seq_quotes == 1: break elif seq_quotes == 2: seq_quotes = 0 directory += '"' directory += ch return pathlib.PurePosixPath(directory) @staticmethod def parse_unix_mode(s): """ Parsing unix mode strings ("rwxr-x--t") into hexacimal notation. :param s: mode string :type s: :py:class:`str` :return mode: :rtype: :py:class:`int` """ parse_rw = {"rw": 6, "r-": 4, "-w": 2, "--": 0} mode = 0 mode |= parse_rw[s[0:2]] << 6 mode |= parse_rw[s[3:5]] << 3 mode |= parse_rw[s[6:8]] if s[2] == "s": mode |= 0o4100 elif s[2] == "x": mode |= 0o0100 elif s[2] != "-": raise ValueError if s[5] == "s": mode |= 0o2010 elif s[5] == "x": mode |= 0o0010 elif s[5] != "-": raise ValueError if s[8] == "t": mode |= 0o1000 elif s[8] == "x": mode |= 0o0001 elif s[8] != "-": raise ValueError return mode @staticmethod def format_date_time(d): """ Formats dates from strptime in a consistent format :param d: return value from strptime :type d: :py:class:`datetime` :rtype: :py:class`str` """ return d.strftime("%Y%m%d%H%M00") @classmethod def parse_ls_date(cls, s, *, now=None): """ Parsing dates from the ls unix utility. For example, "Nov 18 1958", "Jan 03 2018", and "Nov 18 12:29". :param s: ls date :type s: :py:class:`str` :rtype: :py:class:`str` """ with setlocale("C"): try: if now is None: now = datetime.datetime.now() if s.startswith('Feb 29'): # Need to find the nearest previous leap year prev_leap_year = now.year while not calendar.isleap(prev_leap_year): prev_leap_year -= 1 d = datetime.datetime.strptime( f"{prev_leap_year} {s}", "%Y %b %d %H:%M" ) # Check if it's next leap year diff = (now - d).total_seconds() if diff > TWO_YEARS_IN_SECONDS: d = d.replace(year=prev_leap_year + 4) else: d = datetime.datetime.strptime(s, "%b %d %H:%M") d = d.replace(year=now.year) diff = (now - d).total_seconds() if diff > HALF_OF_YEAR_IN_SECONDS: d = d.replace(year=now.year + 1) elif diff < -HALF_OF_YEAR_IN_SECONDS: d = d.replace(year=now.year - 1) except ValueError: d = datetime.datetime.strptime(s, "%b %d %Y") return cls.format_date_time(d) def parse_list_line_unix(self, b): """ Attempt to parse a LIST line (similar to unix ls utility). 
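
A line like this made-up one is parsed into path ``music.mp3``
with ``type``, ``size`` and ``modify`` facts::

    -rw-r--r--   1 user     group        1024 Nov 18 12:29 music.mp3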
:param b: response line :type b: :py:class:`bytes` or :py:class:`str` :return: (path, info) :rtype: (:py:class:`pathlib.PurePosixPath`, :py:class:`dict`) """ s = b.decode(encoding=self.encoding).rstrip() info = {} if s[0] == "-": info["type"] = "file" elif s[0] == "d": info["type"] = "dir" elif s[0] == "l": info["type"] = "link" else: info["type"] = "unknown" # TODO: handle symlinks(beware the symlink loop) info["unix.mode"] = self.parse_unix_mode(s[1:10]) s = s[10:].lstrip() i = s.index(" ") info["unix.links"] = s[:i] if not info["unix.links"].isdigit(): raise ValueError s = s[i:].lstrip() i = s.index(" ") info["unix.owner"] = s[:i] s = s[i:].lstrip() i = s.index(" ") info["unix.group"] = s[:i] s = s[i:].lstrip() i = s.index(" ") info["size"] = s[:i] if not info["size"].isdigit(): raise ValueError s = s[i:].lstrip() info["modify"] = self.parse_ls_date(s[:12].strip()) s = s[12:].strip() if info["type"] == "link": i = s.rindex(" -> ") link_dst = s[i + 4:] link_src = s[:i] i = -2 if link_dst[-1] == "\'" or link_dst[-1] == "\"" else -1 info["type"] = "dir" if link_dst[i] == "/" else "file" s = link_src return pathlib.PurePosixPath(s), info def parse_list_line_windows(self, b): """ Parsing Microsoft Windows `dir` output :param b: response line :type b: :py:class:`bytes` or :py:class:`str` :return: (path, info) :rtype: (:py:class:`pathlib.PurePosixPath`, :py:class:`dict`) """ line = b.decode(encoding=self.encoding).rstrip("\r\n") date_time_end = line.index("M") date_time_str = line[:date_time_end + 1].strip().split(" ") date_time_str = " ".join([x for x in date_time_str if len(x) > 0]) line = line[date_time_end + 1:].lstrip() with setlocale("C"): strptime = datetime.datetime.strptime date_time = strptime(date_time_str, "%m/%d/%Y %I:%M %p") info = {} info["modify"] = self.format_date_time(date_time) next_space = line.index(" ") if line.startswith(""): info["type"] = "dir" else: info["type"] = "file" info["size"] = line[:next_space].replace(",", "") if not info["size"].isdigit(): raise ValueError # This here could cause a problem if a filename started with # whitespace, but if we were to try to detect such a condition # we would have to make strong assumptions about the input format filename = line[next_space:].lstrip() if filename == "." or filename == "..": raise ValueError return pathlib.PurePosixPath(filename), info def parse_list_line(self, b): """ Parse LIST response with both Microsoft Windows® parser and UNIX parser :param b: response line :type b: :py:class:`bytes` or :py:class:`str` :return: (path, info) :rtype: (:py:class:`pathlib.PurePosixPath`, :py:class:`dict`) """ ex = [] parsers = [ self.parse_list_line_unix, self.parse_list_line_windows, ] if self.parse_list_line_custom_first: parsers = [self.parse_list_line_custom] + parsers else: parsers = parsers + [self.parse_list_line_custom] for parser in parsers: if parser is None: continue try: return parser(b) except (ValueError, KeyError, IndexError) as e: ex.append(e) raise ValueError("All parsers failed to parse", b, ex) def parse_mlsx_line(self, b): """ Parsing MLS(T|D) response. 
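
A line like this made-up one is parsed into path ``music.mp3``
with facts ``type``, ``size`` and ``modify``::

    type=file;size=1024;modify=20181118122900; music.mp3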
:param b: response line :type b: :py:class:`bytes` or :py:class:`str` :return: (path, info) :rtype: (:py:class:`pathlib.PurePosixPath`, :py:class:`dict`) """ if isinstance(b, bytes): s = b.decode(encoding=self.encoding) else: s = b line = s.rstrip() facts_found, _, name = line.partition(" ") entry = {} for fact in facts_found[:-1].split(";"): key, _, value = fact.partition("=") entry[key.lower()] = value return pathlib.PurePosixPath(name), entry class Client(BaseClient): """ FTP client. :param socket_timeout: timeout for read operations :type socket_timeout: :py:class:`float`, :py:class:`int` or `None` :param connection_timeout: timeout for connection :type connection_timeout: :py:class:`float`, :py:class:`int` or `None` :param read_speed_limit: download speed limit in bytes per second :type server_to_client_speed_limit: :py:class:`int` or `None` :param write_speed_limit: upload speed limit in bytes per second :type client_to_server_speed_limit: :py:class:`int` or `None` :param path_timeout: timeout for path-related operations (make directory, unlink file, etc.) :type path_timeout: :py:class:`float`, :py:class:`int` or :py:class:`None` :param path_io_factory: factory of «path abstract layer» :type path_io_factory: :py:class:`aioftp.AbstractPathIO` :param encoding: encoding to use for convertion strings to bytes :type encoding: :py:class:`str` :param ssl: if given and not false, a SSL/TLS transport is created (by default a plain TCP transport is created). If ssl is a ssl.SSLContext object, this context is used to create the transport; if ssl is True, a default context returned from ssl.create_default_context() is used. Please look :py:meth:`asyncio.loop.create_connection` docs. :type ssl: :py:class:`bool` or :py:class:`ssl.SSLContext` :param parse_list_line_custom: callable, which receive exactly one argument: line of type bytes. Should return tuple of Path object and dictionary with fields "modify", "type", "size". For more information see sources. :type parse_list_line_custom: callable :param parse_list_line_custom_first: Should be custom parser tried first or last :type parse_list_line_custom_first: :py:class:`bool` :param **siosocks_asyncio_kwargs: siosocks key-word only arguments """ async def connect(self, host, port=DEFAULT_PORT): """ :py:func:`asyncio.coroutine` Connect to server. :param host: host name for connection :type host: :py:class:`str` :param port: port number for connection :type port: :py:class:`int` """ await super().connect(host, port) code, info = await self.command(None, "220", "120") return info async def login(self, user=DEFAULT_USER, password=DEFAULT_PASSWORD, account=DEFAULT_ACCOUNT): """ :py:func:`asyncio.coroutine` Server authentication. :param user: username :type user: :py:class:`str` :param password: password :type password: :py:class:`str` :param account: account (almost always blank) :type account: :py:class:`str` :raises aioftp.StatusCodeError: if unknown code received """ code, info = await self.command("USER " + user, ("230", "33x")) while code.matches("33x"): censor_after = None if code == "331": cmd = "PASS " + password censor_after = 5 elif code == "332": cmd = "ACCT " + account else: raise errors.StatusCodeError("33x", code, info) code, info = await self.command(cmd, ("230", "33x"), censor_after=censor_after) async def get_current_directory(self): """ :py:func:`asyncio.coroutine` Getting current working directory. 
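
E.g. right after login to a server whose root is ``/``::

    >>> await client.get_current_directory()
    PurePosixPath('/')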
:rtype: :py:class:`pathlib.PurePosixPath` """ code, info = await self.command("PWD", "257") directory = self.parse_directory_response(info[-1]) return directory async def change_directory(self, path=".."): """ :py:func:`asyncio.coroutine` Change current directory. Goes «up» if no parameters passed. :param path: new directory, goes «up» if omitted :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` """ path = pathlib.PurePosixPath(path) if path == pathlib.PurePosixPath(".."): cmd = "CDUP" else: cmd = "CWD " + str(path) await self.command(cmd, "2xx") async def make_directory(self, path, *, parents=True): """ :py:func:`asyncio.coroutine` Make directory. :param path: path to directory to create :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :param parents: create parents if does not exists :type parents: :py:class:`bool` """ path = pathlib.PurePosixPath(path) need_create = [] while path.name and not await self.exists(path): need_create.append(path) path = path.parent if not parents: break need_create.reverse() for path in need_create: await self.command("MKD " + str(path), "257") async def remove_directory(self, path): """ :py:func:`asyncio.coroutine` Low level remove method for removing empty directory. :param path: empty directory to remove :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` """ await self.command("RMD " + str(path), "250") def list(self, path="", *, recursive=False, raw_command=None): """ :py:func:`asyncio.coroutine` List all files and directories in "path". If "path" is a file, then result will be empty :param path: directory :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :param recursive: list recursively :type recursive: :py:class:`bool` :param raw_command: optional ftp command to use in place of fallback logic (must be one of "MLSD", "LIST") :type raw_command: :py:class:`str` :rtype: :py:class:`list` or `async for` context :: >>> # lazy list >>> async for path, info in client.list(): ... # no interaction with client should be here(!) >>> # eager list >>> for path, info in (await client.list()): ... # interaction with client allowed, since all paths are ... 
# collected already :: >>> stats = await client.list() """ class AsyncLister(AsyncListerMixin): stream = None async def _new_stream(cls, local_path): cls.path = local_path cls.parse_line = self.parse_mlsx_line if raw_command not in [None, "MLSD", "LIST"]: raise ValueError("raw_command must be one of MLSD or " f"LIST, but got {raw_command}") if raw_command in [None, "MLSD"]: try: command = ("MLSD " + str(cls.path)).strip() return await self.get_stream(command, "1xx") except errors.StatusCodeError as e: code = e.received_codes[-1] if not code.matches("50x") or raw_command is not None: raise if raw_command in [None, "LIST"]: cls.parse_line = self.parse_list_line command = ("LIST " + str(cls.path)).strip() return await self.get_stream(command, "1xx") def __aiter__(cls): cls.directories = collections.deque() return cls async def __anext__(cls): if cls.stream is None: cls.stream = await cls._new_stream(path) while True: line = await cls.stream.readline() while not line: await cls.stream.finish() if cls.directories: current_path, info = cls.directories.popleft() cls.stream = await cls._new_stream(current_path) line = await cls.stream.readline() else: raise StopAsyncIteration name, info = cls.parse_line(line) stat = cls.path / name, info if info["type"] == "dir" and recursive: cls.directories.append(stat) return stat return AsyncLister() async def stat(self, path): """ :py:func:`asyncio.coroutine` Getting path stats. :param path: path for getting info :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :return: path info :rtype: :py:class:`dict` """ path = pathlib.PurePosixPath(path) try: code, info = await self.command("MLST " + str(path), "2xx") name, info = self.parse_mlsx_line(info[1].lstrip()) return info except errors.StatusCodeError as e: if not e.received_codes[-1].matches("50x"): raise for p, info in await self.list(path.parent): if p.name == path.name: return info else: raise errors.StatusCodeError( Code("2xx"), Code("550"), "path does not exists", ) async def is_file(self, path): """ :py:func:`asyncio.coroutine` Checks if path is file. :param path: path to check :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :rtype: :py:class:`bool` """ info = await self.stat(path) return info["type"] == "file" async def is_dir(self, path): """ :py:func:`asyncio.coroutine` Checks if path is dir. :param path: path to check :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :rtype: :py:class:`bool` """ info = await self.stat(path) return info["type"] == "dir" async def exists(self, path): """ :py:func:`asyncio.coroutine` Check path for existence. :param path: path to check :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :rtype: :py:class:`bool` """ try: await self.stat(path) return True except errors.StatusCodeError as e: if e.received_codes[-1].matches("550"): return False raise async def rename(self, source, destination): """ :py:func:`asyncio.coroutine` Rename (move) file or directory. :param source: path to rename :type source: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :param destination: path new name :type destination: :py:class:`str` or :py:class:`pathlib.PurePosixPath` """ await self.command("RNFR " + str(source), "350") await self.command("RNTO " + str(destination), "2xx") async def remove_file(self, path): """ :py:func:`asyncio.coroutine` Low level remove method for removing file. 
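
For example, removing a hypothetical file::

    >>> await client.remove_file("tmp.bin")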
:param path: file to remove :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` """ await self.command("DELE " + str(path), "2xx") async def remove(self, path): """ :py:func:`asyncio.coroutine` High level remove method for removing path recursively (file or directory). :param path: path to remove :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` """ if await self.exists(path): info = await self.stat(path) if info["type"] == "file": await self.remove_file(path) elif info["type"] == "dir": for name, info in (await self.list(path)): if info["type"] in ("dir", "file"): await self.remove(name) await self.remove_directory(path) def upload_stream(self, destination, *, offset=0): """ Create stream for write data to `destination` file. :param destination: destination path of file on server side :type destination: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :param offset: byte offset for stream start position :type offset: :py:class:`int` :rtype: :py:class:`aioftp.DataConnectionThrottleStreamIO` """ return self.get_stream( "STOR " + str(destination), "1xx", offset=offset, ) def append_stream(self, destination, *, offset=0): """ Create stream for append (write) data to `destination` file. :param destination: destination path of file on server side :type destination: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :param offset: byte offset for stream start position :type offset: :py:class:`int` :rtype: :py:class:`aioftp.DataConnectionThrottleStreamIO` """ return self.get_stream( "APPE " + str(destination), "1xx", offset=offset, ) async def upload(self, source, destination="", *, write_into=False, block_size=DEFAULT_BLOCK_SIZE): """ :py:func:`asyncio.coroutine` High level upload method for uploading files and directories recursively from file system. :param source: source path of file or directory on client side :type source: :py:class:`str` or :py:class:`pathlib.Path` :param destination: destination path of file or directory on server side :type destination: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :param write_into: write source into destination (if you want upload file and change it name, as well with directories) :type write_into: :py:class:`bool` :param block_size: block size for transaction :type block_size: :py:class:`int` """ source = pathlib.Path(source) destination = pathlib.PurePosixPath(destination) if not write_into: destination = destination / source.name if await self.path_io.is_file(source): await self.make_directory(destination.parent) async with self.path_io.open(source, mode="rb") as file_in, \ self.upload_stream(destination) as stream: async for block in file_in.iter_by_block(block_size): await stream.write(block) elif await self.path_io.is_dir(source): await self.make_directory(destination) sources = collections.deque([source]) while sources: src = sources.popleft() async for path in self.path_io.list(src): if write_into: relative = destination.name / path.relative_to(source) else: relative = path.relative_to(source.parent) if await self.path_io.is_dir(path): await self.make_directory(relative) sources.append(path) else: await self.upload( path, relative, write_into=True, block_size=block_size ) def download_stream(self, source, *, offset=0): """ :py:func:`asyncio.coroutine` Create stream for read data from `source` file. 
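
The stream is an async context manager, e.g. for a hypothetical
file::

    >>> async with client.download_stream("tmp.bin") as stream:
    ...     async for block in stream.iter_by_block():
    ...         ...  # process block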
:param source: source path of file on server side :type source: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :param offset: byte offset for stream start position :type offset: :py:class:`int` :rtype: :py:class:`aioftp.DataConnectionThrottleStreamIO` """ return self.get_stream("RETR " + str(source), "1xx", offset=offset) async def download(self, source, destination="", *, write_into=False, block_size=DEFAULT_BLOCK_SIZE): """ :py:func:`asyncio.coroutine` High level download method for downloading files and directories recursively and save them to the file system. :param source: source path of file or directory on server side :type source: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :param destination: destination path of file or directory on client side :type destination: :py:class:`str` or :py:class:`pathlib.Path` :param write_into: write source into destination (if you want download file and change it name, as well with directories) :type write_into: :py:class:`bool` :param block_size: block size for transaction :type block_size: :py:class:`int` """ source = pathlib.PurePosixPath(source) destination = pathlib.Path(destination) if not write_into: destination = destination / source.name if await self.is_file(source): await self.path_io.mkdir(destination.parent, parents=True, exist_ok=True) async with self.path_io.open(destination, mode="wb") as file_out, \ self.download_stream(source) as stream: async for block in stream.iter_by_block(block_size): await file_out.write(block) elif await self.is_dir(source): await self.path_io.mkdir(destination, parents=True, exist_ok=True) for name, info in (await self.list(source)): full = destination / name.relative_to(source) if info["type"] in ("file", "dir"): await self.download(name, full, write_into=True, block_size=block_size) async def quit(self): """ :py:func:`asyncio.coroutine` Send "QUIT" and close connection. """ await self.command("QUIT", "2xx") self.close() async def _do_epsv(self): code, info = await self.command("EPSV", "229") ip, port = self.parse_epsv_response(info[-1]) return ip, port async def _do_pasv(self): code, info = await self.command("PASV", "227") ip, port = self.parse_pasv_response(info[-1]) return ip, port async def get_passive_connection(self, conn_type="I", commands=None): """ :py:func:`asyncio.coroutine` Getting pair of reader, writer for passive connection with server. :param conn_type: connection type ("I", "A", "E", "L") :type conn_type: :py:class:`str` :param commands: sequence of commands to try to initiate passive server creation. First success wins. Default is EPSV, then PASV. Overwrites the parameters passed when initializing the client. 
:type commands: :py:class:`list` or :py:class:`None` :rtype: (:py:class:`asyncio.StreamReader`, :py:class:`asyncio.StreamWriter`) """ functions = { "epsv": self._do_epsv, "pasv": self._do_pasv, } if not commands: commands = self._passive_commands if not commands: raise ValueError("No passive commands provided") await self.command("TYPE " + conn_type, "200") for i, name in enumerate(commands, start=1): name = name.lower() if name not in functions: raise ValueError(f"{name!r} not in {set(functions)!r}") try: ip, port = await functions[name]() break except errors.StatusCodeError as e: is_last = i == len(commands) if is_last or not e.received_codes[-1].matches("50x"): raise if ip in ("0.0.0.0", None): ip = self.server_host reader, writer = await self._open_connection(ip, port) return reader, writer @async_enterable async def get_stream(self, *command_args, conn_type="I", offset=0): """ :py:func:`asyncio.coroutine` Create :py:class:`aioftp.DataConnectionThrottleStreamIO` for straight read/write io. :param command_args: arguments for :py:meth:`aioftp.Client.command` :param conn_type: connection type ("I", "A", "E", "L") :type conn_type: :py:class:`str` :param offset: byte offset for stream start position :type offset: :py:class:`int` :rtype: :py:class:`aioftp.DataConnectionThrottleStreamIO` """ reader, writer = await self.get_passive_connection(conn_type) if offset: await self.command("REST " + str(offset), "350") await self.command(*command_args) stream = DataConnectionThrottleStreamIO( self, reader, writer, throttles={"_": self.throttle}, timeout=self.socket_timeout, ) return stream async def abort(self, *, wait=True): """ :py:func:`asyncio.coroutine` Request data transfer abort. :param wait: wait for abort response [426]→226 if `True` :type wait: :py:class:`bool` """ if wait: await self.command("ABOR", "226", "426") else: await self.command("ABOR") @classmethod @contextlib.asynccontextmanager async def context(cls, host, port=DEFAULT_PORT, user=DEFAULT_USER, password=DEFAULT_PASSWORD, account=DEFAULT_ACCOUNT, **kwargs): """ Classmethod async context manager. This create :py:class:`aioftp.Client`, make async call to :py:meth:`aioftp.Client.connect`, :py:meth:`aioftp.Client.login` on enter and :py:meth:`aioftp.Client.quit` on exit. :param host: host name for connection :type host: :py:class:`str` :param port: port number for connection :type port: :py:class:`int` :param user: username :type user: :py:class:`str` :param password: password :type password: :py:class:`str` :param account: account (almost always blank) :type account: :py:class:`str` :param **kwargs: keyword arguments, which passed to :py:class:`aioftp.Client` :: >>> async with aioftp.Client.context("127.0.0.1") as client: ... 
# do """ client = cls(**kwargs) try: await client.connect(host, port) await client.login(user, password, account) except Exception: client.close() raise try: yield client finally: await client.quit() aioftp-0.21.4/aioftp/common.py000066400000000000000000000353021432163100400162010ustar00rootroot00000000000000import abc import asyncio import collections import functools import locale import threading from contextlib import contextmanager __all__ = ( "with_timeout", "StreamIO", "Throttle", "StreamThrottle", "ThrottleStreamIO", "END_OF_LINE", "DEFAULT_BLOCK_SIZE", "wrap_with_container", "AsyncStreamIterator", "AbstractAsyncLister", "AsyncListerMixin", "async_enterable", "DEFAULT_PORT", "DEFAULT_USER", "DEFAULT_PASSWORD", "DEFAULT_ACCOUNT", "setlocale", ) END_OF_LINE = "\r\n" DEFAULT_BLOCK_SIZE = 8192 DEFAULT_PORT = 21 DEFAULT_USER = "anonymous" DEFAULT_PASSWORD = "anon@" DEFAULT_ACCOUNT = "" HALF_OF_YEAR_IN_SECONDS = 15778476 TWO_YEARS_IN_SECONDS = ((365 * 3 + 366) * 24 * 60 * 60) / 2 def _now(): return asyncio.get_running_loop().time() def _with_timeout(name): def decorator(f): @functools.wraps(f) def wrapper(cls, *args, **kwargs): coro = f(cls, *args, **kwargs) timeout = getattr(cls, name) return asyncio.wait_for(coro, timeout) return wrapper return decorator def with_timeout(name): """ Method decorator, wraps method with :py:func:`asyncio.wait_for`. `timeout` argument takes from `name` decorator argument or "timeout". :param name: name of timeout attribute :type name: :py:class:`str` :raises asyncio.TimeoutError: if coroutine does not finished in timeout Wait for `self.timeout` :: >>> def __init__(self, ...): ... ... self.timeout = 1 ... ... @with_timeout ... async def foo(self, ...): ... ... pass Wait for custom timeout :: >>> def __init__(self, ...): ... ... self.foo_timeout = 1 ... ... @with_timeout("foo_timeout") ... async def foo(self, ...): ... ... pass """ if isinstance(name, str): return _with_timeout(name) else: return _with_timeout("timeout")(name) class AsyncStreamIterator: def __init__(self, read_coro): self.read_coro = read_coro def __aiter__(self): return self async def __anext__(self): data = await self.read_coro() if data: return data else: raise StopAsyncIteration class AsyncListerMixin: """ Add ability to `async for` context to collect data to list via await. :: >>> class Context(AsyncListerMixin): ... ... >>> results = await Context(...) """ async def _to_list(self): items = [] async for item in self: items.append(item) return items def __await__(self): return self._to_list().__await__() class AbstractAsyncLister(AsyncListerMixin, abc.ABC): """ Abstract context with ability to collect all iterables into :py:class:`list` via `await` with optional timeout (via :py:func:`aioftp.with_timeout`) :param timeout: timeout for __anext__ operation :type timeout: :py:class:`None`, :py:class:`int` or :py:class:`float` :: >>> class Lister(AbstractAsyncLister): ... ... @with_timeout ... async def __anext__(self): ... ... :: >>> async for block in Lister(...): ... ... :: >>> result = await Lister(...) >>> result [block, block, block, ...] """ def __init__(self, *, timeout=None): super().__init__() self.timeout = timeout def __aiter__(self): return self @with_timeout @abc.abstractmethod async def __anext__(self): """ :py:func:`asyncio.coroutine` Abstract method """ def async_enterable(f): """ Decorator. Bring coroutine result up, so it can be used as async context :: >>> async def foo(): ... ... ... ... return AsyncContextInstance(...) ... ... ctx = await foo() ... async with ctx: ... 
... # do :: >>> @async_enterable ... async def foo(): ... ... ... ... return AsyncContextInstance(...) ... ... async with foo() as ctx: ... ... # do ... ... ctx = await foo() ... async with ctx: ... ... # do """ @functools.wraps(f) def wrapper(*args, **kwargs): class AsyncEnterableInstance: async def __aenter__(self): self.context = await f(*args, **kwargs) return await self.context.__aenter__() async def __aexit__(self, *args, **kwargs): await self.context.__aexit__(*args, **kwargs) def __await__(self): return f(*args, **kwargs).__await__() return AsyncEnterableInstance() return wrapper def wrap_with_container(o): if isinstance(o, str): o = (o,) return o class StreamIO: """ Stream input/output wrapper with timeout. :param reader: stream reader :type reader: :py:class:`asyncio.StreamReader` :param writer: stream writer :type writer: :py:class:`asyncio.StreamWriter` :param timeout: socket timeout for read/write operations :type timeout: :py:class:`int`, :py:class:`float` or :py:class:`None` :param read_timeout: socket timeout for read operations, overrides `timeout` :type read_timeout: :py:class:`int`, :py:class:`float` or :py:class:`None` :param write_timeout: socket timeout for write operations, overrides `timeout` :type write_timeout: :py:class:`int`, :py:class:`float` or :py:class:`None` """ def __init__(self, reader, writer, *, timeout=None, read_timeout=None, write_timeout=None): self.reader = reader self.writer = writer self.read_timeout = read_timeout or timeout self.write_timeout = write_timeout or timeout @with_timeout("read_timeout") async def readline(self): """ :py:func:`asyncio.coroutine` Proxy for :py:meth:`asyncio.StreamReader.readline`. """ return await self.reader.readline() @with_timeout("read_timeout") async def read(self, count=-1): """ :py:func:`asyncio.coroutine` Proxy for :py:meth:`asyncio.StreamReader.read`. :param count: block size for read operation :type count: :py:class:`int` """ return await self.reader.read(count) @with_timeout("read_timeout") async def readexactly(self, count): """ :py:func:`asyncio.coroutine` Proxy for :py:meth:`asyncio.StreamReader.readexactly`. :param count: block size for read operation :type count: :py:class:`int` """ return await self.reader.readexactly(count) @with_timeout("write_timeout") async def write(self, data): """ :py:func:`asyncio.coroutine` Combination of :py:meth:`asyncio.StreamWriter.write` and :py:meth:`asyncio.StreamWriter.drain`. :param data: data to write :type data: :py:class:`bytes` """ self.writer.write(data) await self.writer.drain() def close(self): """ Close connection. """ self.writer.close() class Throttle: """ Throttle for streams. 
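
Sleeps before IO just long enough to keep the average speed at
`limit` bytes per second, e.g. a hypothetical 1 MiB/s limit::

    >>> throttle = Throttle(limit=1024 * 1024)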
:param limit: speed limit in bytes or :py:class:`None` for unlimited :type limit: :py:class:`int` or :py:class:`None` :param reset_rate: time in seconds for «round» throttle memory (to deal with float precision when divide) :type reset_rate: :py:class:`int` or :py:class:`float` """ def __init__(self, *, limit=None, reset_rate=10): self._limit = limit self.reset_rate = reset_rate self._start = None self._sum = 0 async def wait(self): """ :py:func:`asyncio.coroutine` Wait until can do IO """ if self._limit is not None and self._limit > 0 and \ self._start is not None: now = _now() end = self._start + self._sum / self._limit await asyncio.sleep(max(0, end - now)) def append(self, data, start): """ Count `data` for throttle :param data: bytes of data for count :type data: :py:class:`bytes` :param start: start of read/write time from :py:meth:`asyncio.BaseEventLoop.time` :type start: :py:class:`float` """ if self._limit is not None and self._limit > 0: if self._start is None: self._start = start if start - self._start > self.reset_rate: self._sum -= round((start - self._start) * self._limit) self._start = start self._sum += len(data) @property def limit(self): """ Throttle limit """ return self._limit @limit.setter def limit(self, value): """ Set throttle limit :param value: bytes per second :type value: :py:class:`int` or :py:class:`None` """ self._limit = value self._start = None self._sum = 0 def clone(self): """ Clone throttle without memory """ return Throttle(limit=self._limit, reset_rate=self.reset_rate) def __repr__(self): return f"{self.__class__.__name__}(limit={self._limit!r}, " \ f"reset_rate={self.reset_rate!r})" class StreamThrottle(collections.namedtuple("StreamThrottle", "read write")): """ Stream throttle with `read` and `write` :py:class:`aioftp.Throttle` :param read: stream read throttle :type read: :py:class:`aioftp.Throttle` :param write: stream write throttle :type write: :py:class:`aioftp.Throttle` """ def clone(self): """ Clone throttles without memory """ return StreamThrottle( read=self.read.clone(), write=self.write.clone() ) @classmethod def from_limits(cls, read_speed_limit=None, write_speed_limit=None): """ Simple wrapper for creation :py:class:`aioftp.StreamThrottle` :param read_speed_limit: stream read speed limit in bytes or :py:class:`None` for unlimited :type read_speed_limit: :py:class:`int` or :py:class:`None` :param write_speed_limit: stream write speed limit in bytes or :py:class:`None` for unlimited :type write_speed_limit: :py:class:`int` or :py:class:`None` """ return cls(read=Throttle(limit=read_speed_limit), write=Throttle(limit=write_speed_limit)) class ThrottleStreamIO(StreamIO): """ Throttled :py:class:`aioftp.StreamIO`. `ThrottleStreamIO` is subclass of :py:class:`aioftp.StreamIO`. `throttles` attribute is dictionary of `name`: :py:class:`aioftp.StreamThrottle` pairs :param *args: positional arguments for :py:class:`aioftp.StreamIO` :param **kwargs: keyword arguments for :py:class:`aioftp.StreamIO` :param throttles: dictionary of throttles :type throttles: :py:class:`dict` with :py:class:`aioftp.Throttle` values :: >>> self.stream = ThrottleStreamIO( ... reader, ... writer, ... throttles={ ... "main": StreamThrottle( ... read=Throttle(...), ... write=Throttle(...) ... ) ... }, ... timeout=timeout ... 
) """ def __init__(self, *args, throttles={}, **kwargs): super().__init__(*args, **kwargs) self.throttles = throttles async def wait(self, name): """ :py:func:`asyncio.coroutine` Wait for all throttles :param name: name of throttle to acquire ("read" or "write") :type name: :py:class:`str` """ tasks = [] for throttle in self.throttles.values(): curr_throttle = getattr(throttle, name) if curr_throttle.limit: tasks.append(asyncio.create_task(curr_throttle.wait())) if tasks: await asyncio.wait(tasks) def append(self, name, data, start): """ Update timeout for all throttles :param name: name of throttle to append to ("read" or "write") :type name: :py:class:`str` :param data: bytes of data for count :type data: :py:class:`bytes` :param start: start of read/write time from :py:meth:`asyncio.BaseEventLoop.time` :type start: :py:class:`float` """ for throttle in self.throttles.values(): getattr(throttle, name).append(data, start) async def read(self, count=-1): """ :py:func:`asyncio.coroutine` :py:meth:`aioftp.StreamIO.read` proxy """ await self.wait("read") start = _now() data = await super().read(count) self.append("read", data, start) return data async def readline(self): """ :py:func:`asyncio.coroutine` :py:meth:`aioftp.StreamIO.readline` proxy """ await self.wait("read") start = _now() data = await super().readline() self.append("read", data, start) return data async def write(self, data): """ :py:func:`asyncio.coroutine` :py:meth:`aioftp.StreamIO.write` proxy """ await self.wait("write") start = _now() await super().write(data) self.append("write", data, start) async def __aenter__(self): return self async def __aexit__(self, *args): self.close() def iter_by_line(self): """ Read/iterate stream by line. :rtype: :py:class:`aioftp.AsyncStreamIterator` :: >>> async for line in stream.iter_by_line(): ... ... """ return AsyncStreamIterator(self.readline) def iter_by_block(self, count=DEFAULT_BLOCK_SIZE): """ Read/iterate stream by block. :rtype: :py:class:`aioftp.AsyncStreamIterator` :: >>> async for block in stream.iter_by_block(block_size): ... ... """ return AsyncStreamIterator(lambda: self.read(count)) LOCALE_LOCK = threading.Lock() @contextmanager def setlocale(name): """ Context manager with threading lock for set locale on enter, and set it back to original state on exit. :: >>> with setlocale("C"): ... ... """ with LOCALE_LOCK: old_locale = locale.setlocale(locale.LC_ALL) try: yield locale.setlocale(locale.LC_ALL, name) finally: locale.setlocale(locale.LC_ALL, old_locale) aioftp-0.21.4/aioftp/errors.py000066400000000000000000000041461432163100400162270ustar00rootroot00000000000000from . import common __all__ = ( "AIOFTPException", "StatusCodeError", "PathIsNotAbsolute", "PathIOError", "NoAvailablePort", ) class AIOFTPException(Exception): """ Base exception class. """ class StatusCodeError(AIOFTPException): """ Raised for unexpected or "bad" status codes. :param expected_codes: tuple of expected codes or expected code :type expected_codes: :py:class:`tuple` of :py:class:`aioftp.Code` or :py:class:`aioftp.Code` :param received_codes: tuple of received codes or received code :type received_codes: :py:class:`tuple` of :py:class:`aioftp.Code` or :py:class:`aioftp.Code` :param info: list of lines with server response :type info: :py:class:`list` of :py:class:`str` :: >>> try: ... # something with aioftp ... except StatusCodeError as e: ... print(e.expected_codes, e.received_codes, e.info) ... # analyze state Exception members are tuples, even for one code. 
""" def __init__(self, expected_codes, received_codes, info): super().__init__(f"Waiting for {expected_codes} but got " f"{received_codes} {info!r}") self.expected_codes = common.wrap_with_container(expected_codes) self.received_codes = common.wrap_with_container(received_codes) self.info = info class PathIsNotAbsolute(AIOFTPException): """ Raised when "path" is not absolute. """ class PathIOError(AIOFTPException): """ Universal exception for any path io errors. :: >>> try: ... # some client/server path operation ... except PathIOError as exc: ... type, value, traceback = exc.reason ... if isinstance(value, SomeException): ... # handle ... elif ... ... # handle """ def __init__(self, *args, reason=None, **kwargs): super().__init__(*args, **kwargs) self.reason = reason class NoAvailablePort(AIOFTPException, OSError): """ Raised when there is no available data port """ aioftp-0.21.4/aioftp/pathio.py000066400000000000000000000551651432163100400162060ustar00rootroot00000000000000import abc import asyncio import collections import functools import io import operator import pathlib import stat import sys import time from . import errors from .common import ( DEFAULT_BLOCK_SIZE, AbstractAsyncLister, AsyncStreamIterator, with_timeout, ) __all__ = ( "AbstractPathIO", "PathIO", "AsyncPathIO", "MemoryPathIO", "PathIONursery", ) class AsyncPathIOContext: """ Async pathio context. Usage: :: >>> async with pathio.open(filename) as file_in: ... async for block in file_in.iter_by_block(size): ... # do or borring: :: >>> file = await pathio.open(filename) ... data = await file.read(size) ... await file.write(data) ... await file.close() """ def __init__(self, pathio, args, kwargs): self.close = None self.pathio = pathio self.args = args self.kwargs = kwargs async def __aenter__(self): self.file = await self.pathio._open(*self.args, **self.kwargs) self.seek = functools.partial(self.pathio.seek, self.file) self.write = functools.partial(self.pathio.write, self.file) self.read = functools.partial(self.pathio.read, self.file) self.close = functools.partial(self.pathio.close, self.file) return self async def __aexit__(self, *args): if self.close is not None: await self.close() def __await__(self): return self.__aenter__().__await__() def iter_by_block(self, count=DEFAULT_BLOCK_SIZE): return AsyncStreamIterator(lambda: self.read(count)) def universal_exception(coro): """ Decorator. Reraising any exception (except `CancelledError` and `NotImplementedError`) with universal exception :py:class:`aioftp.PathIOError` """ @functools.wraps(coro) async def wrapper(*args, **kwargs): try: return await coro(*args, **kwargs) except (asyncio.CancelledError, NotImplementedError, StopAsyncIteration): raise except Exception as exc: raise errors.PathIOError(reason=sys.exc_info()) from exc return wrapper class PathIONursery: def __init__(self, factory): self.factory = factory self.state = None def __call__(self, *args, **kwargs): instance = self.factory(*args, state=self.state, **kwargs) if self.state is None: self.state = instance.state return instance def defend_file_methods(coro): """ Decorator. Raises exception when file methods called with wrapped by :py:class:`aioftp.AsyncPathIOContext` file object. 
""" @functools.wraps(coro) async def wrapper(self, file, *args, **kwargs): if isinstance(file, AsyncPathIOContext): raise ValueError("Native path io file methods can not be used " "with wrapped file object") return await coro(self, file, *args, **kwargs) return wrapper class AbstractPathIO(abc.ABC): """ Abstract class for path io operations. :param timeout: timeout used by `with_timeout` decorator :type timeout: :py:class:`float`, :py:class:`int` or `None` :param connection: server connection that is accessing this PathIO :type connection: :py:class:`aioftp.Connection` :param state: shared pathio state per server """ def __init__(self, timeout=None, connection=None, state=None): self.timeout = timeout self.connection = connection @property def state(self): """ Shared pathio state per server """ @universal_exception @abc.abstractmethod async def exists(self, path): """ :py:func:`asyncio.coroutine` Check if path exists :param path: path to check :type path: :py:class:`pathlib.Path` :rtype: :py:class:`bool` """ @universal_exception @abc.abstractmethod async def is_dir(self, path): """ :py:func:`asyncio.coroutine` Check if path is directory :param path: path to check :type path: :py:class:`pathlib.Path` :rtype: :py:class:`bool` """ @universal_exception @abc.abstractmethod async def is_file(self, path): """ :py:func:`asyncio.coroutine` Check if path is file :param path: path to check :type path: :py:class:`pathlib.Path` :rtype: :py:class:`bool` """ @universal_exception @abc.abstractmethod async def mkdir(self, path, *, parents=False, exist_ok=False): """ :py:func:`asyncio.coroutine` Make directory :param path: path to create :type path: :py:class:`pathlib.Path` :param parents: create parents is does not exists :type parents: :py:class:`bool` :param exist_ok: do not raise exception if directory already exists :type exist_ok: :py:class:`bool` """ @universal_exception @abc.abstractmethod async def rmdir(self, path): """ :py:func:`asyncio.coroutine` Remove directory :param path: path to remove :type path: :py:class:`pathlib.Path` """ @universal_exception @abc.abstractmethod async def unlink(self, path): """ :py:func:`asyncio.coroutine` Remove file :param path: path to remove :type path: :py:class:`pathlib.Path` """ @abc.abstractmethod def list(self, path): """ Create instance of subclass of :py:class:`aioftp.AbstractAsyncLister`. You should subclass and implement `__anext__` method for :py:class:`aioftp.AbstractAsyncLister` and return new instance. :param path: path to list :type path: :py:class:`pathlib.Path` :rtype: :py:class:`aioftp.AbstractAsyncLister` Usage: :: >>> async for p in pathio.list(path): ... # do or borring instance of :py:class:`list`: :: >>> paths = await pathio.list(path) >>> paths [path, path, path, ...] """ @universal_exception @abc.abstractmethod async def stat(self, path): """ :py:func:`asyncio.coroutine` Get path stats :param path: path, which stats need :type path: :py:class:`pathlib.Path` :return: path stats. For proper work you need only this stats: st_size, st_mtime, st_ctime, st_nlink, st_mode :rtype: same as :py:class:`os.stat_result` """ @universal_exception @abc.abstractmethod async def _open(self, path, mode): """ :py:func:`asyncio.coroutine` Open file. You should implement "mode" argument, which can be: "rb", "wb", "ab" (read, write, append. all binary). 
Return type depends on implementation, anyway the only place you need this file-object is in your implementation of read, write and close :param path: path to create :type path: :py:class:`pathlib.Path` :param mode: specifies the mode in which the file is opened ("rb", "wb", "ab", "r+b" (read, write, append, read/write, all binary)) :type mode: :py:class:`str` :return: file-object """ def open(self, *args, **kwargs): """ Create instance of :py:class:`aioftp.pathio.AsyncPathIOContext`, parameters passed to :py:meth:`aioftp.AbstractPathIO._open` :rtype: :py:class:`aioftp.pathio.AsyncPathIOContext` """ return AsyncPathIOContext(self, args, kwargs) @universal_exception @defend_file_methods @abc.abstractmethod async def seek(self, file, offset, whence=io.SEEK_SET): """ :py:func:`asyncio.coroutine` Change the stream position to the given byte `offset`. Same behaviour as :py:meth:`io.IOBase.seek` :param file: file-object from :py:class:`aioftp.AbstractPathIO.open` :param offset: relative byte offset :type offset: :py:class:`int` :param whence: base position for offset :type whence: :py:class:`int` """ @universal_exception @defend_file_methods @abc.abstractmethod async def write(self, file, data): """ :py:func:`asyncio.coroutine` Write some data to file :param file: file-object from :py:class:`aioftp.AbstractPathIO.open` :param data: data to write :type data: :py:class:`bytes` """ @universal_exception @defend_file_methods @abc.abstractmethod async def read(self, file, block_size): """ :py:func:`asyncio.coroutine` Read some data from file :param file: file-object from :py:class:`aioftp.AbstractPathIO.open` :param block_size: bytes count to read :type block_size: :py:class:`int` :rtype: :py:class:`bytes` """ @universal_exception @defend_file_methods @abc.abstractmethod async def close(self, file): """ :py:func:`asyncio.coroutine` Close file :param file: file-object from :py:class:`aioftp.AbstractPathIO.open` """ @universal_exception @abc.abstractmethod async def rename(self, source, destination): """ :py:func:`asyncio.coroutine` Rename path :param source: rename from :type source: :py:class:`pathlib.Path` :param destination: rename to :type destination: :py:class:`pathlib.Path` """ class PathIO(AbstractPathIO): """ Blocking path io. Directly based on :py:class:`pathlib.Path` methods. 
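A minimal usage sketch (the file name is an assumed example): :: >>> pathio = PathIO() >>> path = pathlib.Path("foo.bin") >>> if await pathio.exists(path): ... async with pathio.open(path, mode="rb") as file_in: ... data = await file_in.read(DEFAULT_BLOCK_SIZE)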
""" @universal_exception async def exists(self, path): return path.exists() @universal_exception async def is_dir(self, path): return path.is_dir() @universal_exception async def is_file(self, path): return path.is_file() @universal_exception async def mkdir(self, path, *, parents=False, exist_ok=False): return path.mkdir(parents=parents, exist_ok=exist_ok) @universal_exception async def rmdir(self, path): return path.rmdir() @universal_exception async def unlink(self, path): return path.unlink() def list(self, path): class Lister(AbstractAsyncLister): iter = None @universal_exception async def __anext__(self): if self.iter is None: self.iter = path.glob("*") try: return next(self.iter) except StopIteration: raise StopAsyncIteration return Lister(timeout=self.timeout) @universal_exception async def stat(self, path): return path.stat() @universal_exception async def _open(self, path, *args, **kwargs): return path.open(*args, **kwargs) @universal_exception @defend_file_methods async def seek(self, file, *args, **kwargs): return file.seek(*args, **kwargs) @universal_exception @defend_file_methods async def write(self, file, *args, **kwargs): return file.write(*args, **kwargs) @universal_exception @defend_file_methods async def read(self, file, *args, **kwargs): return file.read(*args, **kwargs) @universal_exception @defend_file_methods async def close(self, file): return file.close() @universal_exception async def rename(self, source, destination): return source.rename(destination) def _blocking_io(f): @functools.wraps(f) async def wrapper(self, *args, **kwargs): return await asyncio.get_running_loop().run_in_executor( self.executor, functools.partial(f, self, *args, **kwargs), ) return wrapper class AsyncPathIO(AbstractPathIO): """ Non-blocking path io. Based on :py:meth:`asyncio.BaseEventLoop.run_in_executor` and :py:class:`pathlib.Path` methods. It's really slow, so it's better to avoid usage of this path io layer. 
:param executor: executor for running blocking tasks :type executor: :py:class:`concurrent.futures.Executor` """ def __init__(self, *args, executor=None, **kwargs): super().__init__(*args, **kwargs) self.executor = executor @universal_exception @with_timeout @_blocking_io def exists(self, path): return path.exists() @universal_exception @with_timeout @_blocking_io def is_dir(self, path): return path.is_dir() @universal_exception @with_timeout @_blocking_io def is_file(self, path): return path.is_file() @universal_exception @with_timeout @_blocking_io def mkdir(self, path, *, parents=False, exist_ok=False): return path.mkdir(parents=parents, exist_ok=exist_ok) @universal_exception @with_timeout @_blocking_io def rmdir(self, path): return path.rmdir() @universal_exception @with_timeout @_blocking_io def unlink(self, path): return path.unlink() def list(self, path): class Lister(AbstractAsyncLister): iter = None def __init__(self, *args, executor=None, **kwargs): super().__init__(*args, **kwargs) self.executor = executor def worker(self): try: return next(self.iter) except StopIteration: raise StopAsyncIteration @universal_exception @with_timeout @_blocking_io def __anext__(self): if self.iter is None: self.iter = path.glob("*") return self.worker() return Lister(timeout=self.timeout, executor=self.executor) @universal_exception @with_timeout @_blocking_io def stat(self, path): return path.stat() @universal_exception @with_timeout @_blocking_io def _open(self, path, *args, **kwargs): return path.open(*args, **kwargs) @universal_exception @defend_file_methods @with_timeout @_blocking_io def seek(self, file, *args, **kwargs): return file.seek(*args, **kwargs) @universal_exception @defend_file_methods @with_timeout @_blocking_io def write(self, file, *args, **kwargs): return file.write(*args, **kwargs) @universal_exception @defend_file_methods @with_timeout @_blocking_io def read(self, file, *args, **kwargs): return file.read(*args, **kwargs) @universal_exception @defend_file_methods @with_timeout @_blocking_io def close(self, file): return file.close() @universal_exception @with_timeout @_blocking_io def rename(self, source, destination): return source.rename(destination) class Node: def __init__(self, type, name, ctime=None, mtime=None, *, content): self.type = type self.name = name self.ctime = ctime or int(time.time()) self.mtime = mtime or int(time.time()) self.content = content def __repr__(self): return f"{self.__class__.__name__}(type={self.type!r}, " \ f"name={self.name!r}, ctime={self.ctime!r}, " \ f"mtime={self.mtime!r}, content={self.content!r})" class MemoryPathIO(AbstractPathIO): """ Non-blocking path io. Based on in-memory tree. It is just proof of concept and probably not so fast as it can be. 
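A common use is an in-memory server for tests (a sketch; ``users`` is assumed to be defined): :: >>> server = aioftp.Server(users, path_io_factory=aioftp.MemoryPathIO)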
""" Stats = collections.namedtuple( "Stats", ( "st_size", "st_ctime", "st_mtime", "st_nlink", "st_mode", ) ) def __init__(self, *args, state=None, cwd=None, **kwargs): super().__init__(*args, **kwargs) self.cwd = pathlib.PurePosixPath(cwd or "/") if state is None: self.fs = [Node("dir", "/", content=[])] else: self.fs = state @property def state(self): return self.fs def __repr__(self): return repr(self.fs) def _absolute(self, path): if not path.is_absolute(): path = self.cwd / path return path def get_node(self, path): nodes = self.fs node = None path = self._absolute(path) for part in path.parts: if not isinstance(nodes, list): return for node in nodes: if node.name == part: nodes = node.content break else: return return node @universal_exception async def exists(self, path): return self.get_node(path) is not None @universal_exception async def is_dir(self, path): node = self.get_node(path) return not (node is None or node.type != "dir") @universal_exception async def is_file(self, path): node = self.get_node(path) return not (node is None or node.type != "file") @universal_exception async def mkdir(self, path, *, parents=False, exist_ok=False): path = self._absolute(path) node = self.get_node(path) if node: if node.type != "dir" or not exist_ok: raise FileExistsError elif not parents: parent = self.get_node(path.parent) if parent is None: raise FileNotFoundError if parent.type != "dir": raise NotADirectoryError node = Node("dir", path.name, content=[]) parent.content.append(node) else: nodes = self.fs for part in path.parts: if isinstance(nodes, list): for node in nodes: if node.name == part: nodes = node.content break else: node = Node("dir", part, content=[]) nodes.append(node) nodes = node.content else: raise NotADirectoryError @universal_exception async def rmdir(self, path): node = self.get_node(path) if node is None: raise FileNotFoundError if node.type != "dir": raise NotADirectoryError if node.content: raise OSError("Directory not empty") parent = self.get_node(path.parent) for i, node in enumerate(parent.content): if node.name == path.name: break parent.content.pop(i) @universal_exception async def unlink(self, path): node = self.get_node(path) if node is None: raise FileNotFoundError if node.type != "file": raise IsADirectoryError parent = self.get_node(path.parent) for i, node in enumerate(parent.content): if node.name == path.name: break parent.content.pop(i) def list(self, path): class Lister(AbstractAsyncLister): iter = None @universal_exception async def __anext__(cls): if cls.iter is None: node = self.get_node(path) if node is None or node.type != "dir": cls.iter = iter(()) else: names = map(operator.attrgetter("name"), node.content) paths = map(lambda name: path / name, names) cls.iter = iter(paths) try: return next(cls.iter) except StopIteration: raise StopAsyncIteration return Lister(timeout=self.timeout) @universal_exception async def stat(self, path): node = self.get_node(path) if node is None: raise FileNotFoundError if node.type == "file": size = len(node.content.getbuffer()) mode = stat.S_IFREG | 0o666 else: size = 0 mode = stat.S_IFDIR | 0o777 return MemoryPathIO.Stats( size, node.ctime, node.mtime, 1, mode, ) @universal_exception async def _open(self, path, mode="rb", *args, **kwargs): if mode == "rb": node = self.get_node(path) if node is None: raise FileNotFoundError file_like = node.content file_like.seek(0, io.SEEK_SET) elif mode in ("wb", "ab", "r+b"): node = self.get_node(path) if node is None: parent = self.get_node(path.parent) if parent is None or 
parent.type != "dir": raise FileNotFoundError new_node = Node("file", path.name, content=io.BytesIO()) parent.content.append(new_node) file_like = new_node.content elif node.type != "file": raise IsADirectoryError else: if mode == "wb": file_like = node.content = io.BytesIO() elif mode == "ab": file_like = node.content file_like.seek(0, io.SEEK_END) elif mode == "r+b": file_like = node.content file_like.seek(0, io.SEEK_SET) else: raise ValueError(f"invalid mode: {mode}") return file_like @universal_exception @defend_file_methods async def seek(self, file, *args, **kwargs): return file.seek(*args, **kwargs) @universal_exception @defend_file_methods async def write(self, file, *args, **kwargs): file.write(*args, **kwargs) file.mtime = int(time.time()) @universal_exception @defend_file_methods async def read(self, file, *args, **kwargs): return file.read(*args, **kwargs) @universal_exception @defend_file_methods async def close(self, file): pass @universal_exception async def rename(self, source, destination): if source != destination: sparent = self.get_node(source.parent) dparent = self.get_node(destination.parent) snode = self.get_node(source) if None in (snode, dparent): raise FileNotFoundError for i, node in enumerate(sparent.content): if node.name == source.name: sparent.content.pop(i) snode.name = destination.name for i, node in enumerate(dparent.content): if node.name == destination.name: dparent.content[i] = snode break else: dparent.content.append(snode) aioftp-0.21.4/aioftp/server.py000066400000000000000000001542231432163100400162230ustar00rootroot00000000000000import abc import asyncio import collections import enum import errno import functools import logging import pathlib import socket import stat import sys import time from . import errors, pathio from .common import ( DEFAULT_BLOCK_SIZE, END_OF_LINE, HALF_OF_YEAR_IN_SECONDS, StreamThrottle, ThrottleStreamIO, setlocale, wrap_with_container, ) __all__ = ( "Permission", "User", "AbstractUserManager", "MemoryUserManager", "Connection", "AvailableConnections", "ConnectionConditions", "PathConditions", "PathPermissions", "worker", "Server", ) IS_PY37_PLUS = sys.version_info[:2] >= (3, 7) if IS_PY37_PLUS: get_current_task = asyncio.current_task else: get_current_task = asyncio.Task.current_task logger = logging.getLogger(__name__) class Permission: """ Path permission :param path: path :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :param readable: is readable :type readable: :py:class:`bool` :param writable: is writable :type writable: :py:class:`bool` """ def __init__(self, path="/", *, readable=True, writable=True): self.path = pathlib.PurePosixPath(path) self.readable = readable self.writable = writable def is_parent(self, other): try: other.relative_to(self.path) return True except ValueError: return False def __repr__(self): return f"{self.__class__.__name__}({self.path!r}, " \ f"readable={self.readable!r}, writable={self.writable!r})" class User: """ User description. 
:param login: user login :type login: :py:class:`str` :param password: user password :type password: :py:class:`str` :param base_path: real user path for file io operations :type base_path: :py:class:`str` or :py:class:`pathlib.Path` :param home_path: virtual user path for client representation (must be absolute) :type home_path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :param permissions: list of path permissions :type permissions: :py:class:`tuple` or :py:class:`list` of :py:class:`aioftp.Permission` :param maximum_connections: Maximum connections per user :type maximum_connections: :py:class:`int` :param read_speed_limit: read speed limit per user in bytes per second :type read_speed_limit: :py:class:`int` or :py:class:`None` :param write_speed_limit: write speed limit per user in bytes per second :type write_speed_limit: :py:class:`int` or :py:class:`None` :param read_speed_limit_per_connection: read speed limit per user connection in bytes per second :type read_speed_limit_per_connection: :py:class:`int` or :py:class:`None` :param write_speed_limit_per_connection: write speed limit per user connection in bytes per second :type write_speed_limit_per_connection: :py:class:`int` or :py:class:`None` """ def __init__(self, login=None, password=None, *, base_path=pathlib.Path("."), home_path=pathlib.PurePosixPath("/"), permissions=None, maximum_connections=None, read_speed_limit=None, write_speed_limit=None, read_speed_limit_per_connection=None, write_speed_limit_per_connection=None): self.login = login self.password = password self.base_path = pathlib.Path(base_path) self.home_path = pathlib.PurePosixPath(home_path) if not self.home_path.is_absolute(): raise errors.PathIsNotAbsolute(home_path) self.permissions = permissions or [Permission()] self.maximum_connections = maximum_connections self.read_speed_limit = read_speed_limit self.write_speed_limit = write_speed_limit self.read_speed_limit_per_connection = read_speed_limit_per_connection # damn 80 symbols self.write_speed_limit_per_connection = \ write_speed_limit_per_connection async def get_permissions(self, path): """ Return nearest parent permission for `path`. :param path: path which permission you want to know :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :rtype: :py:class:`aioftp.Permission` """ path = pathlib.PurePosixPath(path) parents = filter(lambda p: p.is_parent(path), self.permissions) perm = min( parents, key=lambda p: len(path.relative_to(p.path).parts), default=Permission(), ) return perm def __repr__(self): return f"{self.__class__.__name__}({self.login!r}, " \ f"{self.password!r}, base_path={self.base_path!r}, " \ f"home_path={self.home_path!r}, " \ f"permissions={self.permissions!r}, " \ f"maximum_connections={self.maximum_connections!r}, " \ f"read_speed_limit={self.read_speed_limit!r}, " \ f"write_speed_limit={self.write_speed_limit!r}, " \ f"read_speed_limit_per_connection=" \ f"{self.read_speed_limit_per_connection!r}, " \ f"write_speed_limit_per_connection=" \ f"{self.write_speed_limit_per_connection!r})" class AbstractUserManager(abc.ABC): """ Abstract user manager. 
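Subclasses implement :py:meth:`get_user` and :py:meth:`authenticate`; a sketch of a manager backed by an assumed ``credentials`` dict: :: >>> class DictUserManager(AbstractUserManager): ... async def get_user(self, login): ... if login not in credentials: ... return self.GetUserResponse.ERROR, None, "no such username" ... return (self.GetUserResponse.PASSWORD_REQUIRED, ... User(login), "password required") ... async def authenticate(self, user, password): ... return credentials.get(user.login) == password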
:param timeout: timeout used by `with_timeout` decorator :type timeout: :py:class:`float`, :py:class:`int` or :py:class:`None` """ GetUserResponse = enum.Enum( "UserManagerResponse", "OK PASSWORD_REQUIRED ERROR" ) def __init__(self, *, timeout=None): self.timeout = timeout @abc.abstractmethod async def get_user(self, login): """ :py:func:`asyncio.coroutine` Get user and response for USER call :param login: user's login :type login: :py:class:`str` """ @abc.abstractmethod async def authenticate(self, user, password): """ :py:func:`asyncio.coroutine` Check if user can be authenticated with provided password :param user: user :type user: :py:class:`aioftp.User` :param password: password :type password: :py:class:`str` :rtype: :py:class:`bool` """ async def notify_logout(self, user): """ :py:func:`asyncio.coroutine` Called when user connection is closed if user was initiated :param user: user :type user: :py:class:`aioftp.User` """ class MemoryUserManager(AbstractUserManager): """ A built-in user manager that keeps predefined set of users in memory. :param users: container of users :type users: :py:class:`list`, :py:class:`tuple`, etc. of :py:class:`aioftp.User` """ def __init__(self, users, *args, **kwargs): super().__init__(*args, **kwargs) self.users = users or [User()] self.available_connections = dict( (user, AvailableConnections(user.maximum_connections)) for user in self.users ) async def get_user(self, login): user = None for u in self.users: if u.login is None and user is None: user = u elif u.login == login: user = u break if user is None: state = AbstractUserManager.GetUserResponse.ERROR info = "no such username" elif self.available_connections[user].locked(): state = AbstractUserManager.GetUserResponse.ERROR info = f"too much connections for {user.login or 'anonymous'!r}" elif user.login is None: state = AbstractUserManager.GetUserResponse.OK info = "anonymous login" elif user.password is None: state = AbstractUserManager.GetUserResponse.OK info = "login without password" else: state = AbstractUserManager.GetUserResponse.PASSWORD_REQUIRED info = "password required" if state != AbstractUserManager.GetUserResponse.ERROR: self.available_connections[user].acquire() return state, user, info async def authenticate(self, user, password): return user.password == password async def notify_logout(self, user): self.available_connections[user].release() class Connection(collections.defaultdict): """ Connection state container for transparent work with futures for async wait :param kwargs: initialization parameters Container based on :py:class:`collections.defaultdict`, which holds :py:class:`asyncio.Future` as default factory. There is two layers of abstraction: * Low level based on simple dictionary keys to attributes mapping and available at Connection.future. * High level based on futures result and dictionary keys to attributes mapping and available at Connection. 
To clarify, here are groups of equivalent expressions :: >>> connection.future.foo >>> connection["foo"] >>> connection.foo >>> connection["foo"].result() >>> del connection.future.foo >>> del connection.foo >>> del connection["foo"] """ __slots__ = ("future",) class Container: def __init__(self, storage): self.storage = storage def __getattr__(self, name): return self.storage[name] def __delattr__(self, name): self.storage.pop(name) def __init__(self, **kwargs): super().__init__(asyncio.Future) self.future = Connection.Container(self) for k, v in kwargs.items(): self[k].set_result(v) def __getattr__(self, name): if name in self: return self[name].result() else: raise AttributeError(f"{name!r} not in storage") def __setattr__(self, name, value): if name in Connection.__slots__: super().__setattr__(name, value) else: if self[name].done(): self[name] = super().default_factory() self[name].set_result(value) def __delattr__(self, name): if name in self: self.pop(name) class AvailableConnections: """ Semaphore-like object. It never blocks, it only raises ValueError on bounds crossing. If value is :py:class:`None` there are no limits (no bounds checks). :param value: :type value: :py:class:`int` or :py:class:`None` """ def __init__(self, value=None): self.value = self.maximum_value = value def locked(self): """ Returns True if the semaphore-like can not be acquired. :rtype: :py:class:`bool` """ return self.value == 0 def acquire(self): """ Acquire, decrementing the internal counter by one. """ if self.value is not None: self.value -= 1 if self.value < 0: raise ValueError("Too many acquires") def release(self): """ Release, incrementing the internal counter by one. """ if self.value is not None: self.value += 1 if self.value > self.maximum_value: raise ValueError("Too many releases") class ConnectionConditions: """ Decorator for checking `connection` keys for existence or waiting for them. Available options: :param fields: * `ConnectionConditions.user_required` — required "user" key, user already identified * `ConnectionConditions.login_required` — required "logged" key, user already logged in. * `ConnectionConditions.passive_server_started` — required "passive_server" key, user has already sent PASV and the server awaits an incoming connection * `ConnectionConditions.data_connection_made` — required "data_connection" key, user already connected to passive connection * `ConnectionConditions.rename_from_required` — required "rename_from" key, user has already sent the source filename for rename :param wait: Indicates whether to wait for the parameters up to `connection.wait_future_timeout` :type wait: :py:class:`bool` :param fail_code: return code on failure :type fail_code: :py:class:`str` :param fail_info: return information string on failure. If :py:class:`None`, then use default string :type fail_info: :py:class:`str` :: >>> @ConnectionConditions( ... ConnectionConditions.login_required, ... ConnectionConditions.passive_server_started, ... ConnectionConditions.data_connection_made, ... wait=True) ... def foo(self, connection, rest): ... ...
""" user_required = ("user", "no user (use USER firstly)") login_required = ("logged", "not logged in") passive_server_started = ( "passive_server", "no listen socket created (use PASV firstly)" ) data_connection_made = ("data_connection", "no data connection made") rename_from_required = ("rename_from", "no filename (use RNFR firstly)") def __init__(self, *fields, wait=False, fail_code="503", fail_info=None): self.fields = fields self.wait = wait self.fail_code = fail_code self.fail_info = fail_info def __call__(self, f): @functools.wraps(f) async def wrapper(cls, connection, rest, *args): futures = {connection[name]: msg for name, msg in self.fields} aggregate = asyncio.gather(*futures) if self.wait: timeout = connection.wait_future_timeout else: timeout = 0 try: await asyncio.wait_for( asyncio.shield(aggregate), timeout, ) except asyncio.TimeoutError: for future, message in futures.items(): if not future.done(): if self.fail_info is None: info = f"bad sequence of commands ({message})" else: info = self.fail_info connection.response(self.fail_code, info) return True return await f(cls, connection, rest, *args) return wrapper class PathConditions: """ Decorator for checking paths. Available options: * `path_must_exists` * `path_must_not_exists` * `path_must_be_dir` * `path_must_be_file` :: >>> @PathConditions( ... PathConditions.path_must_exists, ... PathConditions.path_must_be_dir) ... def foo(self, connection, path): ... ... """ path_must_exists = ("exists", False, "path does not exists") path_must_not_exists = ("exists", True, "path already exists") path_must_be_dir = ("is_dir", False, "path is not a directory") path_must_be_file = ("is_file", False, "path is not a file") def __init__(self, *conditions): self.conditions = conditions def __call__(self, f): @functools.wraps(f) async def wrapper(cls, connection, rest, *args): real_path, virtual_path = cls.get_paths(connection, rest) for name, fail, message in self.conditions: coro = getattr(connection.path_io, name) if await coro(real_path) == fail: connection.response("550", message) return True return await f(cls, connection, rest, *args) return wrapper class PathPermissions: """ Decorator for checking path permissions. There is two permissions right now: * `PathPermissions.readable` * `PathPermissions.writable` Decorator will check the permissions and return proper code and information to client if permission denied :: >>> @PathPermissions( ... PathPermissions.readable, ... PathPermissions.writable) ... def foo(self, connection, path): ... ... """ readable = "readable" writable = "writable" def __init__(self, *permissions): self.permissions = permissions def __call__(self, f): @functools.wraps(f) async def wrapper(cls, connection, rest, *args): real_path, virtual_path = cls.get_paths(connection, rest) current_permission = await connection.user.get_permissions( virtual_path, ) for permission in self.permissions: if not getattr(current_permission, permission): connection.response("550", "permission denied") return True return await f(cls, connection, rest, *args) return wrapper def worker(f): """ Decorator. Abortable worker. If wrapped task will be cancelled by dispatcher, decorator will send ftp codes of successful interrupt. :: >>> @worker ... async def worker(self, connection, rest): ... ... 
""" @functools.wraps(f) async def wrapper(cls, connection, rest): try: await f(cls, connection, rest) except asyncio.CancelledError: connection.response("426", "transfer aborted") connection.response("226", "abort successful") return wrapper class Server: """ FTP server. :param users: list of users or user manager object :type users: :py:class:`tuple` or :py:class:`list` of :py:class:`aioftp.User` or instance of :py:class:`aioftp.AbstractUserManager` subclass :param block_size: bytes count for socket read operations :type block_size: :py:class:`int` :param socket_timeout: timeout for socket read and write operations :type socket_timeout: :py:class:`float`, :py:class:`int` or :py:class:`None` :param idle_timeout: timeout for socket read operations, another words: how long user can keep silence without sending commands :type idle_timeout: :py:class:`float`, :py:class:`int` or :py:class:`None` :param wait_future_timeout: wait for data connection to establish :type wait_future_timeout: :py:class:`float`, :py:class:`int` or :py:class:`None` :param path_timeout: timeout for path-related operations (make directory, unlink file, etc.) :type path_timeout: :py:class:`float`, :py:class:`int` or :py:class:`None` :param path_io_factory: factory of «path abstract layer» :type path_io_factory: :py:class:`aioftp.AbstractPathIO` :param maximum_connections: Maximum command connections per server :type maximum_connections: :py:class:`int` :param read_speed_limit: server read speed limit in bytes per second :type read_speed_limit: :py:class:`int` or :py:class:`None` :param write_speed_limit: server write speed limit in bytes per second :type write_speed_limit: :py:class:`int` or :py:class:`None` :param read_speed_limit_per_connection: server read speed limit per connection in bytes per second :type read_speed_limit_per_connection: :py:class:`int` or :py:class:`None` :param write_speed_limit_per_connection: server write speed limit per connection in bytes per second :type write_speed_limit_per_connection: :py:class:`int` or :py:class:`None` :param ipv4_pasv_forced_response_address: external IPv4 address for passive connections :type ipv4_pasv_forced_response_address: :py:class:`str` or :py:class:`None` :param data_ports: port numbers that are available for passive connections :type data_ports: :py:class:`collections.Iterable` or :py:class:`None` :param encoding: encoding to use for convertion strings to bytes :type encoding: :py:class:`str` :param ssl: can be set to an :py:class:`ssl.SSLContext` instance to enable TLS over the accepted connections. Please look :py:meth:`asyncio.loop.create_server` docs. 
:type ssl: :py:class:`ssl.SSLContext` """ def __init__(self, users=None, *, block_size=DEFAULT_BLOCK_SIZE, socket_timeout=None, idle_timeout=None, wait_future_timeout=1, path_timeout=None, path_io_factory=pathio.PathIO, maximum_connections=None, read_speed_limit=None, write_speed_limit=None, read_speed_limit_per_connection=None, write_speed_limit_per_connection=None, ipv4_pasv_forced_response_address=None, data_ports=None, encoding="utf-8", ssl=None): self.block_size = block_size self.socket_timeout = socket_timeout self.idle_timeout = idle_timeout self.wait_future_timeout = wait_future_timeout self.path_io_factory = pathio.PathIONursery(path_io_factory) self.path_timeout = path_timeout self.ipv4_pasv_forced_response_address = \ ipv4_pasv_forced_response_address if data_ports is not None: self.available_data_ports = asyncio.PriorityQueue() for data_port in data_ports: self.available_data_ports.put_nowait((0, data_port)) else: self.available_data_ports = None if isinstance(users, AbstractUserManager): self.user_manager = users else: self.user_manager = MemoryUserManager(users) self.available_connections = AvailableConnections(maximum_connections) self.throttle = StreamThrottle.from_limits( read_speed_limit, write_speed_limit, ) self.throttle_per_connection = StreamThrottle.from_limits( read_speed_limit_per_connection, write_speed_limit_per_connection, ) self.throttle_per_user = {} self.encoding = encoding self.ssl = ssl self.commands_mapping = { "abor": self.abor, "appe": self.appe, "cdup": self.cdup, "cwd": self.cwd, "dele": self.dele, "epsv": self.epsv, "list": self.list, "mkd": self.mkd, "mlsd": self.mlsd, "mlst": self.mlst, "pass": self.pass_, "pasv": self.pasv, "pbsz": self.pbsz, "prot": self.prot, "pwd": self.pwd, "quit": self.quit, "rest": self.rest, "retr": self.retr, "rmd": self.rmd, "rnfr": self.rnfr, "rnto": self.rnto, "stor": self.stor, "syst": self.syst, "type": self.type, "user": self.user, } async def start(self, host=None, port=0, **kwargs): """ :py:func:`asyncio.coroutine` Start server. :param host: ip address to bind for listening. :type host: :py:class:`str` :param port: port number to bind for listening. :type port: :py:class:`int` :param kwargs: keyword arguments, they passed to :py:func:`asyncio.start_server` """ self._start_server_extra_arguments = kwargs self.connections = {} self.server_host = host self.server_port = port self.server = await asyncio.start_server( self.dispatcher, host, port, ssl=self.ssl, **self._start_server_extra_arguments, ) for sock in self.server.sockets: if sock.family in (socket.AF_INET, socket.AF_INET6): host, port, *_ = sock.getsockname() if not self.server_port: self.server_port = port if not self.server_host: self.server_host = host logger.info("serving on %s:%s", host, port) async def serve_forever(self): """ :py:func:`asyncio.coroutine` Proxy to :py:class:`asyncio.Server` `serve_forever` method. """ return await self.server.serve_forever() async def run(self, host=None, port=0, **kwargs): """ :py:func:`asyncio.coroutine` Single entrypoint to start, serve and close. :param host: ip address to bind for listening. :type host: :py:class:`str` :param port: port number to bind for listening. 
:type port: :py:class:`int` :param kwargs: keyword arguments passed to :py:func:`asyncio.start_server` """ await self.start(host=host, port=port, **kwargs) try: await self.serve_forever() finally: await self.close() @property def address(self): """ Server listen socket host and port as :py:class:`tuple` """ return self.server_host, self.server_port async def close(self): """ :py:func:`asyncio.coroutine` Shutdown the server and close all connections. """ self.server.close() tasks = [asyncio.create_task(self.server.wait_closed())] for connection in self.connections.values(): connection._dispatcher.cancel() tasks.append(connection._dispatcher) logger.debug("waiting for %d tasks", len(tasks)) await asyncio.wait(tasks) async def write_line(self, stream, line): logger.debug(line) await stream.write((line + END_OF_LINE).encode(encoding=self.encoding)) async def write_response(self, stream, code, lines="", list=False): """ :py:func:`asyncio.coroutine` Complex method for sending response. :param stream: command connection stream :type stream: :py:class:`aioftp.StreamIO` :param code: server response code :type code: :py:class:`str` :param lines: line or lines, which are response information :type lines: :py:class:`str` or :py:class:`collections.Iterable` :param list: if true, then lines will be sent without the code prefix. This is useful for **LIST** FTP command and some others. :type list: :py:class:`bool` """ lines = wrap_with_container(lines) write = functools.partial(self.write_line, stream) if list: head, *body, tail = lines await write(code + "-" + head) for line in body: await write(" " + line) await write(code + " " + tail) else: *body, tail = lines for line in body: await write(code + "-" + line) await write(code + " " + tail) async def parse_command(self, stream, censor_commands=("pass",)): """ :py:func:`asyncio.coroutine` Complex method for getting command. :param stream: connection stream :type stream: :py:class:`aioftp.StreamIO` :param censor_commands: An optional list of commands to censor. :type censor_commands: :py:class:`tuple` of :py:class:`str` :return: (command, rest) :rtype: (:py:class:`str`, :py:class:`str`) """ line = await stream.readline() if not line: raise ConnectionResetError s = line.decode(encoding=self.encoding).rstrip() cmd, _, rest = s.partition(" ") if cmd.lower() in censor_commands: stars = "*" * len(rest) logger.debug("%s %s", cmd, stars) else: logger.debug("%s %s", cmd, rest) return cmd.lower(), rest async def response_writer(self, stream, response_queue): """ :py:func:`asyncio.coroutine` Worker for write_response with current connection. Gets response data from the queue to keep responses in the right order. Runs until the dispatcher cancels it. :param stream: command connection stream :type stream: :py:class:`aioftp.StreamIO` :param response_queue: :type response_queue: :py:class:`asyncio.Queue` """ while True: args = await response_queue.get() try: await self.write_response(stream, *args) finally: response_queue.task_done() async def dispatcher(self, reader, writer): """ :py:func:`asyncio.coroutine` Server connection handler (main routine per user).
""" host, port, *_ = writer.transport.get_extra_info("peername", ("", "")) current_server_host, *_ = writer.transport.get_extra_info("sockname") logger.info("new connection from %s:%s", host, port) key = stream = ThrottleStreamIO( reader, writer, throttles=dict( server_global=self.throttle, server_per_connection=self.throttle_per_connection.clone() ), read_timeout=self.idle_timeout, write_timeout=self.socket_timeout, ) response_queue = asyncio.Queue() connection = Connection( client_host=host, client_port=port, server_host=current_server_host, passive_server_port=0, server_port=self.server_port, command_connection=stream, socket_timeout=self.socket_timeout, idle_timeout=self.idle_timeout, wait_future_timeout=self.wait_future_timeout, block_size=self.block_size, path_io_factory=self.path_io_factory, path_timeout=self.path_timeout, extra_workers=set(), response=lambda *args: response_queue.put_nowait(args), acquired=False, restart_offset=0, _dispatcher=get_current_task(), ) connection.path_io = self.path_io_factory(timeout=self.path_timeout, connection=connection) pending = { asyncio.create_task(self.greeting(connection, "")), asyncio.create_task(self.response_writer(stream, response_queue)), asyncio.create_task(self.parse_command(stream)), } self.connections[key] = connection try: while True: done, pending = await asyncio.wait( pending | connection.extra_workers, return_when=asyncio.FIRST_COMPLETED, ) connection.extra_workers -= done for task in done: try: result = task.result() except errors.PathIOError: connection.response("451", "file system error") continue # this is "command" result if isinstance(result, bool): if not result: await response_queue.join() return # this is parse_command result elif isinstance(result, tuple): pending.add( asyncio.create_task(self.parse_command(stream)) ) cmd, rest = result f = self.commands_mapping.get(cmd) if f is not None: pending.add( asyncio.create_task(f(connection, rest)) ) if cmd not in ("retr", "stor", "appe"): connection.restart_offset = 0 else: message = f"{cmd!r} not implemented" connection.response("502", message) except asyncio.CancelledError: raise except Exception: logger.exception("dispatcher caught exception") finally: logger.info("closing connection from %s:%s", host, port) tasks_to_wait = [] if not asyncio.get_running_loop().is_closed(): for task in pending | connection.extra_workers: task.cancel() tasks_to_wait.append(task) if connection.future.passive_server.done(): connection.passive_server.close() if self.available_data_ports is not None: port = connection.passive_server_port self.available_data_ports.put_nowait((0, port)) if connection.future.data_connection.done(): connection.data_connection.close() stream.close() if connection.acquired: self.available_connections.release() if connection.future.user.done(): task = asyncio.create_task( self.user_manager.notify_logout(connection.user) ) tasks_to_wait.append(task) self.connections.pop(key) if tasks_to_wait: await asyncio.wait(tasks_to_wait) @staticmethod def get_paths(connection, path): """ Return *real* and *virtual* paths, resolves ".." with "up" action. 
*Real* path is path for path_io, when *virtual* deals with "user-view" and user requests :param connection: internal options for current connected user :type connection: :py:class:`dict` :param path: received path from user :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :return: (real_path, virtual_path) :rtype: (:py:class:`pathlib.Path`, :py:class:`pathlib.PurePosixPath`) """ virtual_path = pathlib.PurePosixPath(path) if not virtual_path.is_absolute(): virtual_path = connection.current_directory / virtual_path resolved_virtual_path = pathlib.PurePosixPath("/") for part in virtual_path.parts[1:]: if part == "..": resolved_virtual_path = resolved_virtual_path.parent else: resolved_virtual_path /= part base_path = connection.user.base_path real_path = base_path / resolved_virtual_path.relative_to("/") # replace with `is_relative_to` check after 3.9+ requirements lands try: real_path.relative_to(base_path) except ValueError: real_path = base_path resolved_virtual_path = pathlib.PurePosixPath("/") return real_path, resolved_virtual_path async def greeting(self, connection, rest): if self.available_connections.locked(): ok, code, info = False, "421", "Too many connections" else: ok, code, info = True, "220", "welcome" connection.acquired = True self.available_connections.acquire() connection.response(code, info) return ok async def user(self, connection, rest): if connection.future.user.done(): await self.user_manager.notify_logout(connection.user) del connection.user del connection.logged state, user, info = await self.user_manager.get_user(rest) if state == AbstractUserManager.GetUserResponse.OK: code = "230" connection.logged = True connection.user = user elif state == AbstractUserManager.GetUserResponse.PASSWORD_REQUIRED: code = "331" connection.user = user elif state == AbstractUserManager.GetUserResponse.ERROR: code = "530" else: message = f"Unknown response {state}" raise NotImplementedError(message) if connection.future.user.done(): connection.current_directory = connection.user.home_path if connection.user not in self.throttle_per_user: throttle = StreamThrottle.from_limits( connection.user.read_speed_limit, connection.user.write_speed_limit, ) self.throttle_per_user[connection.user] = throttle connection.command_connection.throttles.update( user_global=self.throttle_per_user[connection.user], user_per_connection=StreamThrottle.from_limits( connection.user.read_speed_limit_per_connection, connection.user.write_speed_limit_per_connection, ) ) connection.response(code, info) return True @ConnectionConditions(ConnectionConditions.user_required) async def pass_(self, connection, rest): if connection.future.logged.done(): code, info = "503", "already logged in" elif await self.user_manager.authenticate(connection.user, rest): connection.logged = True code, info = "230", "normal login" else: code, info = "530", "wrong password" connection.response(code, info) return True async def quit(self, connection, rest): connection.response("221", "bye") return False @ConnectionConditions(ConnectionConditions.login_required) async def pwd(self, connection, rest): code, info = "257", f"\"{connection.current_directory}\"" connection.response(code, info) return True @ConnectionConditions(ConnectionConditions.login_required) @PathConditions( PathConditions.path_must_exists, PathConditions.path_must_be_dir) @PathPermissions(PathPermissions.readable) async def cwd(self, connection, rest): real_path, virtual_path = self.get_paths(connection, rest) connection.current_directory = virtual_path 
connection.response("250", "") return True @ConnectionConditions(ConnectionConditions.login_required) async def cdup(self, connection, rest): return await self.cwd(connection, connection.current_directory.parent) @ConnectionConditions(ConnectionConditions.login_required) @PathConditions(PathConditions.path_must_not_exists) @PathPermissions(PathPermissions.writable) async def mkd(self, connection, rest): real_path, virtual_path = self.get_paths(connection, rest) await connection.path_io.mkdir(real_path, parents=True) connection.response("257", "") return True @ConnectionConditions(ConnectionConditions.login_required) @PathConditions( PathConditions.path_must_exists, PathConditions.path_must_be_dir) @PathPermissions(PathPermissions.writable) async def rmd(self, connection, rest): real_path, virtual_path = self.get_paths(connection, rest) await connection.path_io.rmdir(real_path) connection.response("250", "") return True @staticmethod def _format_mlsx_time(local_seconds): return time.strftime("%Y%m%d%H%M%S", time.gmtime(local_seconds)) def _build_mlsx_facts_from_stats(self, stats): return { "Size": stats.st_size, "Create": self._format_mlsx_time(stats.st_ctime), "Modify": self._format_mlsx_time(stats.st_mtime), } async def build_mlsx_string(self, connection, path): if not await connection.path_io.exists(path): facts = {} else: stats = await connection.path_io.stat(path) facts = self._build_mlsx_facts_from_stats(stats) if await connection.path_io.is_file(path): facts["Type"] = "file" elif await connection.path_io.is_dir(path): facts["Type"] = "dir" else: facts["Type"] = "unknown" s = "" for name, value in facts.items(): s += f"{name}={value};" s += " " + path.name return s @ConnectionConditions( ConnectionConditions.login_required, ConnectionConditions.passive_server_started) @PathConditions(PathConditions.path_must_exists) @PathPermissions(PathPermissions.readable) async def mlsd(self, connection, rest): @ConnectionConditions( ConnectionConditions.data_connection_made, wait=True, fail_code="425", fail_info="Can't open data connection") @worker async def mlsd_worker(self, connection, rest): stream = connection.data_connection del connection.data_connection async with stream: async for path in connection.path_io.list(real_path): s = await self.build_mlsx_string(connection, path) b = (s + END_OF_LINE).encode(encoding=self.encoding) await stream.write(b) connection.response("200", "mlsd transfer done") return True real_path, virtual_path = self.get_paths(connection, rest) coro = mlsd_worker(self, connection, rest) task = asyncio.create_task(coro) connection.extra_workers.add(task) connection.response("150", "mlsd transfer started") return True @staticmethod def build_list_mtime(st_mtime, now=None): if now is None: now = time.time() mtime = time.localtime(st_mtime) with setlocale("C"): if now - HALF_OF_YEAR_IN_SECONDS < st_mtime <= now: s = time.strftime("%b %e %H:%M", mtime) else: s = time.strftime("%b %e %Y", mtime) return s async def build_list_string(self, connection, path): stats = await connection.path_io.stat(path) mtime = self.build_list_mtime(stats.st_mtime) fields = ( stat.filemode(stats.st_mode), str(stats.st_nlink), "none", "none", str(stats.st_size), mtime, path.name ) s = " ".join(fields) return s @ConnectionConditions( ConnectionConditions.login_required, ConnectionConditions.passive_server_started) @PathConditions(PathConditions.path_must_exists) @PathPermissions(PathPermissions.readable) async def list(self, connection, rest): @ConnectionConditions( 
ConnectionConditions.data_connection_made, wait=True, fail_code="425", fail_info="Can't open data connection") @worker async def list_worker(self, connection, rest): stream = connection.data_connection del connection.data_connection async with stream: async for path in connection.path_io.list(real_path): if not (await connection.path_io.exists(path)): logger.warning("path %r does not exists", path) continue s = await self.build_list_string(connection, path) b = (s + END_OF_LINE).encode(encoding=self.encoding) await stream.write(b) connection.response("226", "list transfer done") return True real_path, virtual_path = self.get_paths(connection, rest) coro = list_worker(self, connection, rest) task = asyncio.create_task(coro) connection.extra_workers.add(task) connection.response("150", "list transfer started") return True @ConnectionConditions(ConnectionConditions.login_required) @PathConditions(PathConditions.path_must_exists) @PathPermissions(PathPermissions.readable) async def mlst(self, connection, rest): real_path, virtual_path = self.get_paths(connection, rest) s = await self.build_mlsx_string(connection, real_path) connection.response("250", ["start", s, "end"], True) return True @ConnectionConditions(ConnectionConditions.login_required) @PathConditions(PathConditions.path_must_exists) @PathPermissions(PathPermissions.writable) async def rnfr(self, connection, rest): real_path, virtual_path = self.get_paths(connection, rest) connection.rename_from = real_path connection.response("350", "rename from accepted") return True @ConnectionConditions( ConnectionConditions.login_required, ConnectionConditions.rename_from_required) @PathConditions(PathConditions.path_must_not_exists) @PathPermissions(PathPermissions.writable) async def rnto(self, connection, rest): real_path, virtual_path = self.get_paths(connection, rest) rename_from = connection.rename_from del connection.rename_from await connection.path_io.rename(rename_from, real_path) connection.response("250", "") return True @ConnectionConditions(ConnectionConditions.login_required) @PathConditions( PathConditions.path_must_exists, PathConditions.path_must_be_file) @PathPermissions(PathPermissions.writable) async def dele(self, connection, rest): real_path, virtual_path = self.get_paths(connection, rest) await connection.path_io.unlink(real_path) connection.response("250", "") return True @ConnectionConditions( ConnectionConditions.login_required, ConnectionConditions.passive_server_started) @PathPermissions(PathPermissions.writable) async def stor(self, connection, rest, mode="wb"): @ConnectionConditions( ConnectionConditions.data_connection_made, wait=True, fail_code="425", fail_info="Can't open data connection") @worker async def stor_worker(self, connection, rest): stream = connection.data_connection del connection.data_connection if connection.restart_offset: file_mode = "r+b" else: file_mode = mode file_out = connection.path_io.open(real_path, mode=file_mode) async with file_out, stream: if connection.restart_offset: await file_out.seek(connection.restart_offset) async for data in stream.iter_by_block(connection.block_size): await file_out.write(data) connection.response("226", "data transfer done") return True real_path, virtual_path = self.get_paths(connection, rest) if await connection.path_io.is_dir(real_path.parent): coro = stor_worker(self, connection, rest) task = asyncio.create_task(coro) connection.extra_workers.add(task) code, info = "150", "data transfer started" else: code, info = "550", "path unreachable" 
connection.response(code, info) return True @ConnectionConditions( ConnectionConditions.login_required, ConnectionConditions.passive_server_started) @PathConditions( PathConditions.path_must_exists, PathConditions.path_must_be_file) @PathPermissions(PathPermissions.readable) async def retr(self, connection, rest): @ConnectionConditions( ConnectionConditions.data_connection_made, wait=True, fail_code="425", fail_info="Can't open data connection") @worker async def retr_worker(self, connection, rest): stream = connection.data_connection del connection.data_connection file_in = connection.path_io.open(real_path, mode="rb") async with file_in, stream: if connection.restart_offset: await file_in.seek(connection.restart_offset) async for data in file_in.iter_by_block(connection.block_size): await stream.write(data) connection.response("226", "data transfer done") return True real_path, virtual_path = self.get_paths(connection, rest) coro = retr_worker(self, connection, rest) task = asyncio.create_task(coro) connection.extra_workers.add(task) connection.response("150", "data transfer started") return True @ConnectionConditions(ConnectionConditions.login_required) async def type(self, connection, rest): if rest in ("I", "A"): connection.transfer_type = rest code, info = "200", "" else: code, info = "502", f"type {rest!r} not implemented" connection.response(code, info) return True @ConnectionConditions(ConnectionConditions.login_required) async def pbsz(self, connection, rest): connection.response("200", "") return True @ConnectionConditions(ConnectionConditions.login_required) async def prot(self, connection, rest): if rest == "P": code, info = "200", "" else: code, info = "502", "" connection.response(code, info) return True async def _start_passive_server(self, connection, handler_callback): if self.available_data_ports is not None: viewed_ports = set() while True: try: priority, port = self.available_data_ports.get_nowait() if port in viewed_ports: raise errors.NoAvailablePort viewed_ports.add(port) passive_server = await asyncio.start_server( handler_callback, connection.server_host, port, ssl=self.ssl, **self._start_server_extra_arguments, ) connection.passive_server_port = port break except asyncio.QueueEmpty: raise errors.NoAvailablePort except OSError as err: self.available_data_ports.put_nowait((priority + 1, port)) if err.errno != errno.EADDRINUSE: raise else: passive_server = await asyncio.start_server( handler_callback, connection.server_host, connection.passive_server_port, ssl=self.ssl, **self._start_server_extra_arguments, ) return passive_server @ConnectionConditions(ConnectionConditions.login_required) async def pasv(self, connection, rest): async def handler(reader, writer): if connection.future.data_connection.done(): writer.close() else: connection.data_connection = ThrottleStreamIO( reader, writer, throttles=connection.command_connection.throttles, timeout=connection.socket_timeout, ) if not connection.future.passive_server.done(): coro = self._start_passive_server(connection, handler) try: connection.passive_server = await coro except errors.NoAvailablePort: connection.response("421", ["no free ports"]) return False code, info = "227", ["listen socket created"] else: code, info = "227", ["listen socket already exists"] for sock in connection.passive_server.sockets: if sock.family == socket.AF_INET: host, port = sock.getsockname() # If the FTP server is behind NAT, the server needs to report # its external IP instead of the internal IP so that the client # is able to connect 
to the server. if self.ipv4_pasv_forced_response_address: host = self.ipv4_pasv_forced_response_address break else: connection.response("503", ["this server started in ipv6 mode"]) return False nums = tuple(map(int, host.split("."))) + (port >> 8, port & 0xff) info.append(f"({','.join(map(str, nums))})") if connection.future.data_connection.done(): connection.data_connection.close() del connection.data_connection connection.response(code, info) return True @ConnectionConditions(ConnectionConditions.login_required) async def epsv(self, connection, rest): async def handler(reader, writer): if connection.future.data_connection.done(): writer.close() else: connection.data_connection = ThrottleStreamIO( reader, writer, throttles=connection.command_connection.throttles, timeout=connection.socket_timeout, ) if rest: code, info = "522", ["custom protocols support not implemented"] connection.response(code, info) return False if not connection.future.passive_server.done(): coro = self._start_passive_server(connection, handler) try: connection.passive_server = await coro except errors.NoAvailablePort: connection.response("421", ["no free ports"]) return False code, info = "229", ["listen socket created"] else: code, info = "229", ["listen socket already exists"] for sock in connection.passive_server.sockets: if sock.family in (socket.AF_INET, socket.AF_INET6): _, port, *_ = sock.getsockname() break info[0] += f" (|||{port}|)" if connection.future.data_connection.done(): connection.data_connection.close() del connection.data_connection connection.response(code, info) return True @ConnectionConditions(ConnectionConditions.login_required) async def abor(self, connection, rest): if connection.extra_workers: for worker in connection.extra_workers: worker.cancel() else: connection.response("226", "nothing to abort") return True async def appe(self, connection, rest): return await self.stor(connection, rest, "ab") async def rest(self, connection, rest): if rest.isdigit(): connection.restart_offset = int(rest) connection.response("350", f"restarting at {rest}") else: connection.restart_offset = 0 message = f"syntax error, can't restart at {rest!r}" connection.response("501", message) return True async def syst(self, connection, rest): """Return system type (always returns UNIX type: L8).""" connection.response("215", "UNIX Type: L8") return True aioftp-0.21.4/doc-requirements.txt000066400000000000000000000000431432163100400170760ustar00rootroot00000000000000sphinx alabaster docutils < 0.18.0 aioftp-0.21.4/docs/000077500000000000000000000000001432163100400140025ustar00rootroot00000000000000aioftp-0.21.4/docs/Makefile000066400000000000000000000163611432163100400154510ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
# the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext help: @echo "Please use \`make <target>' where <target> is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " applehelp to make an Apple Help Book" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" @echo " coverage to run coverage check of the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/aioftp.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/aioftp.qhc" applehelp: $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp @echo @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." @echo "N.B. You won't be able to view it unless you put it in" \ "~/Library/Documentation/Help or install it in your application" \ "bundle." devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished."
@echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/aioftp" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/aioftp" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." coverage: $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage @echo "Testing of coverage in the sources finished, look at the " \ "results in $(BUILDDIR)/coverage/python.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
aioftp-0.21.4/docs/_static/000077500000000000000000000000001432163100400154305ustar00rootroot00000000000000aioftp-0.21.4/docs/_static/logo.ico000066400000000000000000000102761432163100400170720ustar00rootroot00000000000000[logo.ico: binary icon data]
aioftp-0.21.4/docs/_static/logo.png000066400000000000000000000036251432163100400171040ustar00rootroot00000000000000[logo.png: binary PNG data]

::

    >>> client = aioftp.Client()
    >>> await client.connect("ftp.server.com")
    >>> await client.login("user", "pass")

Or just use the :class:`aioftp.Client.context` async context manager, which will connect, login and quit automatically

::

    >>> async with aioftp.Client.context("ftp.server.com", user="user", password="pass") as client:
    ...     # do

Download and upload paths
-------------------------

The :py:meth:`aioftp.Client.upload` and :py:meth:`aioftp.Client.download` coroutines are pretty similar, except for the data flow direction. You can upload/download a file or a directory. There is a "source" and a "destination". When you do not specify a "destination", the current working directory is used as the destination.

Let's upload a file to the current directory

::

    >>> await client.upload("test.py")

If you want to specify a new name, or a different path for the uploaded/downloaded file, you should use the "write_into" argument, which works for directories as well

::

    >>> await client.upload("test.py", "tmp/test.py", write_into=True)
    >>> await client.upload("folder1", "folder2", write_into=True)

After that you get

::

    tmp/test.py
    folder2/*content of folder1*

If you do not use "write_into", you will get something you probably did not expect

::

    tmp/test.py/test.py
    folder2/folder1/*content of folder1*

Or you can upload the path as is and then rename it (:py:meth:`aioftp.Client.rename`)

Downloading works pretty much the same way

::

    >>> await client.download("tmp/test.py", "foo.py", write_into=True)
    >>> await client.download("folder2")

Listing paths
-------------

For listing paths you should use the :py:meth:`aioftp.Client.list` coroutine, which can list paths recursively, produce a :py:class:`list`, and be used with `async for`

::

    >>> await client.list("/")
    [(PosixPath('/.logs'), {'unix.mode': '0755', 'unique': '801g4804045', ...

::

    >>> await client.list("/", recursive=True)
    [(PosixPath('/.logs'), {'unix.mode': '0755', 'unique': '801g4804045', ...

::

    >>> async for path, info in client.list("/", recursive=True):
    ...     print(path)
    (PosixPath('/.logs'), {'unix.mode': '0755', 'unique': '801g4804045', ...

If you omit the path argument, the result will be a listing of the current working directory

::

    >>> await c.list()
    [(PosixPath('test.py'), {'unique': '801g480a508', 'size': '3102', ...

Be careful with `async for`, since the asynchronous variation of `list` is lazy. It means that **you can't interact with the server until you leave the `async for` block.** If you need the list and want to interact with the server, you should use the eager version of `list`:

::

    >>> for path, info in (await client.list()):
    ...     await client.download(path, path.name)
If you want to mix the lazy `list` with client interaction, you can create two client connections to the server:

::

    >>> async for path, info in client1.list():
    ...     await client2.download(path, path.name)

WARNING
^^^^^^^

:py:meth:`aioftp.Client.list` generally uses the `MLSD` command, but some nasty servers do not support it. In that case the client will try the `LIST` command and parse the server response. For :py:meth:`datetime.datetime.strptime` to work properly (in the part that parses the month abbreviation) the locale should be set to "C". For this reason, if you have a multithreaded app that uses some locale-dependent stuff, you should use the :py:meth:`aioftp.setlocale` context manager when dealing with locale in another thread.

**since 0.8.1**

If the fallback `LIST` parser can't parse a line, the line is ignored, so the fallback `LIST` implementation will never raise an exception.
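If the built-in fallback can't make sense of your server's `LIST` output, you can supply your own parser routine. Below is a minimal sketch assuming the `parse_list_line_custom` client argument described in the client API reference (check the hook name and exact signature against the aioftp version you use; the line format parsed here is invented for illustration):

::

    import pathlib

    import aioftp

    # Hypothetical server format: "<mode> <size> <name>", e.g. "-rw-r--r-- 3102 test.py".
    # The callable receives one raw line (bytes) and returns a (path, info) pair,
    # mirroring what the MLSx parsers produce.
    def parse_line(b):
        mode, size, name = b.decode("utf-8").split(maxsplit=2)
        info = {
            "type": "dir" if mode.startswith("d") else "file",
            "size": size,
        }
        return pathlib.PurePosixPath(name), info

    client = aioftp.Client(parse_list_line_custom=parse_line)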
Getting path stats
------------------

When you need to get some path stats you should use :py:meth:`aioftp.Client.stat`

::

    >>> await client.stat("tmp2.py")
    {'size': '909', 'create': '1445437246.4320722', 'type': 'file', ...
    >>> await client.stat(".git")
    {'create': '1445435702.6441028', 'type': 'dir', 'size': '4096', ...

If you just need to check whether a path is a file, is a directory, or exists, you can use :py:meth:`aioftp.Client.is_file`, :py:meth:`aioftp.Client.is_dir` and :py:meth:`aioftp.Client.exists`

::

    >>> await client.is_file("/public_html")
    False
    >>> await client.is_dir("/public_html")
    True
    >>> await client.is_file("test.py")
    True
    >>> await client.exists("test.py")
    True
    >>> await client.exists("naked-guido.png")
    False

WARNING
^^^^^^^

:py:meth:`aioftp.Client.stat` generally uses the `MLST` command, but some nasty servers do not support it. In that case the client will try the `LIST` command and parse the server response. For :py:meth:`datetime.datetime.strptime` to work properly (in the part that parses the month abbreviation) the locale should be set to "C". For this reason, if you have a multithreaded app that uses some locale-dependent stuff, you should use the :py:meth:`aioftp.setlocale` context manager when dealing with locale in another thread.

**since 0.8.1**

If the fallback `LIST` parser can't parse a line, the line is ignored, so the fallback `LIST` implementation will never raise an exception. But if the line for the requested path can't be parsed, then :py:meth:`aioftp.Client.stat` will raise a `path does not exists` error.

Remove path
-----------

For removing paths you have the universal coroutine :py:meth:`aioftp.Client.remove`, which can remove a file or a directory recursively. So, you don't need to do boring checks.

::

    >>> await client.remove("tmp.py")
    >>> await client.remove("folder1")

Dealing with directories
------------------------

Directory coroutines are pretty simple: :py:meth:`aioftp.Client.get_current_directory`, :py:meth:`aioftp.Client.change_directory` and :py:meth:`aioftp.Client.make_directory`

::

    >>> await client.get_current_directory()
    PosixPath('/public_html')
    >>> await client.change_directory("folder1")
    >>> await client.get_current_directory()
    PosixPath('/public_html/folder1')
    >>> await client.change_directory()
    >>> await client.get_current_directory()
    PosixPath('/public_html')
    >>> await client.make_directory("folder2")
    >>> await client.change_directory("folder2")
    >>> await client.get_current_directory()
    PosixPath('/public_html/folder2')

Rename (move) path
------------------

To rename (move) a file or directory, use :py:meth:`aioftp.Client.rename`.

::

    >>> await client.list()
    [(PosixPath('test.py'), {'modify': '20150423090041', 'type': 'file', ...
    >>> await client.rename("test.py", "foo.py")
    >>> await client.list()
    [(PosixPath('foo.py'), {'modify': '20150423090041', 'type': 'file', ...

Closing connection
------------------

The :py:meth:`aioftp.Client.quit` coroutine will send the "QUIT" ftp command and close the connection.

::

    >>> await client.quit()

Advanced download and upload, abort, restart
--------------------------------------------

File read/write operations are blocking and slow. So if you just want to parse/calculate something on the fly while receiving a file, or generate data on the fly to upload it to the file system of the ftp server, then you should use :py:meth:`aioftp.Client.download_stream`, :py:meth:`aioftp.Client.upload_stream` and :py:meth:`aioftp.Client.append_stream`. All these methods are based on :py:meth:`aioftp.Client.get_stream`, which returns a :py:class:`aioftp.DataConnectionThrottleStreamIO`.

The common pattern for working with streams is:

::

    >>> async with client.download_stream("tmp.py") as stream:
    ...     async for block in stream.iter_by_block():
    ...         # do something with data

Or, if you want to abort the transfer at some point

::

    >>> stream = await client.download_stream("tmp.py")
    ... async for block in stream.iter_by_block():
    ...     # do something with data
    ...     if something_not_interesting:
    ...         await client.abort()
    ...         stream.close()
    ...         break
    ... else:
    ...     await stream.finish()

WARNING
^^^^^^^

Do not use the `async with` syntax if you want to use `abort`, as this will lead to a deadlock.

For restarting an upload/download at an exact byte position (the REST command) there is an `offset` argument for the `*_stream` methods:

::

    >>> async with client.download_stream("tmp.py", offset=256) as stream:
    ...     async for block in stream.iter_by_block():
    ...         # do something with data

Or if you want to resume an interrupted upload/download:

::

    >>> while True:
    ...     try:
    ...         async with aioftp.Client.context(HOST, PORT) as client:
    ...             if await client.exists(filename):
    ...                 stat = await client.stat(filename)
    ...                 size = int(stat["size"])
    ...             else:
    ...                 size = 0
    ...             file_in.seek(size)
    ...             async with client.upload_stream(filename, offset=size) as stream:
    ...                 while True:
    ...                     data = file_in.read(block_size)
    ...                     if not data:
    ...                         break
    ...                     await stream.write(data)
    ...             break
    ...     except ConnectionResetError:
    ...         pass

The idea is to seek the position of the source «file» for the upload and start the upload with offset/append. The opposite holds for a download (append to the «file» and download with offset).

Throttle
--------

The client has two types of speed limits: `read_speed_limit` and `write_speed_limit`. A throttle can be set at initialization time:

::

    >>> client = aioftp.Client(read_speed_limit=100 * 1024)  # 100 Kib/s

And can be changed after creation:

::

    >>> client.throttle.write.limit = 250 * 1024

Path abstraction layer
----------------------

aioftp provides an abstraction of file system operations. You can use the existing ones:

* :py:class:`aioftp.PathIO` — blocking path operations
* :py:class:`aioftp.AsyncPathIO` — non-blocking path operations; this is just the blocking one wrapped with :py:meth:`asyncio.BaseEventLoop.run_in_executor`. It's really slow, so it's better to avoid this path io layer.
* :py:class:`aioftp.MemoryPathIO` — in-memory implementation of a file system; this one is just a proof of concept and probably not as fast as it could be.

You can specify `path_io_factory` when creating an :py:class:`aioftp.Client` instance. The default factory is :py:class:`aioftp.PathIO`.

::

    >>> client = aioftp.Client(path_io_factory=aioftp.MemoryPathIO)
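To see how the pieces fit together, here is a self-contained sketch (the host and port are arbitrary) that starts an in-memory server and talks to it with a client, so the demo never touches the real filesystem:

::

    import asyncio

    import aioftp

    async def main():
        # server-side files live in MemoryPathIO, not on disk
        server = aioftp.Server(path_io_factory=aioftp.MemoryPathIO)
        await server.start("127.0.0.1", 8021)
        async with aioftp.Client.context("127.0.0.1", 8021) as client:
            async with client.upload_stream("hello.txt") as stream:
                await stream.write(b"hello, in-memory world")
            print(await client.stat("hello.txt"))
        await server.close()

    asyncio.run(main())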
Timeouts
--------

:py:class:`aioftp.Client` has a `socket_timeout` argument, which you can use to specify a global timeout for socket io operations.

::

    >>> client = aioftp.Client(socket_timeout=1)  # 1 second socket timeout

:py:class:`aioftp.Client` also has a `path_timeout` argument, which is applied **only to non-blocking path io layers**.

::

    >>> client = aioftp.Client(
    ...     path_timeout=1,
    ...     path_io_factory=aioftp.AsyncPathIO
    ... )

Using proxy
-----------

The simplest way to use a socks proxy with :class:`aioftp.Client` is `siosocks `_

::

    >>> client = aioftp.Client(
    ...     socks_host="localhost",
    ...     socks_port=9050,
    ...     socks_version=5,
    ... )

Don't forget to install `aioftp` as `pip install aioftp[socks]`, or install `siosocks` directly with `pip install siosocks`.

WARNING
-------

:py:meth:`aioftp.Client.list` and :py:meth:`aioftp.Client.stat` generally use `MLSD` and `MLST`, but some nasty servers do not support these commands. In that case the client will try the `LIST` command and parse the server response. For :py:meth:`datetime.datetime.strptime` to work properly (in the part that parses the month abbreviation) the locale should be set to "C". For this reason, if you have a multithreaded app that uses some locale-dependent stuff, you should use the :py:meth:`aioftp.setlocale` context manager when dealing with locale in another thread.

**since 0.8.1**

If the fallback `LIST` parser can't parse a line, the line is ignored, so the fallback `LIST` implementation will never raise an exception.

Further reading
---------------

:doc:`client_api`
aioftp-0.21.4/docs/common_api.rst000066400000000000000000000007131432163100400166560ustar00rootroot00000000000000.. common_api:

Common API
==========

.. autoclass:: aioftp.StreamIO
    :members:

.. autoclass:: aioftp.Throttle
    :members:

.. autoclass:: aioftp.StreamThrottle
    :members:

.. autoclass:: aioftp.ThrottleStreamIO
    :members:
    :show-inheritance:

.. autoclass:: aioftp.AsyncListerMixin

.. autoclass:: aioftp.AbstractAsyncLister

.. autofunction:: aioftp.with_timeout

.. autofunction:: aioftp.async_enterable

.. autofunction:: aioftp.setlocale
aioftp-0.21.4/docs/conf.py000066400000000000000000000232341432163100400153050ustar00rootroot00000000000000#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# aioftp documentation build configuration file, created by
# sphinx-quickstart on Fri Apr 17 16:21:03 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os
import shlex
import alabaster

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))

import aioftp

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'alabaster',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'aioftp' copyright = '2016, pohmelie' author = 'pohmelie' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = aioftp.__version__ # The full version, including alpha/beta/rc tags. release = aioftp.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'alabaster' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { 'logo': 'logo.png', 'description': 'ftp client/server for asyncio', 'github_user': 'pohmelie', 'github_repo': 'aioftp', 'github_button': True, 'github_banner': True, # 'travis_button': True, 'pre_bg': '#FFF6E5', 'note_bg': '#E5ECD1', 'note_border': '#BFCF8C', 'body_text': '#482C0A', 'sidebar_text': '#49443E', 'sidebar_header': '#4B4032', 'page_width': "90%", } # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [alabaster.get_path()] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. html_favicon = "_static/logo.ico" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. #html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'aioftpdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # Latex figure (float) alignment #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'aioftp.tex', 'aioftp Documentation', 'pohmelie', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. 
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'aioftp', 'aioftp Documentation',
     [author], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'aioftp', 'aioftp Documentation',
     author, 'aioftp', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False

intersphinx_mapping = {
    "python": ("https://docs.python.org/3", None),
    "aioftp": ("./_build/html", None),
}

html_logo = "_static/logo.png"
aioftp-0.21.4/docs/developer_tutorial.rst000066400000000000000000000207711432163100400204530ustar00rootroot00000000000000.. developer_tutorial:

Developer tutorial
==================

Both the client and server classes are designed with inheritance in mind. So, you need to inherit a class and override and/or add methods to bring in your functionality.

Client
------

For simple commands that require no extra connection, implementing a new method is pretty simple. You just need to use :py:meth:`aioftp.Client.command` (or even don't use it). For example, let's implement the «NOOP» command, which does nothing:

::

    class MyClient(aioftp.Client):

        async def noop(self):
            await self.command("NOOP", "2xx")

Let's take a look at a more complex example. Say we want to collect some data via an extra connection. For this you need one of the «extra connection» methods: :py:meth:`aioftp.Client.download_stream`, :py:meth:`aioftp.Client.upload_stream`, :py:meth:`aioftp.Client.append_stream` or (for more complex situations) :py:meth:`aioftp.Client.get_stream`

Here we implement some «COLL x» command. I don't know why, but it retrieves some data via an extra connection. And the size of the data is equal to «x».

::

    class MyClient(aioftp.Client):

        async def collect(self, count):
            collected = []
            async with self.get_stream("COLL " + str(count), "1xx") as stream:
                async for block in stream.iter_by_block(8):
                    i = int.from_bytes(block, "big")
                    print("received:", block, i)
                    collected.append(i)
            return collected

The client retrieves a passive (or, in future versions, active) connection via `get_stream` and reads blocks of data until the connection is closed. Then it finishes the stream and returns the result. Most client functions (except the low-level ones from BaseClient) are made in pretty much the same manner. It is a good idea to look at the source code of :py:class:`aioftp.Client` in client.py to see when and why this or that technique is used.

Server
------

The server class is based on a dispatcher, which waits for the results of tasks via :py:meth:`asyncio.wait`. The tasks are of different kinds: command-reader, result-writer, commander-action, extra-connection-workers. FTP methods are dispatched by name.
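Roughly speaking, the dispatch step is just a lowercased attribute lookup: the first word of the command line selects a coroutine method on the server class, and the rest of the line is passed to it. A conceptual sketch (not the actual aioftp implementation):

::

    async def dispatch_command(self, connection, line):
        cmd, _, rest = line.strip().partition(" ")
        handler = getattr(self, cmd.lower(), None)
        if handler is None:
            connection.response("502", ["command not implemented"])
            return True
        return await handler(connection, rest)

This is why defining a method named after the command (like `noop` below) is enough to make the server handle it.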
Let's say we want to implement the «NOOP» command for the server again:

::

    class MyServer(aioftp.Server):

        async def noop(self, connection, rest):
            connection.response("200", "boring")
            return True

What do we have here? The dispatcher calls our method with some arguments:

* `connection`: the state of the connection; it can hold and wait for futures. There are many connection values you may be interested in: addresses, throttles, timeouts, extra_workers, response, etc. You can add your own flags and values to the «connection», and edit the existing ones of course. It's better to look at the source code of the server, because the connection is the heart of dispatcher ↔ task and task ↔ task interaction, and the state container.
* `rest`: the rest part of the command string

There are some decorators that can help with routine checks (is the user logged in, can they read/write this path, etc.): :py:class:`aioftp.ConnectionConditions`, :py:class:`aioftp.PathConditions`, :py:class:`aioftp.PathPermissions`

For a more complex example, let's try the same «COLL x» command on the server side.

::

    class MyServer(aioftp.Server):

        @aioftp.ConnectionConditions(
            aioftp.ConnectionConditions.login_required,
            aioftp.ConnectionConditions.passive_server_started)
        async def coll(self, connection, rest):

            @aioftp.ConnectionConditions(
                aioftp.ConnectionConditions.data_connection_made,
                wait=True,
                fail_code="425",
                fail_info="Can't open data connection")
            @aioftp.server.worker
            async def coll_worker(self, connection, rest):
                stream = connection.data_connection
                del connection.data_connection
                async with stream:
                    for i in range(count):
                        binary = i.to_bytes(8, "big")
                        await stream.write(binary)
                connection.response("200", "coll transfer done")
                return True

            count = int(rest)
            coro = coll_worker(self, connection, rest)
            task = connection.loop.create_task(coro)
            connection.extra_workers.add(task)
            connection.response("150", "coll transfer started")
            return True

This action requires a passive connection, which is why we use a worker. We should be able to receive commands while receiving data over the extra connection; that is why we send our task to the dispatcher via `extra_workers`. The task will be pending on the next «iteration» of the dispatcher.

Let's see what we have.
::

    async def test():
        server = MyServer()
        client = MyClient()

        await server.start("127.0.0.1", 8021)
        await client.connect("127.0.0.1", 8021)
        await client.login()

        collected = await client.collect(20)
        print(collected)

        await client.quit()
        await server.close()


    if __name__ == "__main__":
        logging.basicConfig(
            level=logging.INFO,
            format="%(asctime)s [%(name)s] %(message)s",
            datefmt="[%H:%M:%S]:",
        )
        loop = asyncio.get_event_loop()
        loop.run_until_complete(test())
        print("done")

And the output for this is:

::

    [01:18:54]: [aioftp.server] serving on 127.0.0.1:8021
    [01:18:54]: [aioftp.server] new connection from 127.0.0.1:48883
    [01:18:54]: [aioftp.server] 220 welcome
    [01:18:54]: [aioftp.client] 220 welcome
    [01:18:54]: [aioftp.client] USER anonymous
    [01:18:54]: [aioftp.server] USER anonymous
    [01:18:54]: [aioftp.server] 230 anonymous login
    [01:18:54]: [aioftp.client] 230 anonymous login
    [01:18:54]: [aioftp.client] TYPE I
    [01:18:54]: [aioftp.server] TYPE I
    [01:18:54]: [aioftp.server] 200
    [01:18:54]: [aioftp.client] 200
    [01:18:54]: [aioftp.client] PASV
    [01:18:54]: [aioftp.server] PASV
    [01:18:54]: [aioftp.server] 227-listen socket created
    [01:18:54]: [aioftp.server] 227 (127,0,0,1,223,249)
    [01:18:54]: [aioftp.client] 227-listen socket created
    [01:18:54]: [aioftp.client] 227 (127,0,0,1,223,249)
    [01:18:54]: [aioftp.client] COLL 20
    [01:18:54]: [aioftp.server] COLL 20
    [01:18:54]: [aioftp.server] 150 coll transfer started
    [01:18:54]: [aioftp.client] 150 coll transfer started
    received: b'\x00\x00\x00\x00\x00\x00\x00\x00' 0
    received: b'\x00\x00\x00\x00\x00\x00\x00\x01' 1
    received: b'\x00\x00\x00\x00\x00\x00\x00\x02' 2
    received: b'\x00\x00\x00\x00\x00\x00\x00\x03' 3
    received: b'\x00\x00\x00\x00\x00\x00\x00\x04' 4
    received: b'\x00\x00\x00\x00\x00\x00\x00\x05' 5
    received: b'\x00\x00\x00\x00\x00\x00\x00\x06' 6
    received: b'\x00\x00\x00\x00\x00\x00\x00\x07' 7
    received: b'\x00\x00\x00\x00\x00\x00\x00\x08' 8
    received: b'\x00\x00\x00\x00\x00\x00\x00\t' 9
    received: b'\x00\x00\x00\x00\x00\x00\x00\n' 10
    received: b'\x00\x00\x00\x00\x00\x00\x00\x0b' 11
    received: b'\x00\x00\x00\x00\x00\x00\x00\x0c' 12
    received: b'\x00\x00\x00\x00\x00\x00\x00\r' 13
    received: b'\x00\x00\x00\x00\x00\x00\x00\x0e' 14
    received: b'\x00\x00\x00\x00\x00\x00\x00\x0f' 15
    received: b'\x00\x00\x00\x00\x00\x00\x00\x10' 16
    received: b'\x00\x00\x00\x00\x00\x00\x00\x11' 17
    received: b'\x00\x00\x00\x00\x00\x00\x00\x12' 18
    [01:18:54]: [aioftp.server] 200 coll transfer done
    received: b'\x00\x00\x00\x00\x00\x00\x00\x13' 19
    [01:18:54]: [aioftp.client] 200 coll transfer done
    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
    [01:18:54]: [aioftp.client] QUIT
    [01:18:54]: [aioftp.server] QUIT
    [01:18:54]: [aioftp.server] 221 bye
    [01:18:54]: [aioftp.server] closing connection from 127.0.0.1:48883
    [01:18:54]: [aioftp.client] 221 bye
    done

It is a good idea to look at the source code of :py:class:`aioftp.Server` in server.py to see when and why this or that technique is used.

Path abstraction layer
----------------------

Since file io is blocking, and aioftp tries to be a non-blocking ftp library, we need some abstraction layer for filesystem operations. That is why pathio exists. If you want to create your own pathio, you should inherit :py:class:`aioftp.AbstractPathIO` and override its methods.

User Manager
------------

The user manager's purpose is to separate the retrieval of user information (from a network or a database, say) from the server logic. You can create your own user manager by inheriting :py:class:`aioftp.AbstractUserManager` and overriding its methods.
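For instance, a minimal sketch of a user manager backed by a plain dict (a real one would query a database or a remote service instead; the method names and return shapes below follow aioftp's in-memory manager, so double-check them against the aioftp version you use):

::

    import aioftp

    class DictUserManager(aioftp.AbstractUserManager):

        def __init__(self, credentials, **kwargs):
            super().__init__(**kwargs)
            self.credentials = credentials  # {login: password}

        async def get_user(self, login):
            if login not in self.credentials:
                return self.GetUserResponse.ERROR, None, "no such username"
            user = aioftp.User(login, self.credentials[login])
            return self.GetUserResponse.PASSWORD_REQUIRED, user, "password required"

        async def authenticate(self, user, password):
            return self.credentials.get(user.login) == password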
The new user manager should be passed to server as `users` argument when initialize server. aioftp-0.21.4/docs/index.rst000066400000000000000000000113241432163100400156440ustar00rootroot00000000000000.. aioftp documentation master file, created by sphinx-quickstart on Fri Apr 17 16:21:03 2015. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. aioftp ====== ftp client/server for asyncio. .. _GitHub: https://github.com/aio-libs/aioftp Features -------- - Simple. - Extensible. - Client socks proxy via `siosocks `_ (`pip install aioftp[socks]`). Goals ----- - Minimum usable core. - Do not use deprecated or overridden commands and features (if possible). - Very high level api. Client use this commands: USER, PASS, ACCT, PWD, CWD, CDUP, MKD, RMD, MLSD, MLST, RNFR, RNTO, DELE, STOR, APPE, RETR, TYPE, PASV, ABOR, QUIT, REST, LIST (as fallback) Server support this commands: USER, PASS, QUIT, PWD, CWD, CDUP, MKD, RMD, MLSD, LIST (but it's not recommended to use it, cause it has no standard format), MLST, RNFR, RNTO, DELE, STOR, RETR, TYPE ("I" and "A"), PASV, ABOR, APPE, REST This subsets are enough for 99% of tasks, but if you need something, then you can easily extend current set of commands. Server benchmark ---------------- Compared with `pyftpdlib `_ and checked with its ftpbench script. aioftp 0.8.0 :: STOR (client -> server) 284.95 MB/sec RETR (server -> client) 408.44 MB/sec 200 concurrent clients (connect, login) 0.18 secs STOR (1 file with 200 idle clients) 287.52 MB/sec RETR (1 file with 200 idle clients) 382.05 MB/sec 200 concurrent clients (RETR 10.0M file) 13.33 secs 200 concurrent clients (STOR 10.0M file) 12.56 secs 200 concurrent clients (QUIT) 0.03 secs pyftpdlib 1.5.2 :: STOR (client -> server) 1235.56 MB/sec RETR (server -> client) 3960.21 MB/sec 200 concurrent clients (connect, login) 0.06 secs STOR (1 file with 200 idle clients) 1208.58 MB/sec RETR (1 file with 200 idle clients) 3496.03 MB/sec 200 concurrent clients (RETR 10.0M file) 0.55 secs 200 concurrent clients (STOR 10.0M file) 1.46 secs 200 concurrent clients (QUIT) 0.02 secs Dependencies ------------ - Python 3.7+ 0.13.0 is the last version which supports python 3.5.3+ 0.16.1 is the last version which supports python 3.6+ License ------- aioftp is offered under the Apache 2 license. Library installation -------------------- :: pip install aioftp Getting started --------------- Client example **WARNING** For all commands, which use some sort of «stats» or «listing», ``aioftp`` tries at first ``MLSx``-family commands (since they have structured, machine readable format for all platforms). But old/lazy/nasty servers do not implement this commands. In this case ``aioftp`` tries a ``LIST`` command, which have no standard format and can not be parsed in all cases. Take a look at `FileZilla `_ «directory listing» parser code. So, before creating new issue be sure this is not your case (you can check it with logs). Anyway, you can provide your own ``LIST`` parser routine (see the client documentation). .. 
code-block:: python import asyncio import aioftp async def get_mp3(host, port, login, password): async with aioftp.Client.context(host, port, login, password) as client: for path, info in (await client.list(recursive=True)): if info["type"] == "file" and path.suffix == ".mp3": await client.download(path) async def main(): tasks = [ asyncio.create_task(get_mp3("server1.com", 21, "login", "password")), asyncio.create_task(get_mp3("server2.com", 21, "login", "password")), asyncio.create_task(get_mp3("server3.com", 21, "login", "password")), ] await asyncio.wait(tasks) asyncio.run(main()) Server example .. code-block:: python import asyncio import aioftp async def main(): server = aioftp.Server([user], path_io_factory=path_io_factory) await server.run() asyncio.run(main()) Or just use simple server .. code-block:: shell python -m aioftp --help Further reading --------------- .. toctree:: :maxdepth: 2 client_tutorial server_tutorial developer_tutorial client_api server_api common_api path_io_api Indices and tables ------------------ * :ref:`genindex` * :ref:`search` aioftp-0.21.4/docs/make.bat000066400000000000000000000161141432163100400154120ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled echo. coverage to run coverage check of the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) REM Check if sphinx-build is available and fallback to Python version if any %SPHINXBUILD% 2> nul if errorlevel 9009 goto sphinx_python goto sphinx_ok :sphinx_python set SPHINXBUILD=python -m sphinx.__init__ %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) :sphinx_ok if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. 
The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\aioftp.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\aioftp.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %~dp0 echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. 
goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "coverage" ( %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage if errorlevel 1 exit /b 1 echo. echo.Testing of coverage in the sources finished, look at the ^ results in %BUILDDIR%/coverage/python.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end aioftp-0.21.4/docs/path_io_api.rst000066400000000000000000000007131432163100400170110ustar00rootroot00000000000000.. path_io_api: Path abstraction layer API ========================== .. autoclass :: aioftp.AbstractPathIO :members: :private-members: .. autoclass :: aioftp.pathio.AsyncPathIOContext .. autofunction :: aioftp.pathio.universal_exception .. autoclass :: aioftp.PathIO :show-inheritance: .. autoclass :: aioftp.AsyncPathIO :show-inheritance: .. autoclass :: aioftp.MemoryPathIO :show-inheritance: .. autoclass :: aioftp.PathIOError aioftp-0.21.4/docs/server_api.rst000066400000000000000000000013301432163100400166700ustar00rootroot00000000000000.. server_api: Server API ========== .. autoclass :: aioftp.Server :members: start, close, serve_forever, run :show-inheritance: .. autofunction :: aioftp.server.worker .. autoclass :: aioftp.User :members: .. autoclass :: aioftp.Permission :members: .. autoclass :: aioftp.AbstractUserManager :members: :exclude-members: GetUserResponse .. autoclass :: aioftp.server.MemoryUserManager :members: .. autoclass :: aioftp.Connection :members: :show-inheritance: .. autoclass :: aioftp.AvailableConnections :members: .. autoclass :: aioftp.ConnectionConditions :members: .. autoclass :: aioftp.PathConditions :members: .. autoclass :: aioftp.PathPermissions :members: aioftp-0.21.4/docs/server_tutorial.rst000066400000000000000000000115421432163100400177700ustar00rootroot00000000000000.. server_tutorial: Server tutorial =============== aioftp server is much more like a tool. You configure it, run and forget about it. Configuring server ------------------ At first you should create :class:`aioftp.Server` instance and start it :py:meth:`aioftp.Server.start` :: >>> server = aioftp.Server() >>> await server.start() Default arguments allow anonymous login and read/write current directory. So, there is one user with anonymous login and read/write permissions on "/" virtual path. Real path is current working directory. Dealing with users and permissions ---------------------------------- You can specify as much users as you want, just pass list of them when creating :class:`aioftp.Server` instance :class:`aioftp.User` :class:`aioftp.Permission` :: >>> users = ( ... aioftp.User( ... "Guido", ... "secret_password", ... home_path="/Guido", ... permissions=( ... aioftp.Permission("/", readable=False, writable=False), ... aioftp.Permission("/Guido", readable=True, writable=True), ... ) ... ), ... aioftp.User( ... home_path="/anon", ... permissions=( ... aioftp.Permission("/", readable=False, writable=False), ... aioftp.Permission("/anon", readable=True), ... ) ... ), ... 
)
    >>> server = aioftp.Server(users)
    >>> await server.start()

This will create two users: "Guido", whose home folder is "/Guido", who can read and write to it but can't read or write the root and other directories; and an anonymous user, whose home directory is "/anon" and who has read permission only.

Path abstraction layer
----------------------

aioftp provides an abstraction of file system operations. You can use the existing ones:

* :py:class:`aioftp.PathIO` — blocking path operations
* :py:class:`aioftp.AsyncPathIO` — non-blocking path operations; this is just the blocking one wrapped with :py:meth:`asyncio.BaseEventLoop.run_in_executor`. It's really slow, so it's better to avoid this path io layer.
* :py:class:`aioftp.MemoryPathIO` — in-memory implementation of a file system; this one is just a proof of concept and probably not as fast as it could be.

You can specify `path_io_factory` when creating an :py:class:`aioftp.Server` instance. The default factory is :py:class:`aioftp.PathIO`.

::

    >>> server = aioftp.Server(path_io_factory=aioftp.MemoryPathIO)
    >>> await server.start()

Dealing with timeouts
---------------------

There are four different timeouts you can specify:

* `socket_timeout` — timeout for low-level socket operations :py:meth:`asyncio.StreamReader.read`, :py:meth:`asyncio.StreamReader.readline` and :py:meth:`asyncio.StreamWriter.drain`. This one does not affect the read operation that awaits the next command.
* `path_timeout` — timeout for file system operations
* `idle_timeout` — timeout for the socket read operation when awaiting a command; in other words: how long a user can keep silent without sending commands
* `wait_future_timeout` — timeout for waiting on connection states (the main purpose is waiting for the passive connection)

Maximum connections
-------------------

The connection count can be limited:

* per server
* per user

The first via the server constructor

::

    >>> server = aioftp.Server(maximum_connections=3)

The second via the user class

::

    >>> users = (aioftp.User(maximum_connections=3),)
    >>> server = aioftp.Server(users)

Throttle
--------

The server has many options for read/write speed throttling:

* global per server
* per connection
* global per user
* per user connection

"Global per server" and "per connection" limits can be provided via the constructor

::

    >>> server = aioftp.Server(
    ...     read_speed_limit=1024 * 1024,
    ...     write_speed_limit=1024 * 1024,
    ...     read_speed_limit_per_connection=100 * 1024,
    ...     write_speed_limit_per_connection=100 * 1024
    ... )

User throttles can be provided via the user constructor

::

    >>> users = (
    ...     aioftp.User(
    ...         read_speed_limit=1024 * 1024,
    ...         write_speed_limit=1024 * 1024,
    ...         read_speed_limit_per_connection=100 * 1024,
    ...         write_speed_limit_per_connection=100 * 1024
    ...     ),
    ... )
    >>> server = aioftp.Server(users)

Stopping the server
-------------------

::

    >>> await server.close()

WARNING
-------

:py:meth:`aioftp.Server.list` uses :py:meth:`aioftp.Server.build_list_string`, which produces `LIST` strings with :py:meth:`datetime.datetime.strftime`. For this to work properly (in the part that formats the month abbreviation) the locale should be set to "C". For this reason, if you have a multithreaded app that uses some locale-dependent stuff, you should use the :py:meth:`aioftp.setlocale` context manager when dealing with locale in another thread.

Further reading
---------------

:doc:`server_api`
aioftp-0.21.4/ftpbench.py000066400000000000000000000445161432163100400152260ustar00rootroot00000000000000#!/usr/bin/env python

# Copyright (C) 2007-2016 Giampaolo Rodola' .
# Use of this source code is governed by MIT license that can be # found in the LICENSE file. """ FTP server benchmark script. In order to run this you must have a listening FTP server with a user with writing permissions configured. This is a stand-alone script which does not depend from pyftpdlib. It just requires python >= 2.6 (and optionally psutil to keep track of FTP server memory usage). Example usages: ftpbench -u USER -p PASSWORD ftpbench -u USER -p PASSWORD -H ftp.domain.com -P 21 # host / port ftpbench -u USER -p PASSWORD -b transfer ftpbench -u USER -p PASSWORD -b concurrence ftpbench -u USER -p PASSWORD -b all ftpbench -u USER -p PASSWORD -b concurrence -n 500 # 500 clients ftpbench -u USER -p PASSWORD -b concurrence -s 20M # file size ftpbench -u USER -p PASSWORD -b concurrence -p 3521 # memory usage """ # Some benchmarks (Linux 3.0.0, Intel core duo - 3.1 Ghz). # pyftpdlib 1.0.0: # # (starting with 6.7M of memory being used) # STOR (client -> server) 557.97 MB/sec 6.7M # RETR (server -> client) 1613.82 MB/sec 6.8M # 300 concurrent clients (connect, login) 1.20 secs 8.8M # STOR (1 file with 300 idle clients) 567.52 MB/sec 8.8M # RETR (1 file with 300 idle clients) 1561.41 MB/sec 8.8M # 300 concurrent clients (RETR 10.0M file) 3.26 secs 10.8M # 300 concurrent clients (STOR 10.0M file) 8.46 secs 12.6M # 300 concurrent clients (QUIT) 0.07 secs # # # proftpd 1.3.4a: # # (starting with 1.4M of memory being used) # STOR (client -> server) 554.67 MB/sec 3.2M # RETR (server -> client) 1517.12 MB/sec 3.2M # 300 concurrent clients (connect, login) 9.30 secs 568.6M # STOR (1 file with 300 idle clients) 484.11 MB/sec 570.6M # RETR (1 file with 300 idle clients) 1534.61 MB/sec 570.6M # 300 concurrent clients (RETR 10.0M file) 3.67 secs 568.6M # 300 concurrent clients (STOR 10.0M file) 11.21 secs 568.7M # 300 concurrent clients (QUIT) 0.43 secs # # # vsftpd 2.3.2 # # (starting with 352.0K of memory being used) # STOR (client -> server) 607.23 MB/sec 816.0K # RETR (server -> client) 1506.59 MB/sec 816.0K # 300 concurrent clients (connect, login) 18.91 secs 140.9M # STOR (1 file with 300 idle clients) 618.99 MB/sec 141.4M # RETR (1 file with 300 idle clients) 1402.48 MB/sec 141.4M # 300 concurrent clients (RETR 10.0M file) 3.64 secs 140.9M # 300 concurrent clients (STOR 10.0M file) 9.74 secs 140.9M # 300 concurrent clients (QUIT) 0.00 secs from __future__ import division, print_function import asynchat import asyncore import atexit import contextlib import ftplib import optparse import os import ssl import sys import time try: import resource except ImportError: resource = None try: import psutil except ImportError: psutil = None HOST = 'localhost' PORT = 21 USER = None PASSWORD = None TESTFN = "$testfile" BUFFER_LEN = 8192 SERVER_PROC = None TIMEOUT = None FILE_SIZE = "10M" SSL = False PY3 = sys.version_info >= (3, 0) server_memory = [] # python >= 2.7.9 SSLWantReadError = getattr(ssl, "SSLWantReadError", object()) SSLWantWriteError = getattr(ssl, "SSLWantWriteError", object()) # python <= 2.7.8 SSL_ERROR_WANT_READ = getattr(ssl, "SSL_ERROR_WANT_READ", object()) SSL_ERROR_WANT_WRITE = getattr(ssl, "SSL_ERROR_WANT_WRITE", object()) if not sys.stdout.isatty() or os.name != 'posix': def hilite(s, *args, **kwargs): return s else: # http://goo.gl/6V8Rm def hilite(string, ok=True, bold=False): """Return an highlighted version of 'string'.""" attr = [] if ok is None: # no color pass elif ok: # green attr.append('32') else: # red attr.append('31') if bold: attr.append('1') return 
'\x1b[%sm%s\x1b[0m' % (';'.join(attr), string) def print_bench(what, value, unit=""): s = "%s %s %-8s" % (hilite("%-50s" % what, ok=None, bold=0), hilite("%8.2f" % value), unit) if server_memory: s += "%s" % hilite(server_memory.pop()) print(s.strip()) # http://goo.gl/zeJZl def bytes2human(n, format="%(value).1f%(symbol)s"): """ >>> bytes2human(10000) '9K' >>> bytes2human(100001221) '95M' """ symbols = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') prefix = {} for i, s in enumerate(symbols[1:]): prefix[s] = 1 << (i + 1) * 10 for symbol in reversed(symbols[1:]): if n >= prefix[symbol]: value = float(n) / prefix[symbol] return format % locals() return format % dict(symbol=symbols[0], value=n) # http://goo.gl/zeJZl def human2bytes(s): """ >>> human2bytes('1M') 1048576 >>> human2bytes('1G') 1073741824 """ symbols = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') letter = s[-1:].strip().upper() num = s[:-1] assert num.isdigit() and letter in symbols, s num = float(num) prefix = {symbols[0]: 1} for i, s in enumerate(symbols[1:]): prefix[s] = 1 << (i + 1) * 10 return int(num * prefix[letter]) def register_memory(): """Register an approximation of memory used by FTP server process and all of its children. """ # XXX How to get a reliable representation of memory being used is # not clear. (rss - shared) seems kind of ok but we might also use # the private working set via get_memory_maps().private*. def get_mem(proc): if os.name == 'posix': mem = proc.memory_info_ex() counter = mem.rss if 'shared' in mem._fields: counter -= mem.shared return counter else: # TODO figure out what to do on Windows return proc.get_memory_info().rss if SERVER_PROC is not None: mem = get_mem(SERVER_PROC) for child in SERVER_PROC.children(): mem += get_mem(child) server_memory.append(bytes2human(mem)) def timethis(what): """"Utility function for making simple benchmarks (calculates time calls). It can be used either as a context manager or as a decorator. """ @contextlib.contextmanager def benchmark(): timer = time.clock if sys.platform == "win32" else time.time start = timer() yield stop = timer() res = (stop - start) print_bench(what, res, "secs") if hasattr(what, "__call__"): def timed(*args, **kwargs): with benchmark(): return what(*args, **kwargs) return timed else: return benchmark() def connect(): """Connect to FTP server, login and return an ftplib.FTP instance.""" ftp_class = ftplib.FTP if not SSL else ftplib.FTP_TLS ftp = ftp_class(timeout=TIMEOUT) ftp.connect(HOST, PORT) ftp.login(USER, PASSWORD) if SSL: ftp.prot_p() # secure data connection return ftp def retr(ftp): """Same as ftplib's retrbinary() but discard the received data.""" ftp.voidcmd('TYPE I') with contextlib.closing(ftp.transfercmd("RETR " + TESTFN)) as conn: recv_bytes = 0 while True: data = conn.recv(BUFFER_LEN) if not data: break recv_bytes += len(data) ftp.voidresp() def stor(ftp=None): """Same as ftplib's storbinary() but just sends dummy data instead of reading it from a real file. 
""" if ftp is None: ftp = connect() quit = True else: quit = False ftp.voidcmd('TYPE I') with contextlib.closing(ftp.transfercmd("STOR " + TESTFN)) as conn: chunk = b'x' * BUFFER_LEN total_sent = 0 while True: sent = conn.send(chunk) total_sent += sent if total_sent >= FILE_SIZE: break ftp.voidresp() if quit: ftp.quit() return ftp def bytes_per_second(ftp, retr=True): """Return the number of bytes transmitted in 1 second.""" tot_bytes = 0 if retr: def request_file(): ftp.voidcmd('TYPE I') conn = ftp.transfercmd("retr " + TESTFN) return conn with contextlib.closing(request_file()) as conn: register_memory() stop_at = time.time() + 1.0 while stop_at > time.time(): chunk = conn.recv(BUFFER_LEN) if not chunk: a = time.time() ftp.voidresp() conn.close() conn = request_file() stop_at += time.time() - a tot_bytes += len(chunk) try: while chunk: chunk = conn.recv(BUFFER_LEN) ftp.voidresp() conn.close() except (ftplib.error_temp, ftplib.error_perm): pass else: ftp.voidcmd('TYPE I') with contextlib.closing(ftp.transfercmd("STOR " + TESTFN)) as conn: register_memory() chunk = b'x' * BUFFER_LEN stop_at = time.time() + 1 while stop_at > time.time(): tot_bytes += conn.send(chunk) ftp.voidresp() return tot_bytes def cleanup(): ftp = connect() try: if TESTFN in ftp.mlsd(): ftp.delete(TESTFN) except (ftplib.error_perm, ftplib.error_temp) as err: msg = "could not delete %r test file on cleanup: %r" % (TESTFN, err) print(hilite(msg, ok=False), file=sys.stderr) ftp.quit() def bench_stor(ftp=None, title="STOR (client -> server)"): if ftp is None: ftp = connect() tot_bytes = bytes_per_second(ftp, retr=False) print_bench(title, round(tot_bytes / 1024.0 / 1024.0, 2), "MB/sec") ftp.quit() def bench_retr(ftp=None, title="RETR (server -> client)"): if ftp is None: ftp = connect() tot_bytes = bytes_per_second(ftp, retr=True) print_bench(title, round(tot_bytes / 1024.0 / 1024.0, 2), "MB/sec") ftp.quit() def bench_multi(howmany): # The OS usually sets a limit of 1024 as the maximum number of # open file descriptors for the current process. # Let's set the highest number possible, just to be sure. 
if howmany > 500 and resource is not None: soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) resource.setrlimit(resource.RLIMIT_NOFILE, (hard, hard)) def bench_multi_connect(): with timethis("%i concurrent clients (connect, login)" % howmany): clients = [] for x in range(howmany): clients.append(connect()) register_memory() return clients def bench_multi_retr(clients): stor(clients[0]) with timethis("%s concurrent clients (RETR %s file)" % ( howmany, bytes2human(FILE_SIZE))): for ftp in clients: ftp.voidcmd('TYPE I') conn = ftp.transfercmd("RETR " + TESTFN) AsyncReader(conn) register_memory() asyncore.loop(use_poll=True) for ftp in clients: ftp.voidresp() def bench_multi_stor(clients): with timethis("%s concurrent clients (STOR %s file)" % ( howmany, bytes2human(FILE_SIZE))): for ftp in clients: ftp.voidcmd('TYPE I') conn = ftp.transfercmd("STOR " + TESTFN) AsyncWriter(conn, FILE_SIZE) register_memory() asyncore.loop(use_poll=True) for ftp in clients: ftp.voidresp() def bench_multi_quit(clients): for ftp in clients: AsyncQuit(ftp.sock) with timethis("%i concurrent clients (QUIT)" % howmany): asyncore.loop(use_poll=True) clients = bench_multi_connect() bench_stor(title="STOR (1 file with %s idle clients)" % len(clients)) bench_retr(title="RETR (1 file with %s idle clients)" % len(clients)) bench_multi_retr(clients) bench_multi_stor(clients) bench_multi_quit(clients) @contextlib.contextmanager def handle_ssl_want_rw_errs(): try: yield except (SSLWantReadError, SSLWantWriteError) as err: if DEBUG: print(err) except ssl.SSLError as err: if err.args[0] in (SSL_ERROR_WANT_READ, SSL_ERROR_WANT_WRITE): if DEBUG: print(err) else: raise class AsyncReader(asyncore.dispatcher): """Just read data from a connected socket, asynchronously.""" def __init__(self, sock): asyncore.dispatcher.__init__(self, sock) def handle_read(self): if SSL: with handle_ssl_want_rw_errs(): chunk = self.socket.recv(65536) else: chunk = self.socket.recv(65536) if not chunk: self.close() def handle_close(self): self.close() def handle_error(self): raise class AsyncWriter(asyncore.dispatcher): """Just write dummy data to a connected socket, asynchronously.""" def __init__(self, sock, size): asyncore.dispatcher.__init__(self, sock) self.size = size self.sent = 0 self.chunk = b'x' * BUFFER_LEN def handle_write(self): if SSL: with handle_ssl_want_rw_errs(): self.sent += asyncore.dispatcher.send(self, self.chunk) else: self.sent += asyncore.dispatcher.send(self, self.chunk) if self.sent >= self.size: self.handle_close() def handle_error(self): raise class AsyncQuit(asynchat.async_chat): def __init__(self, sock): asynchat.async_chat.__init__(self, sock) self.in_buffer = [] self.set_terminator(b'\r\n') self.push(b'QUIT\r\n') def collect_incoming_data(self, data): self.in_buffer.append(data) def found_terminator(self): self.handle_close() def handle_error(self): raise class OptFormatter(optparse.IndentedHelpFormatter): def format_epilog(self, s): return s.lstrip() def format_option(self, option): result = [] opts = self.option_strings[option] result.append(' %s\n' % opts) if option.help: help_text = ' %s\n\n' % self.expand_default(option) result.append(help_text) return ''.join(result) def main(): global HOST, PORT, USER, PASSWORD, SERVER_PROC, TIMEOUT, SSL, FILE_SIZE, \ DEBUG USAGE = "%s -u USERNAME -p PASSWORD [-H] [-P] [-b] [-n] [-s] [-k] " \ "[-t] [-d] [-S]" % (os.path.basename(__file__)) parser = optparse.OptionParser(usage=USAGE, epilog=__doc__[__doc__.find('Example'):], formatter=OptFormatter()) parser.add_option('-u', 
'--user', dest='user', help='username') parser.add_option('-p', '--pass', dest='password', help='password') parser.add_option('-H', '--host', dest='host', default=HOST, help='hostname') parser.add_option('-P', '--port', dest='port', default=PORT, help='port', type=int) parser.add_option('-b', '--benchmark', dest='benchmark', default='transfer', help="benchmark type ('transfer', 'download', 'upload', " "'concurrence', 'all')") parser.add_option('-n', '--clients', dest='clients', default=200, type="int", help="number of concurrent clients used by " "'concurrence' benchmark") parser.add_option('-s', '--filesize', dest='filesize', default="10M", help="file size used by 'concurrence' benchmark " "(e.g. '10M')") parser.add_option('-k', '--pid', dest='pid', default=None, type="int", help="the PID of the FTP server process, to track its " "memory usage") parser.add_option('-t', '--timeout', dest='timeout', default=TIMEOUT, type="int", help="the socket timeout") parser.add_option('-d', '--debug', action='store_true', dest='debug', help="whether to print debugging info") parser.add_option('-S', '--ssl', action='store_true', dest='ssl', help="whether to use FTPS") options, args = parser.parse_args() if not options.user or not options.password: sys.exit(USAGE) else: USER = options.user PASSWORD = options.password HOST = options.host PORT = options.port TIMEOUT = options.timeout SSL = bool(options.ssl) DEBUG = options.debug if SSL and sys.version_info < (2, 7): sys.exit("--ssl option requires python >= 2.7") try: FILE_SIZE = human2bytes(options.filesize) except (ValueError, AssertionError): parser.error("invalid file size %r" % options.filesize) if options.pid is not None: if psutil is None: raise ImportError("-k option requires psutil module") SERVER_PROC = psutil.Process(options.pid) # before starting make sure we have write permissions ftp = connect() conn = ftp.transfercmd("STOR " + TESTFN) conn.close() ftp.voidresp() ftp.delete(TESTFN) ftp.quit() atexit.register(cleanup) # start benchmark if SERVER_PROC is not None: register_memory() print("(starting with %s of memory being used)" % ( hilite(server_memory.pop()))) if options.benchmark == 'download': stor() bench_retr() elif options.benchmark == 'upload': bench_stor() elif options.benchmark == 'transfer': bench_stor() bench_retr() elif options.benchmark == 'concurrence': bench_multi(options.clients) elif options.benchmark == 'all': bench_stor() bench_retr() bench_multi(options.clients) else: sys.exit("invalid 'benchmark' parameter %r" % options.benchmark) if __name__ == '__main__': main() aioftp-0.21.4/history.rst000066400000000000000000000231511432163100400153070ustar00rootroot00000000000000x.x.x (xx-xx-xxxx) ------------------ 0.21.4 (13-10-2022) ------------------- - tests: use `pytest_asyncio` `strict` mode and proper decorations (#155) - setup/tests: set low bound for version of `async-timeout` (#159) 0.21.3 (15-07-2022) ------------------- - server/`LIST`: prevent broken links from being listed, since they can't be used with `stat` - server: make `User.get_permissions` async 0.21.2 (22-04-2022) ------------------- - tests: remove exception representation check 0.21.1 (20-04-2022) ------------------- - tests: replace more specific `ConnectionRefusedError` with `OSError` for compatibility with FreeBSD (#152) Thanks to `AMDmi3 <https://github.com/AMDmi3>`_ 0.21.0 (18-03-2022) ------------------ - server: support PASV response with custom address (#150) Thanks to `janneronkko <https://github.com/janneronkko>`_ 0.20.1 (15-02-2022) ------------------ - server: fix
real directory resolve for windows (#147) Thanks to `ported-pw <https://github.com/ported-pw>`_ 0.20.0 (27-12-2021) ------------------ - add client argument to set priority of custom list parser (`parse_list_line_custom_first`) (#145) - do not ignore failed parsing of list response (#144) Thanks to `spolloni <https://github.com/spolloni>`_ 0.19.0 (08-10-2021) ------------------ - add client connection timeout (#140) - remove explicit coroutine passing to `asyncio.wait` (#134) Thanks to `decaz <https://github.com/decaz>`_ 0.18.1 (03-10-2020) ------------------ - sync tests with new `siosocks` (#127) - some docs fixes - log level changes 0.18.0 (03-09-2020) ------------------ - server: fix `MLSX` time format (#125) - server: resolve server address from connection (#125) Thanks to `PonyPC <https://github.com/PonyPC>`_ 0.17.2 (21-08-2020) ------------------ - server: fix broken `python -m aioftp` after 3.7 migration 0.17.1 (14-08-2020) ------------------ - common/stream: add `readexactly` proxy method 0.17.0 (11-08-2020) ------------------ - tests: fix test_unlink_on_dir on POSIX compatible systems (#118) - docs: fix extra parentheses (#122) - client: replace `ClientSession` with `Client.context` Thanks to `AMDmi3 <https://github.com/AMDmi3>`_, `Olegt0rr <https://github.com/Olegt0rr>`_ 0.16.1 (09-07-2020) ------------------ - client: strip date before parsing (#113) - client: logger no longer prints out plaintext password (#114) - client: add custom passive commands to client (#116) Thanks to `ndhansen <https://github.com/ndhansen>`_ 0.16.0 (11-03-2020) ------------------ - server: remove obsolete `pass` to `pass_` command renaming Thanks to `Puddly <https://github.com/Puddly>`_ - client: fix leap year bug at `parse_ls_date` method - all: add base exception class Thanks to `decaz <https://github.com/decaz>`_ 0.15.0 (07-01-2020) ------------------- - server: use explicit mapping of available commands for security reasons Thanks to `Puddly` for report 0.14.0 (30-12-2019) ------------------- - client: add socks proxy support via `siosocks <https://github.com/pohmelie/siosocks>`_ (#94) - client: add custom `list` parser (#95) Thanks to `purpleskyfall <https://github.com/purpleskyfall>`_, `VyachAp <https://github.com/VyachAp>`_ 0.13.0 (24-03-2019) ------------------- - client: add windows list parser (#82) - client/server: fix implicit ssl mode (#89) - tests: move to pytest - all: small fixes Thanks to `jw4js <https://github.com/jw4js>`_, `PonyPC <https://github.com/PonyPC>`_ 0.12.0 (15-10-2018) ------------------- - all: add implicit ftps mode support (#81) Thanks to `alxpy <https://github.com/alxpy>`_, `webknjaz <https://github.com/webknjaz>`_ 0.11.1 (30-08-2018) ------------------- - server: fix memory pathio is not shared between connections - client: add argument to `list` to allow manually specifying raw command (#78) Thanks to `thirtyseven <https://github.com/thirtyseven>`_ 0.11.0 (04-07-2018) ------------------- - client: fix parsing `ls` modify time (#60) - all: add python3.7 support (`__aiter__` must be regular function since now) (#76, #77) Thanks to `saulcruz <https://github.com/saulcruz>`_, `NickG123 <https://github.com/NickG123>`_, `rsichny <https://github.com/rsichny>`_, `Modelmat <https://github.com/Modelmat>`_, `webknjaz <https://github.com/webknjaz>`_ 0.10.1 (01-03-2018) ------------------- - client: more flexible `EPSV` response parsing Thanks to `p4l1ly <https://github.com/p4l1ly>`_ 0.10.0 (03-02-2018) ------------------- - server: fix ipv6 peername unpack - server: `connection` object is accessible from path-io layer since now - main: add command line argument to set version of IP protocol - setup: fix failed test session return zero exit code - client: fix `download`-`mkdir` (issue #68) - client/server: add initial ipv6 support (issue #63) - client: change `PASV` to `EPSV` with fallback to `PASV` Thanks to `jacobtomlinson <https://github.com/jacobtomlinson>`_, `mbkr1992 <https://github.com/mbkr1992>`_ 0.9.0 (04-01-2018) ------------------ - server: fix server address in passive mode - server: do not reraise dispatcher exceptions - server: remove `wait_closed`, `close` is coroutine since now Thanks to `yieyu <https://github.com/yieyu>`_, `jkr78 <https://github.com/jkr78>`_ 0.8.1
(08-10-2017) ------------------ - client: ignore LIST lines, which can't be parsed Thanks to `bachya <https://github.com/bachya>`_ 0.8.0 (06-08-2017) ------------------ - client/server: add explicit encoding Thanks to `anan-lee <https://github.com/anan-lee>`_ 0.7.0 (17-04-2017) ------------------ - client: add base `LIST` parsing - client: add `client.list` fallback on `MLSD` «not implemented» status code to `LIST` - client: add `client.stat` fallback on `MLST` «not implemented» status code to `LIST` - common: add `setlocale` context manager for `LIST` parsing, formatting and thread-safe usage of locale - server: add `LIST` support for non-English locales - server: fix `PASV` sequences before data transfer (latest `PASV` wins) Thanks to `jw4js <https://github.com/jw4js>`_, `rsichny <https://github.com/rsichny>`_ 0.6.3 (02-03-2017) ------------------ - `stream.read` will read whole data by default (as `asyncio.StreamReader.read`) Thanks to `sametmax <https://github.com/sametmax>`_ 0.6.2 (27-02-2017) ------------------ - replace `docopt` with `argparse` - add `syst` server command - improve client `list` documentation Thanks to `thelostt <https://github.com/thelostt>`_, `yieyu <https://github.com/yieyu>`_ 0.6.1 (16-04-2016) ------------------ - fix documentation main page client example 0.6.0 (16-04-2016) ------------------ - fix `modified time` field for `list` command result - add `ClientSession` context - add `REST` command to server and client Thanks to `rsichny <https://github.com/rsichny>`_ 0.5.0 (12-02-2016) ------------------ - change development status to production/stable - add configuration to restrict port range for passive server - build LIST string with stat.filemode Thanks to `rsichny <https://github.com/rsichny>`_ 0.4.1 (21-12-2015) ------------------ - improved performance on non-throttled streams - default path io layer for client and server is PathIO since now - added benchmark result 0.4.0 (17-12-2015) ------------------ - `async for` for pathio list function - async context manager for streams and pathio files io - python 3.5 only - logging provided by "aioftp.client" and "aioftp.server" - all path errors are now reraised as PathIOError - server does not drop connection on path io errors since now, but return "451" code 0.3.1 (09-11-2015) ------------------ - fixed setup.py long-description 0.3.0 (09-11-2015) ------------------ - added handling of OSError in dispatcher - fixed client/server close not opened file in finally - handling PASS after login - handling multiple USER commands - user manager for dealing with user accounts - fixed client usage WindowsPath instead of PurePosixPath on windows for virtual paths - client protected from "0.0.0.0" ip address in PASV - client use pathio - throttle deals with multiple connections - fixed throttle bug when slow path io (#20) - path io timeouts moved to pathio.py - with_timeout decorator for methods - StreamIO deals with timeouts - all socket streams are ThrottleStreamIO since now Thanks to `rsichny <https://github.com/rsichny>`_, `tier2003 <https://github.com/tier2003>`_ 0.2.0 (22-09-2015) ------------------ - client throttle - new server dispatcher (can wait for connections) - maximum connections per user/server - new client stream api - end of line character "\r\n" everywhere - setup.py support - tests via "python setup.py test" - "sh" module removed from test requirements Thanks to `rsichny <https://github.com/rsichny>`_, `jettify <https://github.com/jettify>`_ 0.1.7 (03-09-2015) ------------------ - bugfix on windows (can't make passive connection to 0.0.0.0:port) - default host is "127.0.0.1" since now - silently ignoring ipv6 sockets in server binding list 0.1.6 (03-09-2015) ------------------ - bugfix on windows (ipv6 address comes first in list of bound sockets) 0.1.5 (01-09-2015) ------------------ - bugfix server on windows (PurePosixPath for virtual path) 0.1.4
(31-08-2015) ------------------ - close data connection after client disconnects Thanks to `rsichny `_ 0.1.3 (28-08-2015) ------------------ - pep8 "Method definitions inside a class are surrounded by a single blank line" - MemoryPathIO.Stats should include st_mode Thanks to `rsichny `_ 0.1.2 (11-06-2015) ------------------ - aioftp now executes like script ("python -m aioftp") 0.1.1 (10-06-2015) ------------------ - typos in server strings - docstrings for path abstraction layer 0.1.0 (05-06-2015) ------------------ - server functionality - path abstraction layer 0.0.1 (24-04-2015) ------------------ - first release (client only) aioftp-0.21.4/license.txt000066400000000000000000000261351432163100400152440ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) 
The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. aioftp-0.21.4/pytest.ini000066400000000000000000000005071432163100400151050ustar00rootroot00000000000000[pytest] addopts = -x --durations=10 --cov-config=.coveragerc --cov=aioftp --cov-report=xml --cov-report=term --cov-report=term-missing testpaths = tests log_format = %(asctime)s.%(msecs)03d %(name)-20s %(levelname)-8s %(filename)-15s %(lineno)-4d %(message)s log_date_format = %H:%M:%S log_level = DEBUG asyncio_mode = strict aioftp-0.21.4/setup.cfg000066400000000000000000000012651432163100400146770ustar00rootroot00000000000000[metadata] name = aioftp version = attr: aioftp.__version__ url = https://github.com/aio-libs/aioftp author = pohmelie author_email = multisosnooley@gmail.com description = ftp client/server for asyncio long_description = file: README.rst license = Apache-2.0 license_file = license.txt classifiers = Programming Language :: Python Programming Language :: Python :: 3 Development Status :: 5 - Production/Stable Topic :: Internet :: File Transfer Protocol (FTP) [options] packages = aioftp python_requires = >= 3.7 [options.extras_require] socks = siosocks >= 0.2.0 tests = pytest pytest-asyncio pytest-cov trustme async_timeout >= 4.0.0 siosocks aioftp-0.21.4/setup.py000066400000000000000000000000471432163100400145650ustar00rootroot00000000000000from setuptools import setup setup() aioftp-0.21.4/tests/000077500000000000000000000000001432163100400142145ustar00rootroot00000000000000aioftp-0.21.4/tests/conftest.py000066400000000000000000000143561432163100400164240ustar00rootroot00000000000000import ssl import collections import contextlib import tempfile import asyncio import math import time import functools import socket from pathlib import Path import pytest import pytest_asyncio import trustme from async_timeout import timeout import aioftp from siosocks.io.asyncio import socks_server_handler # No ssl tests since https://bugs.python.org/issue36098 ca = trustme.CA() server_cert = ca.issue_server_cert("127.0.0.1", "::1") ssl_server = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) server_cert.configure_cert(ssl_server) ssl_client = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) ca.configure_trust(ssl_client) class Container: def __init__(self, *args, **kwargs): self.args = args self.kwargs = kwargs @pytest.fixture def Client(): return Container @pytest.fixture def Server(): return Container def _wrap_with_defaults(kwargs): test_defaults = dict( path_io_factory=aioftp.MemoryPathIO, ) return collections.ChainMap(kwargs, test_defaults) @pytest.fixture(params=["127.0.0.1", "::1"]) def pair_factory(request): class Factory: def __init__(self, client=None, server=None, *, connected=True, logged=True, do_quit=True, host=request.param, 
server_factory=aioftp.Server, client_factory=aioftp.Client): if client is None: client = Container() self.client = client_factory(*client.args, **_wrap_with_defaults(client.kwargs)) if server is None: server = Container() self.server = server_factory(*server.args, **_wrap_with_defaults(server.kwargs)) self.connected = connected self.logged = logged self.do_quit = do_quit self.host = host self.timeout = timeout(1) async def make_server_files(self, *paths, size=None, atom=b"-"): if size is None: size = aioftp.DEFAULT_BLOCK_SIZE * 3 data = atom * size for p in paths: await self.client.make_directory(Path(p).parent) async with self.client.upload_stream(p) as stream: await stream.write(data) async def make_client_files(self, *paths, size=None, atom=b"-"): if size is None: size = aioftp.DEFAULT_BLOCK_SIZE * 3 data = atom * size for p in map(Path, paths): await self.client.path_io.mkdir(p.parent, parents=True, exist_ok=True) async with self.client.path_io.open(p, mode="wb") as f: await f.write(data) async def server_paths_exists(self, *paths): values = [] for p in paths: values.append(await self.client.exists(p)) if all(values): return True if any(values): raise ValueError("Mixed exists/not exists list") return False async def client_paths_exists(self, *paths): values = [] for p in paths: values.append(await self.client.path_io.exists(Path(p))) if all(values): return True if any(values): raise ValueError("Mixed exists/not exists list") return False async def __aenter__(self): await self.timeout.__aenter__() await self.server.start(host=self.host) if self.connected: await self.client.connect(self.server.server_host, self.server.server_port) if self.logged: await self.client.login() return self async def __aexit__(self, *exc_info): if self.connected and self.do_quit: await self.client.quit() self.client.close() await self.server.close() await self.timeout.__aexit__(*exc_info) return Factory @pytest.fixture def expect_codes_in_exception(): @contextlib.contextmanager def context(*codes): try: yield except aioftp.StatusCodeError as e: assert set(e.received_codes) == set(codes) else: raise RuntimeError("There was no exception") return context @pytest.fixture(params=[aioftp.MemoryPathIO, aioftp.PathIO, aioftp.AsyncPathIO]) def path_io(request): return request.param() @pytest.fixture def temp_dir(path_io): if isinstance(path_io, aioftp.MemoryPathIO): yield Path("/") else: with tempfile.TemporaryDirectory() as name: yield Path(name) class Sleep: def __init__(self): self.delay = 0 self.first_sleep = None async def sleep(self, delay, result=None, **kwargs): if self.first_sleep is None: self.first_sleep = time.monotonic() delay = (time.monotonic() - self.first_sleep) + delay self.delay = max(self.delay, delay) return result def is_close(self, delay, *, rel_tol=0.05, abs_tol=0.5): ok = math.isclose(self.delay, delay, rel_tol=rel_tol, abs_tol=abs_tol) if not ok: print(f"latest sleep: {self.delay}; expected delay: " f"{delay}; rel: {rel_tol}") return ok @pytest.fixture def skip_sleep(monkeypatch): with monkeypatch.context() as m: sleeper = Sleep() m.setattr(asyncio, "sleep", sleeper.sleep) yield sleeper @pytest_asyncio.fixture(params=[("127.0.0.1", socket.AF_INET), ("::1", socket.AF_INET6)]) async def socks(request, unused_tcp_port): handler = functools.partial( socks_server_handler, allowed_versions={5}, username="foo", password="bar", ) Socks = collections.namedtuple("Socks", "host port server") host, family = request.param port = unused_tcp_port server = await asyncio.start_server(handler, host=host, 
port=port, family=family) yield Socks(host, port, server) server.close() await server.wait_closed() aioftp-0.21.4/tests/test_abort.py000066400000000000000000000054731432163100400167450ustar00rootroot00000000000000import asyncio import itertools import pathlib import pytest import aioftp @pytest.mark.asyncio async def test_abort_stor(pair_factory): async with pair_factory() as pair: stream = await pair.client.upload_stream("test.txt") with pytest.raises((ConnectionResetError, BrokenPipeError)): while True: await stream.write(b"-" * aioftp.DEFAULT_BLOCK_SIZE) await pair.client.abort() class SlowReadMemoryPathIO(aioftp.MemoryPathIO): async def read(self, *args, **kwargs): await asyncio.sleep(0.01) return await super().read(*args, **kwargs) @pytest.mark.asyncio async def test_abort_retr(pair_factory, Server): s = Server(path_io_factory=SlowReadMemoryPathIO) async with pair_factory(None, s) as pair: await pair.make_server_files("test.txt") stream = await pair.client.download_stream("test.txt") for i in itertools.count(): data = await stream.read(aioftp.DEFAULT_BLOCK_SIZE) if not data: stream.close() break if i == 0: await pair.client.abort() @pytest.mark.asyncio async def test_abort_retr_no_wait(pair_factory, Server, expect_codes_in_exception): s = Server(path_io_factory=SlowReadMemoryPathIO) async with pair_factory(None, s) as pair: await pair.make_server_files("test.txt") stream = await pair.client.download_stream("test.txt") with expect_codes_in_exception("426"): for i in itertools.count(): data = await stream.read(aioftp.DEFAULT_BLOCK_SIZE) if not data: await stream.finish() break if i == 0: await pair.client.abort(wait=False) @pytest.mark.asyncio async def test_nothing_to_abort(pair_factory): async with pair_factory() as pair: await pair.client.abort() class SlowListMemoryPathIO(aioftp.MemoryPathIO): async def is_file(self, *a, **kw): return True def list(self, *args, **kwargs): class Lister(aioftp.AbstractAsyncLister): async def __anext__(cls): await asyncio.sleep(0.01) return pathlib.PurePath("/test.txt") return Lister() async def stat(self, *a, **kw): class Stat: st_size = 0 st_mtime = 0 st_ctime = 0 return Stat @pytest.mark.asyncio async def test_mlsd_abort(pair_factory, Server): s = Server(path_io_factory=SlowListMemoryPathIO) async with pair_factory(None, s) as pair: cwd = await pair.client.get_current_directory() assert cwd == pathlib.PurePosixPath("/") async for path, info in pair.client.list(): await pair.client.abort() break aioftp-0.21.4/tests/test_client_side_socks.py000066400000000000000000000012001432163100400213020ustar00rootroot00000000000000import pytest from siosocks.exceptions import SocksException @pytest.mark.asyncio async def test_socks_success(pair_factory, Client, socks): client = Client(socks_host=socks.host, socks_port=socks.port, socks_version=5, username="foo", password="bar") async with pair_factory(client): pass @pytest.mark.asyncio async def test_socks_fail(pair_factory, Client, socks): client = Client(socks_host=socks.host, socks_port=socks.port, socks_version=5, username="bar", password="bar") with pytest.raises(SocksException): async with pair_factory(client): pass aioftp-0.21.4/tests/test_connection.py000066400000000000000000000235411432163100400177710ustar00rootroot00000000000000import asyncio import ipaddress import pytest import aioftp @pytest.mark.asyncio async def test_client_without_server(pair_factory, unused_tcp_port_factory): f = pair_factory(connected=False, logged=False, do_quit=False) async with f as pair: pass with pytest.raises(OSError): 
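        # `OSError` (rather than the narrower `ConnectionRefusedError`) is
        # the expected exception here: on FreeBSD a refused connection may
        # surface as a different `OSError` subclass (see the 0.21.1
        # changelog entry, #152).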
await pair.client.connect("127.0.0.1", unused_tcp_port_factory()) @pytest.mark.asyncio async def test_connection(pair_factory): async with pair_factory(connected=True, logged=False, do_quit=False): pass @pytest.mark.asyncio async def test_quit(pair_factory): async with pair_factory(connected=True, logged=False, do_quit=True): pass @pytest.mark.asyncio async def test_not_implemented(pair_factory, expect_codes_in_exception): async with pair_factory() as pair: with expect_codes_in_exception("502"): await pair.client.command("FOOBAR", "2xx", "1xx") @pytest.mark.asyncio async def test_type_success(pair_factory, expect_codes_in_exception): async with pair_factory() as pair: await pair.client.get_passive_connection("A") @pytest.mark.asyncio async def test_custom_passive_commands(pair_factory): async with pair_factory(host="127.0.0.1") as pair: pair.client._passive_commands = None await pair.client.get_passive_connection( "A", commands=["pasv", "epsv"] ) @pytest.mark.asyncio async def test_extra_pasv_connection(pair_factory): async with pair_factory() as pair: r, w = await pair.client.get_passive_connection() er, ew = await pair.client.get_passive_connection() with pytest.raises((ConnectionResetError, BrokenPipeError)): while True: w.write(b"-" * aioftp.DEFAULT_BLOCK_SIZE) await w.drain() @pytest.mark.parametrize("method", ["epsv", "pasv"]) @pytest.mark.asyncio async def test_closing_passive_connection(pair_factory, method): async with pair_factory(host="127.0.0.1") as pair: r, w = await pair.client.get_passive_connection(commands=[method]) host, port, *_ = w.transport.get_extra_info("peername") nr, nw = await asyncio.open_connection(host, port) with pytest.raises((ConnectionResetError, BrokenPipeError)): while True: nw.write(b"-" * aioftp.DEFAULT_BLOCK_SIZE) await nw.drain() @pytest.mark.asyncio async def test_pasv_connection_ports_not_added(pair_factory): async with pair_factory() as pair: r, w = await pair.client.get_passive_connection() assert pair.server.available_data_ports is None @pytest.mark.asyncio async def test_pasv_connection_ports(pair_factory, Server, unused_tcp_port_factory): ports = [unused_tcp_port_factory(), unused_tcp_port_factory()] async with pair_factory(None, Server(data_ports=ports)) as pair: r, w = await pair.client.get_passive_connection() host, port, *_ = w.transport.get_extra_info("peername") assert port in ports assert pair.server.available_data_ports.qsize() == 1 @pytest.mark.asyncio async def test_data_ports_remains_empty(pair_factory, Server): async with pair_factory(None, Server(data_ports=[])) as pair: assert pair.server.available_data_ports.qsize() == 0 @pytest.mark.asyncio async def test_pasv_connection_port_reused(pair_factory, Server, unused_tcp_port): s = Server(data_ports=[unused_tcp_port]) async with pair_factory(None, s) as pair: r, w = await pair.client.get_passive_connection() host, port, *_ = w.transport.get_extra_info("peername") assert port == unused_tcp_port assert pair.server.available_data_ports.qsize() == 0 w.close() await pair.client.quit() pair.client.close() assert pair.server.available_data_ports.qsize() == 1 await pair.client.connect(pair.server.server_host, pair.server.server_port) await pair.client.login() r, w = await pair.client.get_passive_connection() host, port, *_ = w.transport.get_extra_info("peername") assert port == unused_tcp_port assert pair.server.available_data_ports.qsize() == 0 @pytest.mark.asyncio async def test_pasv_connection_pasv_forced_response_address(pair_factory, Server, unused_tcp_port): def ipv4_used(): try: 
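            # `ipaddress.IPv4Address` raises `ValueError` for anything that
            # is not a valid IPv4 literal (e.g. the "::1" host from the
            # parametrized fixture), which is how the IPv4 run of this test
            # is told apart from the IPv6 one.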
ipaddress.IPv4Address(pair.host) return True except ValueError: return False async with pair_factory( server=Server(ipv4_pasv_forced_response_address='127.0.0.2'), ) as pair: assert pair.server.ipv4_pasv_forced_response_address == '127.0.0.2' if ipv4_used(): # The connection fails here because the server starts to listen for # the passive connections on the host (IPv4 address) that is used # by the control channel. In reality, if the server is behind NAT, # the server is reached with the defined external IPv4 address, # i.e. we can check that the connection to # pair.server.ipv4_pasv_forced_response_address failed to know that # the server returned correct external IP with pytest.raises(OSError): await pair.client.get_passive_connection(commands=['pasv']) # With epsv the connection should open as that does not use the # external IPv4 address but just tells the client the port to connect # to await pair.client.get_passive_connection(commands=['epsv']) @pytest.mark.parametrize("method", ["epsv", "pasv"]) @pytest.mark.asyncio async def test_pasv_connection_no_free_port(pair_factory, Server, expect_codes_in_exception, method): s = Server(data_ports=[]) async with pair_factory(None, s, do_quit=False, host="127.0.0.1") as pair: assert pair.server.available_data_ports.qsize() == 0 with expect_codes_in_exception("421"): await pair.client.get_passive_connection(commands=[method]) @pytest.mark.asyncio async def test_pasv_connection_busy_port(pair_factory, Server, unused_tcp_port_factory): ports = [unused_tcp_port_factory(), unused_tcp_port_factory()] async with pair_factory(None, Server(data_ports=ports)) as pair: conflicting_server = await asyncio.start_server( lambda r, w: w.close(), host=pair.server.server_host, port=ports[0], ) r, w = await pair.client.get_passive_connection() host, port, *_ = w.transport.get_extra_info("peername") assert port == ports[1] assert pair.server.available_data_ports.qsize() == 1 conflicting_server.close() await conflicting_server.wait_closed() @pytest.mark.asyncio async def test_pasv_connection_busy_port2(pair_factory, Server, unused_tcp_port_factory, expect_codes_in_exception): ports = [unused_tcp_port_factory()] s = Server(data_ports=ports) async with pair_factory(None, s, do_quit=False) as pair: conflicting_server = await asyncio.start_server( lambda r, w: w.close(), host=pair.server.server_host, port=ports[0], ) with expect_codes_in_exception("421"): await pair.client.get_passive_connection() conflicting_server.close() await conflicting_server.wait_closed() @pytest.mark.asyncio async def test_server_shutdown(pair_factory): async with pair_factory(do_quit=False) as pair: await pair.client.list() await pair.server.close() with pytest.raises(ConnectionResetError): await pair.client.list() @pytest.mark.asyncio async def test_client_session_context_manager(pair_factory): async with pair_factory(connected=False) as pair: async with aioftp.Client.context(*pair.server.address) as client: await client.list() @pytest.mark.asyncio async def test_long_login_sequence_fail(pair_factory, expect_codes_in_exception): class CustomServer(aioftp.Server): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.commands_mapping["acct"] = self.acct async def user(self, connection, rest): connection.response("331") return True async def pass_(self, connection, rest): connection.response("332") return True async def acct(self, connection, rest): connection.response("333") return True factory = pair_factory(logged=False, server_factory=CustomServer, do_quit=False) 
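    # The custom server above keeps answering intermediate 3xx replies
    # (331, 332, 333), so `client.login()` runs out of login steps it knows
    # and raises with the last unexpected code, "333", asserted below.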
async with factory as pair: with expect_codes_in_exception("333"): await pair.client.login() @pytest.mark.asyncio async def test_bad_sublines_seq(pair_factory, expect_codes_in_exception): class CustomServer(aioftp.Server): async def write_response(self, stream, code, lines="", list=False): import functools lines = aioftp.wrap_with_container(lines) write = functools.partial(self.write_line, stream) *body, tail = lines for line in body: await write(code + "-" + line) await write(str(int(code) + 1) + "-" + tail) await write(code + " " + tail) factory = pair_factory(connected=False, server_factory=CustomServer) async with factory as pair: with expect_codes_in_exception("220"): await pair.client.connect(pair.server.server_host, pair.server.server_port) await pair.client.login() aioftp-0.21.4/tests/test_corner_cases.py000066400000000000000000000044541432163100400203020ustar00rootroot00000000000000from pathlib import Path import pytest import aioftp @pytest.mark.asyncio async def test_server_side_exception(pair_factory): class CustomServer(aioftp.Server): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.commands_mapping["custom"] = self.custom async def custom(*args, **kwargs): raise RuntimeError("Test error") factory = pair_factory(server_factory=CustomServer, do_quit=False) async with factory as pair: with pytest.raises(ConnectionResetError): await pair.client.command("custom", "200") @pytest.mark.asyncio async def test_bad_type_value(pair_factory, expect_codes_in_exception): async with pair_factory() as pair: with expect_codes_in_exception("502"): await pair.client.command("type FOO", "200") @pytest.mark.asyncio async def test_pbsz(pair_factory): async with pair_factory() as pair: await pair.client.command("pbsz", "200") @pytest.mark.asyncio async def test_prot(pair_factory, expect_codes_in_exception): async with pair_factory() as pair: await pair.client.command("prot P", "200") with expect_codes_in_exception("502"): await pair.client.command("prot foo", "200") @pytest.mark.asyncio async def test_server_ipv6_pasv(pair_factory, expect_codes_in_exception): async with pair_factory(host="::1", do_quit=False) as pair: with expect_codes_in_exception("503"): await pair.client.get_passive_connection(commands=["pasv"]) @pytest.mark.asyncio async def test_epsv_extra_arg(pair_factory, expect_codes_in_exception): async with pair_factory(do_quit=False) as pair: with expect_codes_in_exception("522"): await pair.client.command("epsv foo", "229") @pytest.mark.asyncio async def test_bad_server_path_io(pair_factory, Server, expect_codes_in_exception): class BadPathIO(aioftp.MemoryPathIO): async def is_file(*a, **kw): return False async def is_dir(*a, **kw): return False s = Server(path_io_factory=BadPathIO) async with pair_factory(None, s) as pair: pio = pair.server.path_io_factory() async with pio.open(Path("/foo"), "wb"): pass await pair.client.list() aioftp-0.21.4/tests/test_current_directory.py000066400000000000000000000030341432163100400213730ustar00rootroot00000000000000import pathlib import pytest import aioftp @pytest.mark.asyncio async def test_current_directory_simple(pair_factory): async with pair_factory() as pair: cwd = await pair.client.get_current_directory() assert cwd == pathlib.PurePosixPath("/") @pytest.mark.asyncio async def test_current_directory_not_default(pair_factory, Server): s = Server([aioftp.User(home_path="/home")]) async with pair_factory(None, s) as pair: cwd = await pair.client.get_current_directory() assert cwd == pathlib.PurePosixPath("/home") 
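# Illustrative sketch, not part of the original suite: combining the two
# cases above, a per-user home directory can be checked together with
# explicit credentials. The "foo"/"bar" account and "/home/foo" path below
# are invented example values in the style of test_login.py.
@pytest.mark.asyncio
async def test_current_directory_per_user_example(pair_factory, Server):
    s = Server([aioftp.User("foo", "bar", home_path="/home/foo")])
    async with pair_factory(None, s, logged=False) as pair:
        # after logging in with these credentials, the server should report
        # that user's home directory as the working directory
        await pair.client.login("foo", "bar")
        cwd = await pair.client.get_current_directory()
        assert cwd == pathlib.PurePosixPath("/home/foo")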
@pytest.mark.asyncio async def test_mlsd(pair_factory): async with pair_factory() as pair: await pair.make_server_files("test.txt") (path, stat), *_ = files = await pair.client.list() assert len(files) == 1 assert path == pathlib.PurePosixPath("test.txt") assert stat["type"] == "file" @pytest.mark.asyncio async def test_resolving_double_dots(pair_factory): async with pair_factory() as pair: await pair.make_server_files("test.txt") async def f(): cwd = await pair.client.get_current_directory() assert cwd == pathlib.PurePosixPath("/") (path, stat), *_ = files = await pair.client.list() assert len(files) == 1 assert path == pathlib.PurePosixPath("test.txt") assert stat["type"] == "file" await f() await pair.client.change_directory("../../../") await f() await pair.client.change_directory("/a/../b/..") await f() aioftp-0.21.4/tests/test_directory_actions.py000066400000000000000000000072431432163100400213570ustar00rootroot00000000000000import pathlib import pytest import aioftp @pytest.mark.asyncio async def test_create_and_remove_directory(pair_factory): async with pair_factory() as pair: await pair.client.make_directory("bar") (path, stat), *_ = files = await pair.client.list() assert len(files) == 1 assert path == pathlib.PurePosixPath("bar") assert stat["type"] == "dir" await pair.client.remove_directory("bar") assert await pair.server_paths_exists("bar") is False @pytest.mark.asyncio async def test_create_and_remove_directory_long(pair_factory): async with pair_factory() as pair: await pair.client.make_directory("bar/baz") assert await pair.server_paths_exists("bar", "bar/baz") await pair.client.remove_directory("bar/baz") await pair.client.remove_directory("bar") assert await pair.server_paths_exists("bar") is False @pytest.mark.asyncio async def test_create_directory_long_no_parents(pair_factory): async with pair_factory() as pair: await pair.client.make_directory("bar/baz", parents=False) await pair.client.remove_directory("bar/baz") await pair.client.remove_directory("bar") @pytest.mark.asyncio async def test_change_directory(pair_factory): async with pair_factory() as pair: await pair.client.make_directory("bar") await pair.client.change_directory("bar") cwd = await pair.client.get_current_directory() assert cwd == pathlib.PurePosixPath("/bar") await pair.client.change_directory() cwd = await pair.client.get_current_directory() assert cwd == pathlib.PurePosixPath("/") @pytest.mark.asyncio async def test_change_directory_not_exist(pair_factory, expect_codes_in_exception): async with pair_factory() as pair: with expect_codes_in_exception("550"): await pair.client.change_directory("bar") @pytest.mark.asyncio async def test_rename_empty_directory(pair_factory): async with pair_factory() as pair: await pair.client.make_directory("bar") assert await pair.server_paths_exists("bar") assert await pair.server_paths_exists("baz") is False await pair.client.rename("bar", "baz") assert await pair.server_paths_exists("bar") is False assert await pair.server_paths_exists("baz") @pytest.mark.asyncio async def test_rename_non_empty_directory(pair_factory): async with pair_factory() as pair: await pair.make_server_files("bar/foo.txt") assert await pair.server_paths_exists("bar/foo.txt", "bar") await pair.client.make_directory("hurr") await pair.client.rename("bar", "hurr/baz") assert await pair.server_paths_exists("hurr/baz/foo.txt") assert await pair.server_paths_exists("bar") is False class FakeErrorPathIO(aioftp.MemoryPathIO): def list(self, path): class Lister(aioftp.AbstractAsyncLister): 
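            # Raising from `__anext__` exercises the server's error path:
            # `universal_exception` wraps the failure and the server reports
            # it to the client as a 451 reply, which test_exception_in_list
            # below asserts.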
@aioftp.pathio.universal_exception async def __anext__(self): raise Exception("KERNEL PANIC") return Lister(timeout=self.timeout) @pytest.mark.asyncio async def test_exception_in_list(pair_factory, Server, expect_codes_in_exception): s = Server(path_io_factory=FakeErrorPathIO) async with pair_factory(None, s) as pair: with expect_codes_in_exception("451"): await pair.client.list() @pytest.mark.asyncio async def test_list_recursive(pair_factory): async with pair_factory() as pair: await pair.make_server_files("foo/bar", "foo/baz/baz") files = await pair.client.list(recursive=True) assert len(files) == 4 aioftp-0.21.4/tests/test_extra.py000066400000000000000000000022601432163100400167500ustar00rootroot00000000000000import pytest import aioftp @pytest.mark.asyncio async def test_stream_iter_by_line(pair_factory): async with pair_factory() as pair: await pair.client.make_directory("bar") lines = [] async with pair.client.get_stream("list") as stream: async for line in stream.iter_by_line(): lines.append(line) assert len(lines) == 1 assert b"bar" in lines[0] @pytest.mark.asyncio async def test_stream_close_without_finish(pair_factory): class CustomException(Exception): pass def fake_finish(*a, **kw): raise Exception("Finished called") async with pair_factory() as pair: with pytest.raises(CustomException): async with pair.client.get_stream(): raise CustomException() @pytest.mark.asyncio async def test_no_server(unused_tcp_port): with pytest.raises(OSError): async with aioftp.Client.context("127.0.0.1", unused_tcp_port): pass @pytest.mark.asyncio async def test_syst_command(pair_factory): async with pair_factory() as pair: code, info = await pair.client.command("syst", "215") assert info == [" UNIX Type: L8"] aioftp-0.21.4/tests/test_file.py000066400000000000000000000221501432163100400165440ustar00rootroot00000000000000import math import datetime as dt from pathlib import PurePosixPath import pytest import aioftp @pytest.mark.asyncio async def test_remove_single_file(pair_factory): async with pair_factory() as pair: await pair.make_server_files("foo.txt") assert await pair.server_paths_exists("foo.txt") await pair.client.remove_file("foo.txt") assert await pair.server_paths_exists("foo.txt") is False @pytest.mark.asyncio async def test_recursive_remove(pair_factory): async with pair_factory() as pair: paths = ["foo/bar.txt", "foo/baz.txt", "foo/bar_dir/foo.baz"] await pair.make_server_files(*paths) await pair.client.remove("foo") assert await pair.server_paths_exists(*paths) is False @pytest.mark.asyncio async def test_mlsd_file(pair_factory): async with pair_factory() as pair: await pair.make_server_files("foo/bar.txt") result = await pair.client.list("foo/bar.txt") assert len(result) == 0 @pytest.mark.asyncio async def test_file_download(pair_factory): async with pair_factory() as pair: await pair.make_server_files("foo", size=1, atom=b"foobar") async with pair.client.download_stream("foo") as stream: data = await stream.read() assert data == b"foobar" @pytest.mark.asyncio async def test_file_download_exactly(pair_factory): async with pair_factory() as pair: await pair.make_server_files("foo", size=1, atom=b"foobar") async with pair.client.download_stream("foo") as stream: data1 = await stream.readexactly(3) data2 = await stream.readexactly(3) assert (data1, data2) == (b"foo", b"bar") @pytest.mark.asyncio async def test_file_download_enhanced_passive(pair_factory): async with pair_factory() as pair: pair.client._passive_commands = ["epsv"] await pair.make_server_files("foo", size=1, 
atom=b"foobar") async with pair.client.download_stream("foo") as stream: data = await stream.read() assert data == b"foobar" @pytest.mark.asyncio async def test_file_upload(pair_factory): async with pair_factory() as pair: async with pair.client.upload_stream("foo") as stream: await stream.write(b"foobar") async with pair.client.download_stream("foo") as stream: data = await stream.read() assert data == b"foobar" @pytest.mark.asyncio async def test_file_append(pair_factory): async with pair_factory() as pair: await pair.make_server_files("foo", size=1, atom=b"foobar") async with pair.client.append_stream("foo") as stream: await stream.write(b"foobar") async with pair.client.download_stream("foo") as stream: data = await stream.read() assert data == b"foobar" * 2 @pytest.mark.asyncio async def test_upload_folder(pair_factory): async with pair_factory() as pair: paths = ["foo/bar", "foo/baz"] await pair.make_client_files(*paths) assert await pair.server_paths_exists(*paths) is False await pair.client.upload("foo") assert await pair.server_paths_exists(*paths) @pytest.mark.asyncio async def test_upload_folder_into(pair_factory): async with pair_factory() as pair: paths = ["foo/bar", "foo/baz"] await pair.make_client_files(*paths) assert await pair.server_paths_exists("bar", "baz") is False await pair.client.upload("foo", write_into=True) assert await pair.server_paths_exists("bar", "baz") @pytest.mark.asyncio async def test_upload_folder_into_another(pair_factory): async with pair_factory() as pair: paths = ["foo/bar", "foo/baz"] await pair.make_client_files(*paths) assert await pair.server_paths_exists("bar/bar", "bar/baz") is False await pair.client.upload("foo", "bar", write_into=True) assert await pair.server_paths_exists("bar/bar", "bar/baz") @pytest.mark.asyncio async def test_download_folder(pair_factory): async with pair_factory() as pair: paths = ["foo/bar", "foo/baz"] await pair.make_server_files(*paths) assert await pair.client_paths_exists(*paths) is False await pair.client.download("foo") assert await pair.client_paths_exists(*paths) @pytest.mark.asyncio async def test_download_folder_into(pair_factory): async with pair_factory() as pair: paths = ["foo/bar", "foo/baz"] await pair.make_server_files(*paths) assert await pair.client_paths_exists("bar", "baz") is False await pair.client.download("foo", write_into=True) assert await pair.client_paths_exists("bar", "baz") @pytest.mark.asyncio async def test_download_folder_into_another(pair_factory): async with pair_factory() as pair: paths = ["foo/bar", "foo/baz"] await pair.make_server_files(*paths) assert await pair.client_paths_exists("bar/bar", "bar/baz") is False await pair.client.download("foo", "bar", write_into=True) assert await pair.client_paths_exists("bar/bar", "bar/baz") @pytest.mark.asyncio async def test_upload_file_over(pair_factory): async with pair_factory() as pair: await pair.make_client_files("foo", size=1, atom=b"client") await pair.make_server_files("foo", size=1, atom=b"server") async with pair.client.download_stream("foo") as stream: assert await stream.read() == b"server" await pair.client.upload("foo") async with pair.client.download_stream("foo") as stream: assert await stream.read() == b"client" @pytest.mark.asyncio async def test_download_file_over(pair_factory): async with pair_factory() as pair: await pair.make_client_files("foo", size=1, atom=b"client") await pair.make_server_files("foo", size=1, atom=b"server") async with pair.client.path_io.open(PurePosixPath("foo")) as f: assert await f.read() == 
b"client" await pair.client.download("foo") async with pair.client.path_io.open(PurePosixPath("foo")) as f: assert await f.read() == b"server" @pytest.mark.asyncio async def test_upload_file_write_into(pair_factory): async with pair_factory() as pair: await pair.make_client_files("foo", size=1, atom=b"client") await pair.make_server_files("bar", size=1, atom=b"server") async with pair.client.download_stream("bar") as stream: assert await stream.read() == b"server" await pair.client.upload("foo", "bar", write_into=True) async with pair.client.download_stream("bar") as stream: assert await stream.read() == b"client" @pytest.mark.asyncio async def test_upload_tree(pair_factory): async with pair_factory() as pair: await pair.make_client_files("foo/bar/baz", size=1, atom=b"client") await pair.client.upload("foo", "bar", write_into=True) files = await pair.client.list(recursive=True) assert len(files) == 3 @pytest.mark.asyncio async def test_download_file_write_into(pair_factory): async with pair_factory() as pair: await pair.make_client_files("foo", size=1, atom=b"client") await pair.make_server_files("bar", size=1, atom=b"server") async with pair.client.path_io.open(PurePosixPath("foo")) as f: assert await f.read() == b"client" await pair.client.download("bar", "foo", write_into=True) async with pair.client.path_io.open(PurePosixPath("foo")) as f: assert await f.read() == b"server" @pytest.mark.asyncio async def test_upload_file_os_error(pair_factory, Server, expect_codes_in_exception): class OsErrorPathIO(aioftp.MemoryPathIO): @aioftp.pathio.universal_exception async def write(self, fout, data): raise OSError("test os error") s = Server(path_io_factory=OsErrorPathIO) async with pair_factory(None, s) as pair: with expect_codes_in_exception("451"): async with pair.client.upload_stream("foo") as stream: await stream.write(b"foobar") @pytest.mark.asyncio async def test_upload_path_unreachable(pair_factory, expect_codes_in_exception): async with pair_factory() as pair: with expect_codes_in_exception("550"): async with pair.client.upload_stream("foo/bar/foo") as stream: await stream.write(b"foobar") @pytest.mark.asyncio async def test_stat_when_no_mlst(pair_factory): async with pair_factory() as pair: pair.server.commands_mapping.pop("mlst") await pair.make_server_files("foo") info = await pair.client.stat("foo") assert info["type"] == "file" @pytest.mark.asyncio async def test_stat_mlst(pair_factory): async with pair_factory() as pair: now = dt.datetime.utcnow() await pair.make_server_files("foo") info = await pair.client.stat("foo") assert info["type"] == "file" for fact in ("modify", "create"): received = dt.datetime.strptime(info[fact], "%Y%m%d%H%M%S") assert math.isclose(now.timestamp(), received.timestamp(), abs_tol=10) aioftp-0.21.4/tests/test_list_fallback.py000066400000000000000000000120631432163100400204210ustar00rootroot00000000000000import pathlib import textwrap import contextlib import pytest import aioftp async def not_implemented(connection, rest): connection.response("502", ":P") return True @pytest.mark.asyncio async def test_client_fallback_to_list_at_list(pair_factory): async with pair_factory() as pair: pair.server.commands_mapping["mlst"] = not_implemented pair.server.commands_mapping["mlsd"] = not_implemented await pair.make_server_files("bar/foo") (path, stat), *_ = files = await pair.client.list() assert len(files) == 1 assert path == pathlib.PurePosixPath("bar") assert stat["type"] == "dir" (path, stat), *_ = files = await pair.client.list("bar") assert len(files) == 1 
        assert path == pathlib.PurePosixPath("bar/foo")
        assert stat["type"] == "file"
        result = await pair.client.list("bar/foo")
        assert len(result) == 0


async def implemented_badly(connection, rest):
    assert False, "should not be called"


@pytest.mark.asyncio
async def test_client_list_override(pair_factory):
    async with pair_factory() as pair:
        pair.server.commands_mapping["mlsd"] = implemented_badly
        await pair.client.make_directory("bar")
        (path, stat), *_ = files = await pair.client.list(raw_command="LIST")
        assert len(files) == 1
        assert path == pathlib.PurePosixPath("bar")
        assert stat["type"] == "dir"


@pytest.mark.asyncio
async def test_client_list_override_invalid_raw_command(pair_factory):
    async with pair_factory() as pair:
        with pytest.raises(ValueError):
            await pair.client.list(raw_command="FOO")


def test_client_list_windows():
    test_str = textwrap.dedent("""\
     11/4/2018    9:09 PM    <DIR>          .
     8/10/2018    1:02 PM    <DIR>          ..
     9/23/2018    2:16 PM    <DIR>          bin
    10/16/2018   10:25 PM    <DIR>          Desktop
     11/4/2018    3:31 PM    <DIR>          dow
    10/16/2018    8:21 PM    <DIR>          Downloads
    10/14/2018    5:34 PM    <DIR>          msc
     9/9/2018     9:32 AM    <DIR>          opt
     10/3/2018    2:58 PM     34,359,738,368 win10.img
     6/30/2018    8:36 AM      3,939,237,888 win10.iso
     7/26/2018    1:11 PM                189 win10.sh
    10/29/2018   11:46 AM     34,359,738,368 win7.img
     6/30/2018    8:35 AM      3,319,791,616 win7.iso
    10/29/2018   10:55 AM                219 win7.sh
           6 files          75,978,506,648 bytes
           3 directories  22,198,362,112 bytes free
    """)
    test_str = test_str.strip().split("\n")
    entities = {}
    parse = aioftp.Client(encoding="utf-8").parse_list_line_windows
    for x in test_str:
        with contextlib.suppress(ValueError):
            path, stat = parse(x.encode("utf-8"))
            entities[path] = stat
    dirs = ["bin", "Desktop", "dow", "Downloads", "msc", "opt"]
    files = ["win10.img", "win10.iso", "win10.sh", "win7.img", "win7.iso",
             "win7.sh"]
    assert len(entities) == len(dirs + files)
    for d in dirs:
        p = pathlib.PurePosixPath(d)
        assert p in entities
        assert entities[p]["type"] == "dir"
    for f in files:
        p = pathlib.PurePosixPath(f)
        assert p in entities
        assert entities[p]["type"] == "file"
    with pytest.raises(ValueError):
        parse(b" 10/3/2018    2:58 PM     34,35xxx38,368 win10.img")


@pytest.mark.asyncio
async def test_client_list_override_with_custom(pair_factory, Client):
    meta = {"type": "file", "works": True}

    def parser(b):
        import pickle
        return pickle.loads(bytes.fromhex(b.decode().rstrip("\r\n")))

    async def builder(_, path):
        import pickle
        return pickle.dumps((path, meta)).hex()

    async with pair_factory(Client(parse_list_line_custom=parser)) as pair:
        pair.server.commands_mapping["mlst"] = not_implemented
        pair.server.commands_mapping["mlsd"] = not_implemented
        pair.server.build_list_string = builder
        await pair.client.make_directory("bar")
        (path, stat), *_ = files = await pair.client.list()
        assert len(files) == 1
        assert path == pathlib.PurePosixPath("bar")
        assert stat == meta


@pytest.mark.asyncio
async def test_client_list_override_with_custom_last(pair_factory, Client):
    meta = {"type": "file", "works": True}

    def parser(b):
        import pickle
        return pickle.loads(bytes.fromhex(b.decode().rstrip("\r\n")))

    async def builder(_, path):
        import pickle
        return pickle.dumps((path, meta)).hex()

    client = Client(
        parse_list_line_custom=parser,
        parse_list_line_custom_first=False,
    )
    async with pair_factory(client) as pair:
        pair.server.commands_mapping["mlst"] = not_implemented
        pair.server.commands_mapping["mlsd"] = not_implemented
        pair.server.build_list_string = builder
        await pair.client.make_directory("bar")
        (path, stat), *_ = files = await pair.client.list()
        assert len(files) == 1
        assert path == pathlib.PurePosixPath("bar")
        assert stat == meta
aioftp-0.21.4/tests/test_login.py000066400000000000000000000041261432163100400167400ustar00rootroot00000000000000import pytest

import aioftp


@pytest.mark.asyncio
async def test_not_logged_in(pair_factory, expect_codes_in_exception):
    async with pair_factory(logged=False, do_quit=False) as pair:
        with expect_codes_in_exception("503"):
            await pair.client.get_current_directory()


@pytest.mark.asyncio
async def test_anonymous_login(pair_factory):
    async with pair_factory():
        pass


@pytest.mark.asyncio
async def test_login_with_login_data(pair_factory):
    async with pair_factory(logged=False) as pair:
        await pair.client.login("foo", "bar")


@pytest.mark.asyncio
async def test_login_with_login_and_no_password(pair_factory, Server):
    s = Server([aioftp.User("foo")])
    async with pair_factory(None, s, logged=False) as pair:
        await pair.client.login("foo")


@pytest.mark.asyncio
async def test_login_with_login_and_password(pair_factory, Server):
    s = Server([aioftp.User("foo", "bar")])
    async with pair_factory(None, s, logged=False) as pair:
        await pair.client.login("foo", "bar")


@pytest.mark.asyncio
async def test_login_with_login_and_password_no_such_user(
        pair_factory, Server, expect_codes_in_exception):
    s = Server([aioftp.User("foo", "bar")])
    async with pair_factory(None, s, logged=False) as pair:
        with expect_codes_in_exception("530"):
            await pair.client.login("fo", "bar")


@pytest.mark.asyncio
async def test_login_with_login_and_password_bad_password(
        pair_factory, Server, expect_codes_in_exception):
    s = Server([aioftp.User("foo", "bar")])
    async with pair_factory(None, s, logged=False) as pair:
        with expect_codes_in_exception("530"):
            await pair.client.login("foo", "baz")


@pytest.mark.asyncio
async def test_pass_after_login(pair_factory, Server,
                                expect_codes_in_exception):
    s = Server([aioftp.User("foo", "bar")])
    async with pair_factory(None, s, logged=False) as pair:
        await pair.client.login("foo", "bar")
        with expect_codes_in_exception("503"):
            await pair.client.command("PASS baz", ("230", "33x"))
aioftp-0.21.4/tests/test_maximum_connections.py000066400000000000000000000056631432163100400217140ustar00rootroot00000000000000import functools

import pytest

import aioftp


@pytest.mark.asyncio
async def test_multiply_connections_no_limits(pair_factory):
    Client = functools.partial(aioftp.Client,
                               path_io_factory=aioftp.MemoryPathIO)
    async with pair_factory() as pair:
        s = pair.server
        clients = [Client() for _ in range(4)]
        for c in clients:
            await c.connect(s.server_host, s.server_port)
            await c.login()
        for c in clients:
            await c.quit()


@pytest.mark.asyncio
async def test_multiply_connections_limited_error(pair_factory, Server,
                                                  expect_codes_in_exception):
    Client = functools.partial(aioftp.Client,
                               path_io_factory=aioftp.MemoryPathIO)
    s = Server(maximum_connections=4)
    async with pair_factory(None, s) as pair:
        s = pair.server
        clients = [Client() for _ in range(4)]
        for c in clients[:-1]:
            await c.connect(s.server_host, s.server_port)
            await c.login()
        with expect_codes_in_exception("421"):
            await clients[-1].connect(s.server_host, s.server_port)
        for c in clients[:-1]:
            await c.quit()


@pytest.mark.asyncio
async def test_multiply_user_commands(pair_factory, Server):
    s = Server(maximum_connections=1)
    async with pair_factory(None, s) as pair:
        for _ in range(10):
            await pair.client.login()


@pytest.mark.asyncio
async def test_multiply_connections_with_user_limited_error(
        pair_factory, Server, expect_codes_in_exception):
    Client = functools.partial(aioftp.Client,
                               path_io_factory=aioftp.MemoryPathIO)
    s = Server([aioftp.User("foo", maximum_connections=4)])
    async with pair_factory(None, s, connected=False) as pair:
        s = pair.server
        clients = [Client() for _ in range(5)]
        for c in clients[:-1]:
            await c.connect(s.server_host, s.server_port)
            await c.login("foo")
        await clients[-1].connect(s.server_host, s.server_port)
        with expect_codes_in_exception("530"):
            await clients[-1].login("foo")
        for c in clients[:-1]:
            await c.quit()


@pytest.mark.asyncio
async def test_multiply_connections_relogin_balanced(
        pair_factory, Server, expect_codes_in_exception):
    Client = functools.partial(aioftp.Client,
                               path_io_factory=aioftp.MemoryPathIO)
    s = Server(maximum_connections=4)
    async with pair_factory(None, s, connected=False) as pair:
        s = pair.server
        clients = [Client() for _ in range(5)]
        for c in clients[:-1]:
            await c.connect(s.server_host, s.server_port)
            await c.login()
        await clients[0].quit()
        await clients[-1].connect(s.server_host, s.server_port)
        await clients[-1].login()
        for c in clients[1:]:
            await c.quit()
aioftp-0.21.4/tests/test_passive.py000066400000000000000000000052711432163100400173040ustar00rootroot00000000000000import pytest


async def not_implemented(connection, rest):
    connection.response("502", ":P")
    return True


@pytest.mark.asyncio
async def test_client_fallback_to_pasv_at_list(pair_factory):
    async with pair_factory(host="127.0.0.1") as pair:
        pair.server.epsv = not_implemented
        await pair.client.list()


@pytest.mark.asyncio
async def test_client_fail_fallback_to_pasv_at_list(
        pair_factory, expect_codes_in_exception):
    async with pair_factory(host="127.0.0.1") as pair:
        pair.server.commands_mapping["epsv"] = not_implemented
        with expect_codes_in_exception("502"):
            await pair.client.get_passive_connection(commands=["epsv"])
        with expect_codes_in_exception("502"):
            pair.client._passive_commands = ["epsv"]
            await pair.client.get_passive_connection()


@pytest.mark.asyncio
async def test_client_only_passive_list(pair_factory):
    async with pair_factory(host="127.0.0.1") as pair:
        pair.client._passive_commands = ["pasv"]
        await pair.client.list()


@pytest.mark.asyncio
async def test_client_only_enhanced_passive_list(pair_factory):
    async with pair_factory(host="127.0.0.1") as pair:
        pair.client._passive_commands = ["epsv"]
        await pair.client.list()


@pytest.mark.asyncio
async def test_passive_no_choices(pair_factory):
    async with pair_factory() as pair:
        pair.client._passive_commands = []
        with pytest.raises(ValueError):
            await pair.client.get_passive_connection(commands=[])


@pytest.mark.asyncio
async def test_passive_bad_choices(pair_factory):
    async with pair_factory() as pair:
        pair.server.epsv = not_implemented
        with pytest.raises(ValueError):
            await pair.client.get_passive_connection(commands=["FOO"])


@pytest.mark.parametrize("method", [("pasv", "227"), ("epsv", "229")])
@pytest.mark.asyncio
async def test_passive_multicall(pair_factory, method):
    async with pair_factory(host="127.0.0.1") as pair:
        code, info = await pair.client.command(*method)
        assert "created" in info[0]
        code, info = await pair.client.command(*method)
        assert "exists" in info[0]


@pytest.mark.parametrize("method", ["pasv", "epsv"])
@pytest.mark.asyncio
async def test_passive_closed_on_recall(pair_factory, method):
    async with pair_factory(host="127.0.0.1") as pair:
        r, w = await pair.client.get_passive_connection(commands=[method])
        nr, nw = await pair.client.get_passive_connection(commands=[method])
        with pytest.raises((ConnectionResetError, BrokenPipeError)):
            while True:
                w.write(b"-")
                await w.drain()
aioftp-0.21.4/tests/test_pathio.py000066400000000000000000000210741432163100400171150ustar00rootroot00000000000000import contextlib

import pytest

import aioftp


@contextlib.contextmanager
def universal_exception_reason(*exc):
    try:
        yield
    except aioftp.PathIOError as e:
        type, instance, traceback = e.reason
        m = f"Expect one of {exc}, got {instance}"
        assert isinstance(instance, exc), m
    else:
        raise Exception(f"No exception. Expect one of {exc}")


def test_has_state(path_io):
    assert hasattr(path_io, "state")
    path_io.state


@pytest.mark.asyncio
async def test_exists(path_io, temp_dir):
    assert await path_io.exists(temp_dir)
    assert not await path_io.exists(temp_dir / "foo")


@pytest.mark.asyncio
async def test_is_dir(path_io, temp_dir):
    assert await path_io.is_dir(temp_dir)
    p = temp_dir / "foo"
    async with path_io.open(p, mode="wb"):
        pass
    assert not await path_io.is_dir(p)


@pytest.mark.asyncio
async def test_is_file(path_io, temp_dir):
    p = temp_dir / "foo"
    assert not await path_io.is_file(temp_dir)
    async with path_io.open(p, mode="wb"):
        pass
    assert await path_io.is_file(p)


@pytest.mark.asyncio
async def test_mkdir(path_io, temp_dir):
    p = temp_dir / "foo"
    assert not await path_io.exists(p)
    await path_io.mkdir(p)
    assert await path_io.exists(p)


@pytest.mark.asyncio
async def test_rmdir(path_io, temp_dir):
    p = temp_dir / "foo"
    assert not await path_io.exists(p)
    await path_io.mkdir(p)
    assert await path_io.exists(p)
    await path_io.rmdir(p)
    assert not await path_io.exists(p)


@pytest.mark.asyncio
async def test_unlink(path_io, temp_dir):
    p = temp_dir / "foo"
    assert not await path_io.exists(p)
    async with path_io.open(p, mode="wb"):
        pass
    assert await path_io.exists(p)
    await path_io.unlink(p)
    assert not await path_io.exists(p)


@pytest.mark.asyncio
async def test_list(path_io, temp_dir):
    d, f = temp_dir / "dir", temp_dir / "file"
    await path_io.mkdir(d)
    async with path_io.open(f, mode="wb"):
        pass
    paths = await path_io.list(temp_dir)
    assert len(paths) == 2
    assert set(paths) == {d, f}
    paths = set()
    async for p in path_io.list(temp_dir):
        paths.add(p)
    assert set(paths) == {d, f}


@pytest.mark.asyncio
async def test_stat(path_io, temp_dir):
    stat = await path_io.stat(temp_dir)
    for a in ["st_size", "st_mtime", "st_ctime", "st_nlink", "st_mode"]:
        assert hasattr(stat, a)


@pytest.mark.asyncio
async def test_open_context(path_io, temp_dir):
    p = temp_dir / "context"
    async with path_io.open(p, mode="wb") as f:
        await f.write(b"foo")
    async with path_io.open(p, mode="rb") as f:
        assert await f.read() == b"foo"
    async with path_io.open(p, mode="ab") as f:
        await f.write(b"bar")
    async with path_io.open(p, mode="rb") as f:
        assert await f.read() == b"foobar"
    async with path_io.open(p, mode="wb") as f:
        await f.write(b"foo")
    async with path_io.open(p, mode="rb") as f:
        assert await f.read() == b"foo"
    async with path_io.open(p, mode="r+b") as f:
        assert await f.read(1) == b"f"
        await f.write(b"un")
    async with path_io.open(p, mode="rb") as f:
        assert await f.read() == b"fun"


@pytest.mark.asyncio
async def test_open_plain(path_io, temp_dir):
    p = temp_dir / "plain"
    f = await path_io.open(p, mode="wb")
    await f.write(b"foo")
    await f.close()
    f = await path_io.open(p, mode="rb")
    assert await f.read() == b"foo"
    await f.close()
    f = await path_io.open(p, mode="ab")
    await f.write(b"bar")
    await f.close()
    f = await path_io.open(p, mode="rb")
    assert await f.read() == b"foobar"
    await f.close()
    f = await path_io.open(p, mode="wb")
    await f.write(b"foo")
    await f.close()
    f = await path_io.open(p, mode="rb")
    assert await f.read() == b"foo"
    await f.close()
    f = await path_io.open(p, mode="r+b")
    assert await f.read(1) == b"f"
    await f.write(b"un")
    await f.close()
    f = await path_io.open(p, mode="rb")
    assert await f.read() == b"fun"
    await f.close()


@pytest.mark.asyncio
async def test_file_methods(path_io, temp_dir):
    p = temp_dir / "foo"
    async with path_io.open(p, mode="wb") as f:
        await f.write(b"foo")
        with universal_exception_reason(ValueError):
            await path_io.seek(f, 0)
        await f.seek(0)
        await f.write(b"bar")
    async with path_io.open(p, mode="rb") as f:
        assert await f.read() == b"bar"


@pytest.mark.asyncio
async def test_rename(path_io, temp_dir):
    old = temp_dir / "foo"
    new = temp_dir / "bar"
    await path_io.mkdir(old)
    assert await path_io.exists(old)
    assert not await path_io.exists(new)
    await path_io.rename(old, new)
    assert not await path_io.exists(old)
    assert await path_io.exists(new)


def test_repr_works(path_io, temp_dir):
    repr(path_io)


@pytest.mark.asyncio
async def test_path_over_file(path_io, temp_dir):
    f = temp_dir / "file"
    async with path_io.open(f, mode="wb"):
        pass
    assert not await path_io.exists(f / "dir")


@pytest.mark.asyncio
async def test_mkdir_over_file(path_io, temp_dir):
    f = temp_dir / "file"
    async with path_io.open(f, mode="wb"):
        pass
    with universal_exception_reason(FileExistsError):
        await path_io.mkdir(f)


@pytest.mark.asyncio
async def test_mkdir_no_parents(path_io, temp_dir):
    p = temp_dir / "foo" / "bar"
    with universal_exception_reason(FileNotFoundError):
        await path_io.mkdir(p)


@pytest.mark.asyncio
async def test_mkdir_parent_is_file(path_io, temp_dir):
    f = temp_dir / "foo"
    async with path_io.open(f, mode="wb"):
        pass
    with universal_exception_reason(NotADirectoryError):
        await path_io.mkdir(f / "bar")


@pytest.mark.asyncio
async def test_mkdir_parent_is_file_with_parents(path_io, temp_dir):
    f = temp_dir / "foo"
    async with path_io.open(f, mode="wb"):
        pass
    with universal_exception_reason(NotADirectoryError):
        await path_io.mkdir(f / "bar", parents=True)


@pytest.mark.asyncio
async def test_rmdir_not_exist(path_io, temp_dir):
    with universal_exception_reason(FileNotFoundError):
        await path_io.rmdir(temp_dir / "foo")


@pytest.mark.asyncio
async def test_rmdir_on_file(path_io, temp_dir):
    f = temp_dir / "foo"
    async with path_io.open(f, mode="wb"):
        pass
    with universal_exception_reason(NotADirectoryError):
        await path_io.rmdir(f)


@pytest.mark.asyncio
async def test_rmdir_not_empty(path_io, temp_dir):
    f = temp_dir / "foo"
    async with path_io.open(f, mode="wb"):
        pass
    with universal_exception_reason(OSError):
        await path_io.rmdir(temp_dir)


@pytest.mark.asyncio
async def test_unlink_not_exist(path_io, temp_dir):
    with universal_exception_reason(FileNotFoundError):
        await path_io.unlink(temp_dir / "foo")


@pytest.mark.asyncio
async def test_unlink_on_dir(path_io, temp_dir):
    with universal_exception_reason(IsADirectoryError, PermissionError):
        await path_io.unlink(temp_dir)


@pytest.mark.asyncio
async def test_list_not_exist(path_io, temp_dir):
    assert await path_io.list(temp_dir / "foo") == []


@pytest.mark.asyncio
async def test_stat_not_exist(path_io, temp_dir):
    with universal_exception_reason(FileNotFoundError):
        await path_io.stat(temp_dir / "foo")


@pytest.mark.asyncio
async def test_open_read_not_exist(path_io, temp_dir):
    with universal_exception_reason(FileNotFoundError):
        await path_io.open(temp_dir / "foo", mode="rb")


@pytest.mark.asyncio
async def test_open_write_unreachable(path_io, temp_dir):
    with universal_exception_reason(FileNotFoundError):
        await path_io.open(temp_dir / "foo" / "bar", mode="wb")


@pytest.mark.asyncio
async def test_open_write_directory(path_io, temp_dir):
    with universal_exception_reason(IsADirectoryError):
        await path_io.open(temp_dir, mode="wb")


@pytest.mark.asyncio
async def test_open_bad_mode(path_io, temp_dir):
    with universal_exception_reason(ValueError):
        await path_io.open(temp_dir, mode="bad")


@pytest.mark.asyncio
async def test_rename_source_or_dest_parent_not_exist(path_io, temp_dir):
    with universal_exception_reason(FileNotFoundError):
        await path_io.rename(temp_dir / "foo", temp_dir / "bar")
    with universal_exception_reason(FileNotFoundError):
        await path_io.rename(temp_dir, temp_dir / "foo" / "bar")


@pytest.mark.asyncio
async def test_rename_over_exists(path_io, temp_dir):
    source = temp_dir / "source"
    destination = temp_dir / "destination"
    await path_io.mkdir(source)
    await path_io.mkdir(destination)
    await path_io.rename(source, destination)
aioftp-0.21.4/tests/test_permissions.py000066400000000000000000000014721432163100400202040ustar00rootroot00000000000000import pytest

import aioftp


@pytest.mark.asyncio
async def test_permission_denied(pair_factory, Server,
                                 expect_codes_in_exception):
    s = Server([
        aioftp.User(permissions=[aioftp.Permission(writable=False)])
    ])
    async with pair_factory(None, s) as pair:
        with expect_codes_in_exception("550"):
            await pair.client.make_directory("foo")


@pytest.mark.asyncio
async def test_permission_overridden(pair_factory, Server):
    s = Server([
        aioftp.User(
            permissions=[
                aioftp.Permission("/", writable=False),
                aioftp.Permission("/foo"),
            ]
        )
    ])
    async with pair_factory(None, s) as pair:
        await pair.client.make_directory("foo")
        await pair.client.remove_directory("foo")
aioftp-0.21.4/tests/test_restart.py000066400000000000000000000034341432163100400173150ustar00rootroot00000000000000import pytest


@pytest.mark.parametrize("offset", [0, 3, 10])
@pytest.mark.asyncio
async def test_restart_retr(pair_factory, offset):
    async with pair_factory() as pair:
        atom = b"foobar"
        name = "foo.txt"
        await pair.make_server_files(name, size=1, atom=atom)
        async with pair.client.download_stream(name, offset=offset) as stream:
            assert await stream.read() == atom[offset:]


@pytest.mark.parametrize("offset", [1, 3, 10])
@pytest.mark.parametrize("method", ["upload_stream", "append_stream"])
@pytest.mark.asyncio
async def test_restart_stor_appe(pair_factory, offset, method):
    async with pair_factory() as pair:
        atom = b"foobar"
        name = "foo.txt"
        insert = b"123"
        expect = atom[:offset] + b"\x00" * (offset - len(atom)) + insert + \
            atom[offset + len(insert):]
        await pair.make_server_files(name, size=1, atom=atom)
        stream_factory = getattr(pair.client, method)
        async with stream_factory(name, offset=offset) as stream:
            await stream.write(b"123")
        async with pair.client.download_stream(name) as stream:
            assert await stream.read() == expect


@pytest.mark.asyncio
async def test_restart_reset(pair_factory):
    async with pair_factory() as pair:
        atom = b"foobar"
        name = "foo.txt"
        await pair.make_server_files(name, size=1, atom=atom)
        await pair.client.command("REST 3", "350")
        async with pair.client.download_stream("foo.txt") as stream:
            assert await stream.read() == atom


@pytest.mark.asyncio
async def test_restart_syntax_error(pair_factory, expect_codes_in_exception):
    async with pair_factory() as pair:
        with expect_codes_in_exception("501"):
            await pair.client.command("REST 3abc", "350")
aioftp-0.21.4/tests/test_simple_functions.py000066400000000000000000000167701432163100400212170ustar00rootroot00000000000000import pathlib
import asyncio
import datetime
import itertools

import pytest

import aioftp


def test_parse_directory_response():
    s = 'foo "baz "" test nop" """""fdfs """'
nop" """""fdfs """' parsed = aioftp.Client.parse_directory_response(s) assert parsed == pathlib.PurePosixPath('baz " test nop') def test_connection_del_future(): loop = asyncio.new_event_loop() c = aioftp.Connection(loop=loop) c.foo = "bar" del c.future.foo def test_connection_not_in_storage(): loop = asyncio.new_event_loop() c = aioftp.Connection(loop=loop) with pytest.raises(AttributeError): getattr(c, "foo") def test_available_connections_too_much_acquires(): ac = aioftp.AvailableConnections(3) ac.acquire() ac.acquire() ac.acquire() with pytest.raises(ValueError): ac.acquire() def test_available_connections_too_much_releases(): ac = aioftp.AvailableConnections(3) ac.acquire() ac.release() with pytest.raises(ValueError): ac.release() def test_parse_pasv_response(): p = aioftp.Client.parse_pasv_response assert p("(192,168,1,0,1,0)") == ("192.168.1.0", 256) def test_parse_epsv_response(): p = aioftp.Client.parse_epsv_response assert p("some text (ha-ha) (|||665|) ((((666() (|fd667s).") == (None, 666) assert p("some text (ha-ha) (|||665|) (6666666).") == (None, 666) def _c_locale_time(d, format="%b %d %H:%M"): with aioftp.common.setlocale("C"): return d.strftime(format) def test_parse_ls_date_of_leap_year(): def date_to_p(d): return d.strftime("%Y%m%d%H%M00") p = aioftp.Client.parse_ls_date # Leap year date to test d = datetime.datetime(year=2000, month=2, day=29) current_and_expected_dates = ( # 2016 (leap) ( datetime.datetime(year=2016, month=2, day=29), datetime.datetime(year=2016, month=2, day=29) ), # 2017 ( datetime.datetime(year=2017, month=2, day=28), datetime.datetime(year=2016, month=2, day=29) ), ( datetime.datetime(year=2017, month=3, day=1), datetime.datetime(year=2016, month=2, day=29) ), # 2018 ( datetime.datetime(year=2018, month=2, day=28), datetime.datetime(year=2016, month=2, day=29) ), ( datetime.datetime(year=2018, month=3, day=1), datetime.datetime(year=2020, month=2, day=29) ), # 2019 ( datetime.datetime(year=2019, month=2, day=28), datetime.datetime(year=2020, month=2, day=29) ), ( datetime.datetime(year=2019, month=3, day=1), datetime.datetime(year=2020, month=2, day=29) ), # 2020 (leap) ( datetime.datetime(year=2020, month=2, day=29), datetime.datetime(year=2020, month=2, day=29) ), ) for now, expected in current_and_expected_dates: assert p(_c_locale_time(d), now=now) == date_to_p(expected) def test_parse_ls_date_not_older_than_6_month_format(): def date_to_p(d): return d.strftime("%Y%m%d%H%M00") p = aioftp.Client.parse_ls_date dates = ( datetime.datetime(year=2002, month=1, day=1), datetime.datetime(year=2002, month=12, day=31), ) dt = datetime.timedelta(seconds=15778476 // 2) deltas = (datetime.timedelta(), dt, -dt) for now, delta in itertools.product(dates, deltas): d = now + delta assert p(_c_locale_time(d), now=now) == date_to_p(d) def test_parse_ls_date_older_than_6_month_format(): def date_to_p(d): return d.strftime("%Y%m%d%H%M00") p = aioftp.Client.parse_ls_date dates = ( datetime.datetime(year=2002, month=1, day=1), datetime.datetime(year=2002, month=12, day=31), ) dt = datetime.timedelta(seconds=15778476, days=30) deltas = (dt, -dt) for now, delta in itertools.product(dates, deltas): d = now + delta if delta.total_seconds() > 0: expect = date_to_p(d.replace(year=d.year - 1)) else: expect = date_to_p(d.replace(year=d.year + 1)) assert p(_c_locale_time(d), now=now) == expect def test_parse_ls_date_short(): def date_to_p(d): return d.strftime("%Y%m%d%H%M00") p = aioftp.Client.parse_ls_date dates = ( datetime.datetime(year=2002, month=1, day=1), 
    for d in dates:
        s = _c_locale_time(d, format="%b %d %Y")
        assert p(s) == date_to_p(d)


def test_parse_list_line_unix():
    lines = {
        "file": [
            "-rw-rw-r-- 1 poh poh 6595 Feb 27 04:14 history.rst",
            "lrwxrwxrwx 1 poh poh 6 Mar 23 05:46 link-tmp.py -> tmp.py",
        ],
        "dir": [
            "drw-rw-r-- 1 poh poh 6595 Feb 27 04:14 history.rst",
            "drw-rw-r-- 1 poh poh 6595 Jan 03 2016 changes.rst",
            "drw-rw-r-- 1 poh poh 6595 Mar 10 1996 README.rst",
        ],
        "unknown": [
            "Erw-rw-r-- 1 poh poh 6595 Feb 27 04:14 history.rst",
        ]
    }
    p = aioftp.Client(encoding="utf-8").parse_list_line_unix
    for t, stack in lines.items():
        for line in stack:
            _, parsed = p(line.encode("utf-8"))
            assert parsed["type"] == t
    with pytest.raises(ValueError):
        p(b"-rw-rw-r-- 1 poh poh 6xx5 Feb 27 04:14 history.rst")
    with pytest.raises(ValueError):
        p(b"-rw-rw-r-- xx poh poh 6595 Feb 27 04:14 history.rst")


@pytest.mark.parametrize("owner", ["s", "x", "-", "E"])
@pytest.mark.parametrize("group", ["s", "x", "-", "E"])
@pytest.mark.parametrize("others", ["t", "x", "-", "E"])
@pytest.mark.parametrize("read", ["r", "-"])
@pytest.mark.parametrize("write", ["w", "-"])
def test_parse_unix_mode(owner, group, others, read, write):
    s = f"{read}{write}{owner}{read}{write}{group}{read}{write}{others}"
    if "E" in {owner, group, others}:
        with pytest.raises(ValueError):
            aioftp.Client.parse_unix_mode(s)
    else:
        assert isinstance(aioftp.Client.parse_unix_mode(s), int)


def test_parse_list_line_failed():
    with pytest.raises(ValueError):
        aioftp.Client(encoding="utf-8").parse_list_line(b"what a hell?!")


def test_reprs_works():
    repr(aioftp.Throttle())
    repr(aioftp.Permission())
    repr(aioftp.User())


def test_throttle_reset():
    t = aioftp.Throttle(limit=1, reset_rate=1)
    t.append(b"-" * 3, 0)
    assert t._start == 0
    assert t._sum == 3
    t.append(b"-" * 3, 2)
    assert t._start == 2
    assert t._sum == 4


def test_permission_is_parent():
    p = aioftp.Permission("/foo/bar")
    assert p.is_parent(pathlib.PurePosixPath("/foo/bar/baz"))
    assert not p.is_parent(pathlib.PurePosixPath("/bar/baz"))


def test_server_mtime_build():
    now = datetime.datetime(year=2002, month=1, day=1).timestamp()
    past = datetime.datetime(year=2001, month=1, day=1).timestamp()
    b = aioftp.Server.build_list_mtime
    assert b(now, now) == "Jan 1 00:00"
    assert b(past, now) == "Jan 1 2001"


def test_get_paths_windows_traverse():
    base_path = pathlib.PureWindowsPath("C:\\ftp")
    user = aioftp.User()
    user.base_path = base_path
    connection = aioftp.Connection(current_directory=base_path, user=user)
    virtual_path = pathlib.PurePosixPath("/foo/C:\\windows")
    real_path, resolved_virtual_path = aioftp.Server.get_paths(
        connection,
        virtual_path,
    )
    assert real_path == base_path
    assert resolved_virtual_path == pathlib.PurePosixPath("/")
aioftp-0.21.4/tests/test_throttle.py000066400000000000000000000053361432163100400175010ustar00rootroot00000000000000import asyncio
from functools import reduce
from pathlib import Path

import pytest

import aioftp


@pytest.mark.asyncio
async def test_patched_sleep(skip_sleep):
    await asyncio.sleep(10)
    assert skip_sleep.is_close(10)


SIZE = 3 * 100 * 1024  # 300KiB


@pytest.mark.parametrize("times", [10, 20, 30])
@pytest.mark.parametrize("type", ["read", "write"])
@pytest.mark.parametrize("direction", ["download", "upload"])
@pytest.mark.asyncio
async def test_client_side_throttle(pair_factory, skip_sleep, times, type,
                                    direction):
    async with pair_factory() as pair:
        await pair.make_server_files("foo", size=SIZE)
        await pair.make_client_files("foo", size=SIZE)
        getattr(pair.client.throttle, type).limit = SIZE / times
        await getattr(pair.client, direction)("foo")
        if (type, direction) in {("read", "download"), ("write", "upload")}:
            assert skip_sleep.is_close(times)
        else:
            assert skip_sleep.is_close(0)


@pytest.mark.parametrize("times", [10, 20, 30])
@pytest.mark.parametrize("users", [1, 2, 3])
@pytest.mark.parametrize("throttle_direction", ["read", "write"])
@pytest.mark.parametrize("data_direction", ["download", "upload"])
@pytest.mark.parametrize("throttle_level",
                         ["throttle", "throttle_per_connection"])
@pytest.mark.asyncio
async def test_server_side_throttle(pair_factory, skip_sleep, times, users,
                                    throttle_direction, data_direction,
                                    throttle_level):
    async with pair_factory() as pair:
        names = []
        for i in range(users):
            name = f"foo{i}"
            names.append(name)
            await pair.make_server_files(name, size=SIZE)
        throttle = reduce(getattr, [throttle_level, throttle_direction],
                          pair.server)
        throttle.limit = SIZE / times
        clients = []
        for name in names:
            c = aioftp.Client(path_io_factory=aioftp.MemoryPathIO)
            async with c.path_io.open(Path(name), "wb") as f:
                await f.write(b"-" * SIZE)
            await c.connect(pair.server.server_host, pair.server.server_port)
            await c.login()
            clients.append(c)
        coros = [getattr(c, data_direction)(n)
                 for c, n in zip(clients, names)]
        await asyncio.gather(*coros)
        await asyncio.gather(*[c.quit() for c in clients])
        throttled = {("read", "upload"), ("write", "download")}
        if (throttle_direction, data_direction) not in throttled:
            assert skip_sleep.is_close(0)
        else:
            t = times
            if throttle_level == "throttle":  # global
                t *= users
            assert skip_sleep.is_close(t)
aioftp-0.21.4/tests/test_user.py000066400000000000000000000002401432163100400165990ustar00rootroot00000000000000import pytest

import aioftp


def test_user_not_absolute_home():
    with pytest.raises(aioftp.errors.PathIsNotAbsolute):
        aioftp.User(home_path="foo")