pax_global_header00006660000000000000000000000064145627602620014524gustar00rootroot0000000000000052 comment=ff5186fb3f4f95669e7857ce4bcaa996dafafadd mongomock-4.1.3/000077500000000000000000000000001456276026200135225ustar00rootroot00000000000000mongomock-4.1.3/.gitignore000066400000000000000000000006611456276026200155150ustar00rootroot00000000000000AUTHORS ChangeLog #### Python *.py[co] # Packages *.egg *.egg-info dist build eggs .eggs/ parts bin develop-eggs .installed.cfg # Installer logs pip-log.txt # Unit test / coverage reports .coverage .tox .achievements .cache/ #### Eclipse *.pydevproject .project .metadata # Intellij project files *.iml *.ipr *.iws .idea #### Emacs \#*\# .#* #### C/C++ *.o #### Haskell *.hi flycheck-* .DS_Store .env ### vim .ropeproject/ mongomock-4.1.3/.travis.yml000066400000000000000000000057051456276026200156420ustar00rootroot00000000000000language: python cache: pip os: linux dist: focal # OS+dist should be kept in sync with Dockerfile and UBUNTU_VERSION below. env: global: # Ubuntu version, keep in sync with os + dist above. 
- UBUNTU_VERSION=2004 # MongoDB version should be kept in sync with docker-compose image and mongomock/__init__.py - MONGODB=5.0.5 jobs: include: - python: pypy3 env: TOX_ENV=pypy3-pymongo-pyexecjs - python: 3.6 env: TOX_ENV=py36-pymongo-pyexecjs - python: 3.7 env: TOX_ENV=py37-pymongo-pyexecjs - python: 3.8 env: TOX_ENV=py38-pymongo-pyexecjs - python: 3.9 env: TOX_ENV=py39-pymongo361-pyexecjs - python: 3.9 env: TOX_ENV=py39-pymongo-pyexecjs - python: "3.10" env: TOX_ENV=py310 - python: "3.10" env: TOX_ENV=py310-pymongo - python: "3.10" env: TOX_ENV=py310-pyexecjs - python: "3.10" env: TOX_ENV=py310-pymongo-pyexecjs - python: "3.10" env: TOX_ENV=pep8 - python: "3.10" env: TOX_ENV=pylint - python: "3.10" env: TOX_ENV=mypy - python: "3.10" env: TOX_ENV=py310-pymongo-pyexecjs-mongodb4 MONGODB=4.4.0 script: - tox -e $TOX_ENV before_install: - "sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10" - "sudo apt-get update" - "sudo apt-get install libcurl4 -y" before_script: - mkdir ${PWD}/mongodb-linux-x86_64-ubuntu${UBUNTU_VERSION}-${MONGODB}/data - ${PWD}/mongodb-linux-x86_64-ubuntu${UBUNTU_VERSION}-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-ubuntu${UBUNTU_VERSION}-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-ubuntu${UBUNTU_VERSION}-${MONGODB}/mongodb.log --fork - "until nc -z localhost 27017; do echo Waiting for MongoDB; sleep 1; done" after_script: - pkill mongod install: - wget https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-ubuntu${UBUNTU_VERSION}-${MONGODB}.tgz - tar xzf mongodb-linux-x86_64-ubuntu${UBUNTU_VERSION}-${MONGODB}.tgz - ${PWD}/mongodb-linux-x86_64-ubuntu${UBUNTU_VERSION}-${MONGODB}/bin/mongod --version - pip install tox deploy: provider: pypi username: __token__ distributions: "bdist_wheel sdist" password: secure: 
"V77o/nLMWGzQ83Ilpb7zqLmQZAGWRScAQDqloBFeriJXrjh8IFETjl4euWYNbFPzlImXb/ujxYoW0Jd3IdWY/WNTgWaasW++RRBSguwhMzAYZrL2xLOMw23zFOtvOuQ9JEKSJyS3G3GvadUdzQC1Wt/H6LPOuclxJF5oIgQUVU9T4mXpr6xMhw1D86joLCFD juzvZux02CDfUryRmcNqVd8KDYcs7JCLC9bgJxEB/7nBJEdICONZ8J/kGaiMEFDfplgdpAfR1KX/sVBEMYBCG5eDin1S2Bi22mgJ6FRsTegd5dy1sj+sT+sATmWJ8IlCpnkCszg/psQRmSiSkNEpH2mSUp3KZLspUSS+E9gWwbOyYGR+5OwHQ0NQzfCyPf8nADP/l1v3Wxdw 4gqZUynAmV4cWmGP1tnKCpgkGvTr3QRCLmjQGXPnhgYOcJpu6TT5UhmdB5dhdVWYgYqGu27pjAVy+grK9GmtHYAg5MmZC0v6GClEsxeA/qXe9McyFU13YHKppmAelzIcewy+tJxwFmFSPvscHK8Q/j58C5NIOBqVT9GR3jWQqtsvA8yCxmp/KD1sPSF1KSHHUHlCMlmOpmzc ZLkLaskRWqznM6wxiLs8UeuyaEEA5R/BdZs4oNZFm9ls0gsf5GPZA6r+8GI5OrSWs6uun/+Th+Xys9yNiMU=" on: condition: "$TOX_ENV = py310" python: "3.10" tags: true all_branches: true repo: mongomock/mongomock mongomock-4.1.3/Dockerfile000066400000000000000000000013221456276026200155120ustar00rootroot00000000000000# OS+dist should be kept in sync with .travis.yml FROM ubuntu:focal RUN apt-get update && apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev git netcat RUN curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/master/bin/pyenv-installer | bash ENV PYENV_ROOT /root/.pyenv ENV PATH $PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH RUN eval "$(pyenv init -)" && eval "$(pyenv virtualenv-init -)" RUN pyenv install 3.6.15 RUN pyenv install 3.7.9 RUN pyenv install 3.8.12 RUN pyenv install 3.9.10 RUN pyenv install pypy3.8-7.3.7 RUN pyenv install 3.10.2 RUN pyenv local 3.6.15 3.7.9 3.8.12 3.9.10 3.10.2 pypy3.8-7.3.7 RUN pip install tox mongomock-4.1.3/LICENSE000066400000000000000000000027071456276026200145350ustar00rootroot00000000000000Copyright (c) 2012, Rotem Yaari All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of organization nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY Rotem Yaari ''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Rotem Yaari BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.mongomock-4.1.3/MANIFEST.in000066400000000000000000000002411456276026200152550ustar00rootroot00000000000000include tox.ini include tests/*.py include *.rst include *.txt include AUTHORS include Changelog include LICENSE include Makefile recursive-include tests *.json mongomock-4.1.3/Makefile000066400000000000000000000005471456276026200151700ustar00rootroot00000000000000default: test detox-test: detox travis-test: test test: env .env/bin/nosetests -w tests coverage-test: env .env/bin/coverage run .env/bin/nosetests -w tests env: .env/.up-to-date .env/.up-to-date: setup.py Makefile virtualenv .env .env/bin/pip install -e . 
.env/bin/pip install nose coverage PyExecJS pymongo touch .env/.up-to-date .PHONY: doc mongomock-4.1.3/Missing_Features.rst000066400000000000000000000060321456276026200175240ustar00rootroot00000000000000What is this? ------------- This document lists down the features missing in mongomock library. PRs for these features are highly appreciated. If I miss to include a feature in the below list, Please feel free to add to the below list and raise a PR. * $rename complex operations - https://docs.mongodb.com/manual/reference/operator/update/rename/ * create_collection options - https://docs.mongodb.com/v3.2/reference/method/db.createCollection/#definition * bypass_document_validation options * session options * codec options * Operations of the aggregate pipeline: * `$bucketAuto `_ * `$collStats `_ * `$currentOp `_ * `$geoNear `_ * `$indexStats `_ * `$listLocalSessions `_ * `$listSessions `_ * `$merge `_ * `$planCacheStats `_ * `$redact `_ * `$replaceWith `_ * `$sortByCount `_ * `$unset ` _ * Operators within the aggregate pipeline: * Arithmetic operations on dates: * `$add `_ * Some date operators ($isoDayOfWeek, $isoWeekYear, …) * Some set operators ($setIntersection, $setDifference, …) * Some string operators ($indexOfBytes, $trim, …) * Text search operator ($meta) * Projection operator $map * Array operators ($isArray, $indexOfArray, …) * `$mergeObjects `_ * Some type conversion operators ($convert, …) * Operators within the query language (find): * `$jsonSchema `_ * `$text `_ search * `$where `_ * `map_reduce `_ options (``scope`` and ``finalize``) * Database `command `_ method except for the ``ping`` command. * Raw Batch BSON operations (`aggregate_raw_batches` and `find_raw_batches`) * `Expiring Data `_ mongomock-4.1.3/README.rst000066400000000000000000000243111456276026200152120ustar00rootroot00000000000000.. 
image:: https://app.travis-ci.com/mongomock/mongomock.svg?branch=develop :target: https://app.travis-ci.com/mongomock/mongomock |pypi_version| |pypi_license| |pypi_wheel| .. image:: https://codecov.io/gh/mongomock/mongomock/branch/develop/graph/badge.svg :target: https://codecov.io/gh/mongomock/mongomock What is this? ------------- Mongomock is a small library to help testing Python code that interacts with MongoDB via Pymongo. To understand what it's useful for, we can take the following code: .. code-block:: python def increase_votes(collection): for document in collection.find(): collection.update_one(document, {'$set': {'votes': document['votes'] + 1}}) The above code can be tested in several ways: 1. It can be tested against a real mongodb instance with pymongo. 2. It can receive a record-replay style mock as an argument. In this manner we record the expected calls (find, and then a series of updates), and replay them later. 3. It can receive a carefully hand-crafted mock responding to find() and update() appropriately. Option number 1 is obviously the best approach here, since we are testing against a real mongodb instance. However, a mongodb instance needs to be set up for this, and cleaned before/after the test. You might want to run your tests in continuous integration servers, on your laptop, or other bizarre platforms - which makes the mongodb requirement a liability. We are left with #2 and #3. Unfortunately they are very high maintenance in real scenarios, since they replicate the series of calls made in the code, violating the DRY rule. Let's see #2 in action - we might write our test like so: .. code-block:: python def test_increase_votes(): objects = [dict(...), dict(...), ...] 
collection_mock = my_favorite_mock_library.create_mock(Collection) record() collection_mock.find().AndReturn(objects) for obj in objects: collection_mock.update_one(obj, {'$set': {'votes': obj['votes']}}) replay() increase_votes(collection_mock) verify() Let's assume the code changes one day, because the author just learned about the '$inc' instruction: .. code-block:: python def increase_votes(collection): collection.update_many({}, {'$inc': {'votes': 1}}) This breaks the test, although the end result being tested is just the same. The test also repeats large portions of the code we already wrote. We are left, therefore, with option #3 -- you want something to behave like a mongodb database collection, without being one. This is exactly what this library aims to provide. With mongomock, the test simply becomes: .. code-block:: python def test_increase_votes(): collection = mongomock.MongoClient().db.collection objects = [dict(votes=1), dict(votes=2), ...] for obj in objects: obj['_id'] = collection.insert_one(obj).inserted_id increase_votes(collection) for obj in objects: stored_obj = collection.find_one({'_id': obj['_id']}) stored_obj['votes'] -= 1 assert stored_obj == obj # by comparing all fields we make sure only votes changed This code checks *increase_votes* with respect to its functionality, not syntax or algorithm, and therefore is much more robust as a test. If the code to be tested is creating the connection itself with pymongo, you can use mongomock.patch (NOTE: you should use :code:`pymongo.MongoClient(...)` rather than :code:`from pymongo import MongoClient`, as shown below): .. code-block:: python @mongomock.patch(servers=(('server.example.com', 27017),)) def test_increate_votes_endpoint(): objects = [dict(votes=1), dict(votes=2), ...] client = pymongo.MongoClient('server.example.com') client.db.collection.insert_many(objects) call_endpoint('/votes') ... 
verify client.db.collection Important Note About Project Status & Development ------------------------------------------------- MongoDB is complex. This library aims at a reasonably complete mock of MongoDB for testing purposes, not a perfect replica. This means some features are not likely to make it in any time soon. Also, since many corner cases are encountered along the way, our goal is to try and TDD our way into completeness. This means that every time we encounter a missing or broken (incompatible) feature, we write a test for it and fix it. There are probably lots of such issues hiding around lurking, so feel free to open issues and/or pull requests and help the project out! **NOTE**: We don't include pymongo functionality as "stubs" or "placeholders". Since this library is used to validate production code, it is unacceptable to behave differently than the real pymongo implementation. In such cases it is better to throw `NotImplementedError` than implement a modified version of the original behavior. Upgrading to Pymongo v4 ----------------------- The major version 4 of Pymongo changed the API quite a bit. The Mongomock library has evolved to help you ease the migration: 1. Upgrade to Mongomock v4 or above: if your tests are running with Pymongo installed, Mongomock will adapt its own API to the version of Pymongo installed. 2. Upgrade to Pymongo v4 or above: your tests using Mongomock will fail exactly where your code would fail in production, so that you can fix it before releasing. Contributing ------------ When submitting a PR, please make sure that: 1. You include tests for the feature you are adding or bug you are fixing. Preferably, the test should compare against the real MongoDB engine (see `examples in tests`_ for reference). 2. No existing test got deleted or unintentionally castrated 3. The travis build passes on your PR. To download, setup and perfom tests, run the following commands on Mac / Linux: .. 
code-block:: bash git clone git@github.com:mongomock/mongomock.git pip install tox cd mongomock tox Alternatively, docker-compose can be used to simplify dependency management for local development: .. code-block:: bash git clone git@github.com:mongomock/mongomock.git cd mongomock docker-compose build docker-compose run --rm mongomock If you need/want tox to recreate its environments, you can override the container command by running: .. code-block:: bash docker-compose run --rm mongomock tox -r Similarly, if you'd like to run tox against a specific environment in the container: .. code-block:: bash docker-compose run --rm mongomock tox -e py38-pymongo-pyexecjs If you'd like to run only one test, you can also add the test name at the end of your command: .. code-block:: bash docker-compose run --rm mongomock tox -e py38-pymongo-pyexecjs tests.test__mongomock.MongoClientCollectionTest.test__aggregate_system_variables_generate_array NOTE: If the MongoDB image was updated, or you want to try a different MongoDB version in docker-compose, you'll have to issue a `docker-compose down` before you do anything else to ensure you're running against the intended version. utcnow ~~~~ When developing features that need to make use of "now," please use the libraries :code:`utcnow` helper method in the following way: .. code-block:: python import mongomock # Awesome code! now_reference = mongomock.utcnow() This provides users a consistent way to mock the notion of "now" in mongomock if they so choose. Please see `utcnow docstring for more details `_. Branching model ~~~~~~~~~~~~~~~ The branching model used for this project follows the `gitflow workflow`_. This means that pull requests should be issued against the `develop` branch and *not* the `master` branch. If you want to contribute to the legacy 2.x branch then your pull request should go into the `support/2.x` branch. 
Releasing ~~~~~~~~~ When ready for a release, tag the `develop` branch with a new tag (please keep semver names) and push your tags to GitHub. The CI should do the rest. To add release notes, create a release in GitHub's `Releases Page `_ then generate the release notes locally with: .. code-block:: bash python3 -c "from pbr import git; git.write_git_changelog()" Then you can get the relevant section in the generated `Changelog` file. Acknowledgements ---------------- Mongomock has originally been developed by `Rotem Yaari `_, then by `Martin Domke `. It is currently being developed and maintained by `Pascal Corpet `_ . Also, many thanks go to the following people for helping out, contributing pull requests and fixing bugs: * Alec Perkins * Alexandre Viau * Austin W Ellis * Andrey Ovchinnikov * Arthur Hirata * Baruch Oxman * Corey Downing * Craig Hobbs * Daniel Murray * David Fischer * Diego Garcia * Dmitriy Kostochko * Drew Winstel * Eddie Linder * Edward D'Souza * Emily Rosengren * Eugene Chernyshov * Grigoriy Osadchenko * Israel Teixeira * Jacob Perkins * Jason Burchfield * Jason Sommer * Jeff Browning * Jeff McGee * Joël Franusic * `Jonathan Hedén `_ * Julian Hille * Krzysztof Płocharz * Lyon Zhang * `Lucas Rangel Cezimbra `_ * Marc Prewitt * Marcin Barczynski * Marian Galik * Michał Albrycht * Mike Ho * Nigel Choi * Omer Gertel * Omer Katz * Papp Győző * Paul Glass * Scott Sexton * Srinivas Reddy Thatiparthy * Taras Boiko * Todd Tomkinson * `Xinyan Lu `_ * Zachary Carter * catty (ca77y _at_ live.com) * emosenkis * hthieu1110 * יppetlinskiy * pacud * tipok * waskew (waskew _at_ narrativescience.com) * jmsantorum (jmsantorum [at] gmail [dot] com) * lidongyong * `Juan Gutierrez `_ .. _examples in tests: https://github.com/mongomock/mongomock/blob/develop/tests/test__mongomock.py .. _gitflow workflow: https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow .. 
|travis| image:: https://travis-ci.org/mongomock/mongomock.svg?branch=develop :target: https://travis-ci.org/mongomock/mongomock :alt: Travis CI build .. |pypi_version| image:: https://img.shields.io/pypi/v/mongomock.svg :target: https://pypi.python.org/pypi/mongomock :alt: PyPI package .. |pypi_license| image:: https://img.shields.io/pypi/l/mongomock.svg :alt: PyPI license .. |pypi_wheel| image:: https://img.shields.io/pypi/wheel/mongomock.svg :alt: PyPI wheel status mongomock-4.1.3/docker-compose.yml000066400000000000000000000007051456276026200171610ustar00rootroot00000000000000version: "3.8" services: mongomock: build: context: . image: mongomock:latest working_dir: /project command: tox environment: - TEST_MONGO_HOST=mongodb://mongo/mock - MONGODB=5.0.5 volumes: - .:/project depends_on: - mongo mongo: # The image chosen here should be kept in sync with the .travis.yml mongo version and with # the value in mongomock/__init__.py image: mongo:5.0.5 mongomock-4.1.3/mongomock/000077500000000000000000000000001456276026200155135ustar00rootroot00000000000000mongomock-4.1.3/mongomock/__init__.py000066400000000000000000000051371456276026200176320ustar00rootroot00000000000000import os try: from pymongo.errors import PyMongoError except ImportError: class PyMongoError(Exception): pass try: from pymongo.errors import OperationFailure except ImportError: class OperationFailure(PyMongoError): def __init__(self, message, code=None, details=None): super(OperationFailure, self).__init__() self._message = message self._code = code self._details = details code = property(lambda self: self._code) details = property(lambda self: self._details) def __str__(self): return self._message try: from pymongo.errors import WriteError except ImportError: class WriteError(OperationFailure): pass try: from pymongo.errors import DuplicateKeyError except ImportError: class DuplicateKeyError(WriteError): pass try: from pymongo.errors import BulkWriteError except ImportError: class 
BulkWriteError(OperationFailure): def __init__(self, results): super(BulkWriteError, self).__init__( 'batch op errors occurred', 65, results) try: from pymongo.errors import CollectionInvalid except ImportError: class CollectionInvalid(PyMongoError): pass try: from pymongo.errors import InvalidName except ImportError: class InvalidName(PyMongoError): pass try: from pymongo.errors import InvalidOperation except ImportError: class InvalidOperation(PyMongoError): pass try: from pymongo.errors import ConfigurationError except ImportError: class ConfigurationError(PyMongoError): pass try: from pymongo.errors import InvalidURI except ImportError: class InvalidURI(ConfigurationError): pass from .helpers import ObjectId, utcnow # noqa from mongomock.__version__ import __version__ __all__ = [ '__version__', 'Database', 'DuplicateKeyError', 'Collection', 'CollectionInvalid', 'InvalidName', 'MongoClient', 'ObjectId', 'OperationFailure', 'WriteConcern', 'ignore_feature', 'patch', 'warn_on_feature', 'SERVER_VERSION', ] from .collection import Collection from .database import Database from .mongo_client import MongoClient from .patch import patch from .write_concern import WriteConcern from .not_implemented import ignore_feature, warn_on_feature # The version of the server faked by mongomock. Callers may patch it before creating connections to # update the behavior of mongomock. # Keep the default version in sync with docker-compose.yml and travis.yml. 
SERVER_VERSION = os.getenv('MONGODB', '5.0.5') mongomock-4.1.3/mongomock/__init__.pyi000066400000000000000000000016001456276026200177720ustar00rootroot00000000000000from typing import Any, Callable, Literal, Sequence, Tuple, Union from unittest import mock from bson.objectid import ObjectId as ObjectId from pymongo import MongoClient as MongoClient from pymongo.collection import Collection as Collection from pymongo.database import Database as Database from pymongo.errors import ( CollectionInvalid as CollectionInvalid, DuplicateKeyError as DuplicateKeyError, InvalidName as InvalidName, OperationFailure as OperationFailure, ) def patch( servers: Union[str, Tuple[str, int], Sequence[Union[str, Tuple[str, int]]]] = ..., on_new: Literal['error', 'create', 'timeout', 'pymongo'] = ..., ) -> mock._patch: ... _FeatureName = Literal['collation', 'session'] def ignore_feature(feature: _FeatureName) -> None: ... def warn_on_feature(feature: _FeatureName) -> None: ... SERVER_VERSION: str = ... mongomock-4.1.3/mongomock/__version__.py000066400000000000000000000005121456276026200203440ustar00rootroot00000000000000from platform import python_version_tuple python_version = python_version_tuple() if (int(python_version[0]), int(python_version[1])) >= (3, 8): from importlib.metadata import version __version__ = version('mongomock') else: import pkg_resources __version__ = pkg_resources.get_distribution('mongomock').version mongomock-4.1.3/mongomock/aggregate.py000066400000000000000000002007161456276026200200210ustar00rootroot00000000000000"""Module to handle the operations within the aggregate pipeline.""" import bisect import collections import copy import datetime import decimal import functools import itertools import math import numbers from packaging import version import random import re import sys import warnings import pytz from sentinels import NOTHING import mongomock from mongomock import command_cursor from mongomock import filtering from mongomock import helpers from 
mongomock import OperationFailure try: from bson.errors import InvalidDocument from bson import Regex, decimal128 decimal_support = True _RE_TYPES = (helpers.RE_TYPE, Regex) except ImportError: InvalidDocument = OperationFailure decimal_support = False _RE_TYPES = (helpers.RE_TYPE) _random = random.Random() group_operators = [ '$addToSet', '$avg', '$first', '$last', '$max', '$mergeObjects', '$min', '$push', '$stdDevPop', '$stdDevSamp', '$sum', ] unary_arithmetic_operators = { '$abs', '$ceil', '$exp', '$floor', '$ln', '$log10', '$sqrt', '$trunc', } binary_arithmetic_operators = { '$divide', '$log', '$mod', '$pow', '$subtract', } arithmetic_operators = unary_arithmetic_operators | binary_arithmetic_operators | { '$add', '$multiply', } project_operators = [ '$max', '$min', '$avg', '$sum', '$stdDevPop', '$stdDevSamp', '$arrayElemAt', '$first', '$last', ] control_flow_operators = [ '$switch', ] projection_operators = [ '$let', '$literal', ] date_operators = [ '$dateFromString', '$dateToString', '$dateFromParts', '$dayOfMonth', '$dayOfWeek', '$dayOfYear', '$hour', '$isoDayOfWeek', '$isoWeek', '$isoWeekYear', '$millisecond', '$minute', '$month', '$second', '$week', '$year', ] conditional_operators = ['$cond', '$ifNull'] array_operators = [ '$concatArrays', '$filter', '$indexOfArray', '$map', '$range', '$reduce', '$reverseArray', '$size', '$slice', '$zip', ] object_operators = [ '$mergeObjects', ] text_search_operators = ['$meta'] string_operators = [ '$concat', '$indexOfBytes', '$indexOfCP', '$regexMatch', '$split', '$strcasecmp', '$strLenBytes', '$strLenCP', '$substr', '$substrBytes', '$substrCP', '$toLower', '$toUpper', '$trim', ] comparison_operators = [ '$cmp', '$eq', '$ne', ] + list(filtering.SORTING_OPERATOR_MAP.keys()) boolean_operators = ['$and', '$or', '$not'] set_operators = [ '$in', '$setEquals', '$setIntersection', '$setDifference', '$setUnion', '$setIsSubset', '$anyElementTrue', '$allElementsTrue', ] type_convertion_operators = [ '$convert', '$toString', 
'$toInt', '$toDecimal', '$toLong', '$arrayToObject', '$objectToArray', ] type_operators = [ '$isNumber', '$isArray', ] def _avg_operation(values): values_list = list(v for v in values if isinstance(v, numbers.Number)) if not values_list: return None return sum(values_list) / float(len(list(values_list))) def _group_operation(values, operator): values_list = list(v for v in values if v is not None) if not values_list: return None return operator(values_list) def _sum_operation(values): values_list = list() if decimal_support: for v in values: if isinstance(v, numbers.Number): values_list.append(v) elif isinstance(v, decimal128.Decimal128): values_list.append(v.to_decimal()) else: values_list = list(v for v in values if isinstance(v, numbers.Number)) sum_value = sum(values_list) return decimal128.Decimal128(sum_value) if isinstance(sum_value, decimal.Decimal) else sum_value def _merge_objects_operation(values): merged_doc = dict() for v in values: if isinstance(v, dict): merged_doc.update(v) return merged_doc _GROUPING_OPERATOR_MAP = { '$sum': _sum_operation, '$avg': _avg_operation, '$mergeObjects': _merge_objects_operation, '$min': lambda values: _group_operation(values, min), '$max': lambda values: _group_operation(values, max), '$first': lambda values: values[0] if values else None, '$last': lambda values: values[-1] if values else None, } class _Parser(object): """Helper to parse expressions within the aggregate pipeline.""" def __init__(self, doc_dict, user_vars=None, ignore_missing_keys=False): self._doc_dict = doc_dict self._ignore_missing_keys = ignore_missing_keys self._user_vars = user_vars or {} def parse(self, expression): """Parse a MongoDB expression.""" if not isinstance(expression, dict): # May raise a KeyError despite the ignore missing key. 
return self._parse_basic_expression(expression) if len(expression) > 1 and any(key.startswith('$') for key in expression): raise OperationFailure( 'an expression specification must contain exactly one field, ' 'the name of the expression. Found %d fields in %s' % (len(expression), expression)) value_dict = {} for k, v in expression.items(): if k in arithmetic_operators: return self._handle_arithmetic_operator(k, v) if k in project_operators: return self._handle_project_operator(k, v) if k in projection_operators: return self._handle_projection_operator(k, v) if k in comparison_operators: return self._handle_comparison_operator(k, v) if k in date_operators: return self._handle_date_operator(k, v) if k in array_operators: return self._handle_array_operator(k, v) if k in conditional_operators: return self._handle_conditional_operator(k, v) if k in control_flow_operators: return self._handle_control_flow_operator(k, v) if k in set_operators: return self._handle_set_operator(k, v) if k in string_operators: return self._handle_string_operator(k, v) if k in type_convertion_operators: return self._handle_type_convertion_operator(k, v) if k in type_operators: return self._handle_type_operator(k, v) if k in boolean_operators: return self._handle_boolean_operator(k, v) if k in text_search_operators + projection_operators + object_operators: raise NotImplementedError( "'%s' is a valid operation but it is not supported by Mongomock yet." 
% k) if k.startswith('$'): raise OperationFailure("Unrecognized expression '%s'" % k) try: value = self.parse(v) except KeyError: if self._ignore_missing_keys: continue raise value_dict[k] = value return value_dict def parse_many(self, values): for value in values: try: yield self.parse(value) except KeyError: if self._ignore_missing_keys: yield None else: raise def _parse_to_bool(self, expression): """Parse a MongoDB expression and then convert it to bool""" # handles converting `undefined` (in form of KeyError) to False try: return helpers.mongodb_to_bool(self.parse(expression)) except KeyError: return False def _parse_or_nothing(self, expression): try: return self.parse(expression) except KeyError: return NOTHING def _parse_basic_expression(self, expression): if isinstance(expression, str) and expression.startswith('$'): if expression.startswith('$$'): return helpers.get_value_by_dot(dict({ 'ROOT': self._doc_dict, 'CURRENT': self._doc_dict, }, **self._user_vars), expression[2:], can_generate_array=True) return helpers.get_value_by_dot(self._doc_dict, expression[1:], can_generate_array=True) return expression def _handle_boolean_operator(self, operator, values): if operator == '$and': return all([self._parse_to_bool(value) for value in values]) if operator == '$or': return any(self._parse_to_bool(value) for value in values) if operator == '$not': return not self._parse_to_bool(values) # This should never happen: it is only a safe fallback if something went wrong. raise NotImplementedError( # pragma: no cover "Although '%s' is a valid boolean operator for the " 'aggregation pipeline, it is currently not implemented' ' in Mongomock.' 
% operator ) def _handle_arithmetic_operator(self, operator, values): if operator in unary_arithmetic_operators: try: number = self.parse(values) except KeyError: return None if number is None: return None if not isinstance(number, numbers.Number): raise OperationFailure( "Parameter to %s must evaluate to a number, got '%s'" % (operator, type(number))) if operator == '$abs': return abs(number) if operator == '$ceil': return math.ceil(number) if operator == '$exp': return math.exp(number) if operator == '$floor': return math.floor(number) if operator == '$ln': return math.log(number) if operator == '$log10': return math.log10(number) if operator == '$sqrt': return math.sqrt(number) if operator == '$trunc': return math.trunc(number) if operator in binary_arithmetic_operators: if not isinstance(values, (tuple, list)): raise OperationFailure( "Parameter to %s must evaluate to a list, got '%s'" % (operator, type(values))) if len(values) != 2: raise OperationFailure('%s must have only 2 parameters' % operator) number_0, number_1 = self.parse_many(values) if number_0 is None or number_1 is None: return None if operator == '$divide': return number_0 / number_1 if operator == '$log': return math.log(number_0, number_1) if operator == '$mod': return math.fmod(number_0, number_1) if operator == '$pow': return math.pow(number_0, number_1) if operator == '$subtract': if isinstance(number_0, datetime.datetime) and \ isinstance(number_1, (int, float)): number_1 = datetime.timedelta(milliseconds=number_1) res = number_0 - number_1 if isinstance(res, datetime.timedelta): return round(res.total_seconds() * 1000) return res assert isinstance(values, (tuple, list)), \ "Parameter to %s must evaluate to a list, got '%s'" % (operator, type(values)) parsed_values = list(self.parse_many(values)) assert parsed_values, '%s must have at least one parameter' % operator for value in parsed_values: if value is None: return None assert isinstance(value, numbers.Number), '%s only uses numbers' % 
operator if operator == '$add': return sum(parsed_values) if operator == '$multiply': return functools.reduce(lambda x, y: x * y, parsed_values) # This should never happen: it is only a safe fallback if something went wrong. raise NotImplementedError( # pragma: no cover "Although '%s' is a valid aritmetic operator for the aggregation " 'pipeline, it is currently not implemented in Mongomock.' % operator) def _handle_project_operator(self, operator, values): if operator in _GROUPING_OPERATOR_MAP: values = self.parse(values) if isinstance(values, str) else self.parse_many(values) return _GROUPING_OPERATOR_MAP[operator](values) if operator == '$arrayElemAt': key, value = values array = self.parse(key) index = self.parse(value) try: return array[index] except IndexError as error: raise KeyError('Array have length less than index value') from error raise NotImplementedError("Although '%s' is a valid project operator for the " 'aggregation pipeline, it is currently not implemented ' 'in Mongomock.' % operator) def _handle_projection_operator(self, operator, value): if operator == '$literal': return value if operator == '$let': if not isinstance(value, dict): raise InvalidDocument('$let only supports an object as its argument') for field in ('vars', 'in'): if field not in value: raise OperationFailure("Missing '{}' parameter to $let".format(field)) if not isinstance(value['vars'], dict): raise OperationFailure('invalid parameter: expected an object (vars)') user_vars = { var_key: self.parse(var_value) for var_key, var_value in value['vars'].items() } return _Parser( self._doc_dict, dict(self._user_vars, **user_vars), ignore_missing_keys=self._ignore_missing_keys, ).parse(value['in']) raise NotImplementedError("Although '%s' is a valid project operator for the " 'aggregation pipeline, it is currently not implemented ' 'in Mongomock.' 
% operator) def _handle_comparison_operator(self, operator, values): assert len(values) == 2, 'Comparison requires two expressions' a = self.parse(values[0]) b = self.parse(values[1]) if operator == '$eq': return a == b if operator == '$ne': return a != b if operator in filtering.SORTING_OPERATOR_MAP: return filtering.bson_compare(filtering.SORTING_OPERATOR_MAP[operator], a, b) raise NotImplementedError( "Although '%s' is a valid comparison operator for the " 'aggregation pipeline, it is currently not implemented ' ' in Mongomock.' % operator) def _handle_string_operator(self, operator, values): if operator == '$toLower': parsed = self.parse(values) return str(parsed).lower() if parsed is not None else '' if operator == '$toUpper': parsed = self.parse(values) return str(parsed).upper() if parsed is not None else '' if operator == '$concat': parsed_list = list(self.parse_many(values)) return None if None in parsed_list else ''.join([str(x) for x in parsed_list]) if operator == '$split': if len(values) != 2: raise OperationFailure('split must have 2 items') try: string = self.parse(values[0]) delimiter = self.parse(values[1]) except KeyError: return None if string is None or delimiter is None: return None if not isinstance(string, str): raise TypeError('split first argument must evaluate to string') if not isinstance(delimiter, str): raise TypeError('split second argument must evaluate to string') return string.split(delimiter) if operator == '$substr': if len(values) != 3: raise OperationFailure('substr must have 3 items') string = str(self.parse(values[0])) first = self.parse(values[1]) length = self.parse(values[2]) if string is None: return '' if first < 0: warnings.warn('Negative starting point given to $substr is accepted only until ' 'MongoDB 3.7. This behavior will change in the future.') return '' if length < 0: warnings.warn('Negative length given to $substr is accepted only until ' 'MongoDB 3.7. 
This behavior will change in the future.') second = len(string) if length < 0 else first + length return string[first:second] if operator == '$strcasecmp': if len(values) != 2: raise OperationFailure('strcasecmp must have 2 items') a, b = str(self.parse(values[0])), str(self.parse(values[1])) return 0 if a == b else -1 if a < b else 1 if operator == '$regexMatch': if not isinstance(values, dict): raise OperationFailure( '$regexMatch expects an object of named arguments but found: %s' % type(values)) for field in ('input', 'regex'): if field not in values: raise OperationFailure("$regexMatch requires '%s' parameter" % field) unknown_args = set(values) - {'input', 'regex', 'options'} if unknown_args: raise OperationFailure( '$regexMatch found an unknown argument: %s' % list(unknown_args)[0]) try: input_value = self.parse(values['input']) except KeyError: return False if not isinstance(input_value, str): raise OperationFailure("$regexMatch needs 'input' to be of type string") try: regex_val = self.parse(values['regex']) except KeyError: return False options = None for option in values.get('options', ''): if option not in 'imxs': raise OperationFailure( '$regexMatch invalid flag in regex options: %s' % option) re_option = getattr(re, option.upper()) if options is None: options = re_option else: options |= re_option if isinstance(regex_val, str): if options is None: regex = re.compile(regex_val) else: regex = re.compile(regex_val, options) elif 'options' in values and regex_val.flags: raise OperationFailure( "$regexMatch: regex option(s) specified in both 'regex' and 'option' fields") elif isinstance(regex_val, helpers.RE_TYPE): if options and not regex_val.flags: regex = re.compile(regex_val.pattern, options) elif regex_val.flags & ~(re.I | re.M | re.X | re.S): raise OperationFailure( '$regexMatch invalid flag in regex options: %s' % regex_val.flags) else: regex = regex_val elif isinstance(regex_val, _RE_TYPES): # bson.Regex if regex_val.flags & ~(re.I | re.M | re.X | 
re.S): raise OperationFailure( '$regexMatch invalid flag in regex options: %s' % regex_val.flags) regex = re.compile(regex_val.pattern, regex_val.flags or options) else: raise OperationFailure("$regexMatch needs 'regex' to be of type string or regex") return bool(regex.search(input_value)) # This should never happen: it is only a safe fallback if something went wrong. raise NotImplementedError( # pragma: no cover "Although '%s' is a valid string operator for the aggregation " 'pipeline, it is currently not implemented in Mongomock.' % operator) def _handle_date_operator(self, operator, values): if isinstance(values, dict) and values.keys() == {'date', 'timezone'}: value = self.parse(values['date']) target_tz = pytz.timezone(values['timezone']) out_value = value.replace(tzinfo=pytz.utc).astimezone(target_tz) else: out_value = self.parse(values) if operator == '$dayOfYear': return out_value.timetuple().tm_yday if operator == '$dayOfMonth': return out_value.day if operator == '$dayOfWeek': return (out_value.isoweekday() % 7) + 1 if operator == '$year': return out_value.year if operator == '$month': return out_value.month if operator == '$week': return int(out_value.strftime('%U')) if operator == '$hour': return out_value.hour if operator == '$minute': return out_value.minute if operator == '$second': return out_value.second if operator == '$millisecond': return int(out_value.microsecond / 1000) if operator == '$dateToString': if not isinstance(values, dict): raise OperationFailure( '$dateToString operator must correspond a dict' 'that has "format" and "date" field.' ) if not isinstance(values, dict) or not {'format', 'date'} <= set(values): raise OperationFailure( '$dateToString operator must correspond a dict' 'that has "format" and "date" field.' ) if '%L' in out_value['format']: raise NotImplementedError( 'Although %L is a valid date format for the ' '$dateToString operator, it is currently not implemented ' ' in Mongomock.' 
) if 'onNull' in values: raise NotImplementedError( 'Although onNull is a valid field for the ' '$dateToString operator, it is currently not implemented ' ' in Mongomock.' ) if 'timezone' in values.keys(): raise NotImplementedError( 'Although timezone is a valid field for the ' '$dateToString operator, it is currently not implemented ' ' in Mongomock.' ) return out_value['date'].strftime(out_value['format']) if operator == '$dateFromParts': if not isinstance(out_value, dict): raise OperationFailure( f'{operator} operator must correspond a dict ' 'that has "year" or "isoWeekYear" field.' ) if len(set(out_value) & {'year', 'isoWeekYear'}) != 1: raise OperationFailure( f'{operator} operator must correspond a dict ' 'that has "year" or "isoWeekYear" field.' ) for field in ('isoWeekYear', 'isoWeek', 'isoDayOfWeek', 'timezone'): if field in out_value: raise NotImplementedError( f'Although {field} is a valid field for the ' f'{operator} operator, it is currently not implemented ' 'in Mongomock.' ) year = out_value['year'] month = out_value.get('month', 1) or 1 day = out_value.get('day', 1) or 1 hour = out_value.get('hour', 0) or 0 minute = out_value.get('minute', 0) or 0 second = out_value.get('second', 0) or 0 millisecond = out_value.get('millisecond', 0) or 0 return datetime.datetime( year=year, month=month, day=day, hour=hour, minute=minute, second=second, microsecond=millisecond ) raise NotImplementedError( "Although '%s' is a valid date operator for the " 'aggregation pipeline, it is currently not implemented ' ' in Mongomock.' 
% operator) def _handle_array_operator(self, operator, value): if operator == '$concatArrays': if not isinstance(value, (list, tuple)): value = [value] parsed_list = list(self.parse_many(value)) for parsed_item in parsed_list: if parsed_item is not None and not isinstance(parsed_item, (list, tuple)): raise OperationFailure( '$concatArrays only supports arrays, not {}'.format(type(parsed_item)) ) return None if None in parsed_list else list(itertools.chain.from_iterable(parsed_list)) if operator == '$map': if not isinstance(value, dict): raise OperationFailure('$map only supports an object as its argument') # NOTE: while the two validations below could be achieved with # one-liner set operations (e.g. set(value) - {'input', 'as', # 'in'}), we prefer the iteration-based approaches in order to # mimic MongoDB's behavior regarding the order of evaluation. For # example, MongoDB complains about 'input' parameter missing before # 'in'. for k in ('input', 'in'): if k not in value: raise OperationFailure("Missing '%s' parameter to $map" % k) for k in value: if k not in {'input', 'as', 'in'}: raise OperationFailure('Unrecognized parameter to $map: %s' % k) input_array = self._parse_or_nothing(value['input']) if input_array is None or input_array is NOTHING: return None if not isinstance(input_array, (list, tuple)): raise OperationFailure('input to $map must be an array not %s' % type(input_array)) fieldname = value.get('as', 'this') in_expr = value['in'] return [ _Parser( self._doc_dict, dict(self._user_vars, **{fieldname: item}), ignore_missing_keys=self._ignore_missing_keys, ).parse(in_expr) for item in input_array ] if operator == '$size': if isinstance(value, list): if len(value) != 1: raise OperationFailure('Expression $size takes exactly 1 arguments. ' '%d were passed in.' 
% len(value)) value = value[0] array_value = self._parse_or_nothing(value) if not isinstance(array_value, (list, tuple)): raise OperationFailure( 'The argument to $size must be an array, but was of type: %s' % ('missing' if array_value is NOTHING else type(array_value))) return len(array_value) if operator == '$filter': if not isinstance(value, dict): raise OperationFailure('$filter only supports an object as its argument') extra_params = set(value) - {'input', 'cond', 'as'} if extra_params: raise OperationFailure('Unrecognized parameter to $filter: %s' % extra_params.pop()) missing_params = {'input', 'cond'} - set(value) if missing_params: raise OperationFailure("Missing '%s' parameter to $filter" % missing_params.pop()) input_array = self.parse(value['input']) fieldname = value.get('as', 'this') cond = value['cond'] return [ item for item in input_array if _Parser( self._doc_dict, dict(self._user_vars, **{fieldname: item}), ignore_missing_keys=self._ignore_missing_keys, ).parse(cond) ] if operator == '$slice': if not isinstance(value, list): raise OperationFailure('$slice only supports a list as its argument') if len(value) < 2 or len(value) > 3: raise OperationFailure('Expression $slice takes at least 2 arguments, and at most ' '3, but {} were passed in'.format(len(value))) array_value = self.parse(value[0]) if not isinstance(array_value, list): raise OperationFailure( 'First argument to $slice must be an array, but is of type: {}' .format(type(array_value))) for num, v in zip(('Second', 'Third'), value[1:]): if not isinstance(v, int): raise OperationFailure( '{} argument to $slice must be numeric, but is of type: {}' .format(num, type(v))) if len(value) > 2 and value[2] <= 0: raise OperationFailure('Third argument to $slice must be ' 'positive: {}'.format(value[2])) start = value[1] if start < 0: if len(value) > 2: stop = len(array_value) + start + value[2] else: stop = None elif len(value) > 2: stop = start + value[2] else: stop = start start = 0 return 
array_value[start:stop] raise NotImplementedError( "Although '%s' is a valid array operator for the " 'aggregation pipeline, it is currently not implemented ' 'in Mongomock.' % operator) def _handle_type_convertion_operator(self, operator, values): if operator == '$toString': try: parsed = self.parse(values) except KeyError: return None if isinstance(parsed, bool): return str(parsed).lower() if isinstance(parsed, datetime.datetime): return parsed.isoformat()[:-3] + 'Z' return str(parsed) if operator == '$toInt': try: parsed = self.parse(values) except KeyError: return None if decimal_support: if isinstance(parsed, decimal128.Decimal128): return int(parsed.to_decimal()) return int(parsed) raise NotImplementedError( 'You need to import the pymongo library to support decimal128 type.' ) if operator == '$toLong': try: parsed = self.parse(values) except KeyError: return None if decimal_support: if isinstance(parsed, decimal128.Decimal128): return int(parsed.to_decimal()) return int(parsed) raise NotImplementedError( 'You need to import the pymongo library to support decimal128 type.' ) # Document: https://docs.mongodb.com/manual/reference/operator/aggregation/toDecimal/ if operator == '$toDecimal': if not decimal_support: raise NotImplementedError( 'You need to import the pymongo library to support decimal128 type.' 
) try: parsed = self.parse(values) except KeyError: return None if isinstance(parsed, bool): parsed = '1' if parsed is True else '0' decimal_value = decimal128.Decimal128(parsed) elif isinstance(parsed, int): decimal_value = decimal128.Decimal128(str(parsed)) elif isinstance(parsed, float): exp = decimal.Decimal('.00000000000000') decimal_value = decimal.Decimal(str(parsed)).quantize(exp) decimal_value = decimal128.Decimal128(decimal_value) elif isinstance(parsed, decimal128.Decimal128): decimal_value = parsed elif isinstance(parsed, str): try: decimal_value = decimal128.Decimal128(parsed) except decimal.InvalidOperation as err: raise OperationFailure( "Failed to parse number '%s' in $convert with no onError value:" 'Failed to parse string to decimal' % parsed) from err elif isinstance(parsed, datetime.datetime): epoch = datetime.datetime.utcfromtimestamp(0) string_micro_seconds = str((parsed - epoch).total_seconds() * 1000).split('.', 1)[0] decimal_value = decimal128.Decimal128(string_micro_seconds) else: raise TypeError("'%s' type is not supported" % type(parsed)) return decimal_value # Document: https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/ if operator == '$arrayToObject': try: parsed = self.parse(values) except KeyError: return None if parsed is None: return None if not isinstance(parsed, (list, tuple)): raise OperationFailure( '$arrayToObject requires an array input, found: {}'.format(type(parsed)) ) if all(isinstance(x, dict) and set(x.keys()) == {'k', 'v'} for x in parsed): return {d['k']: d['v'] for d in parsed} if all(isinstance(x, (list, tuple)) and len(x) == 2 for x in parsed): return dict(parsed) raise OperationFailure( 'arrays used with $arrayToObject must contain documents ' 'with k and v fields or two-element arrays' ) # Document: https://docs.mongodb.com/manual/reference/operator/aggregation/objectToArray/ if operator == '$objectToArray': try: parsed = self.parse(values) except KeyError: return None if parsed is None: 
return None if not isinstance(parsed, (dict, collections.OrderedDict)): raise OperationFailure( '$objectToArray requires an object input, found: {}'.format(type(parsed)) ) if len(parsed) > 1 and sys.version_info < (3, 6): raise NotImplementedError( "Although '%s' is a valid type conversion, it is not implemented for Python 2 " 'and Python 3.5 in Mongomock yet.' % operator) return [{'k': k, 'v': v} for k, v in parsed.items()] raise NotImplementedError( "Although '%s' is a valid type conversion operator for the " 'aggregation pipeline, it is currently not implemented ' 'in Mongomock.' % operator) def _handle_type_operator(self, operator, values): # Document: https://docs.mongodb.com/manual/reference/operator/aggregation/isNumber/ if operator == '$isNumber': try: parsed = self.parse(values) except KeyError: return False return False if isinstance(parsed, bool) else isinstance(parsed, numbers.Number) # Document: https://docs.mongodb.com/manual/reference/operator/aggregation/isArray/ if operator == '$isArray': try: parsed = self.parse(values) except KeyError: return False return isinstance(parsed, (tuple, list)) raise NotImplementedError( # pragma: no cover "Although '%s' is a valid type operator for the aggregation pipeline, it is currently " 'not implemented in Mongomock.' 
% operator) def _handle_conditional_operator(self, operator, values): if operator == '$ifNull': fields = values[:-1] if len(fields) > 1 and version.parse(mongomock.SERVER_VERSION) <= version.parse('4.4'): raise OperationFailure( '$ifNull supports only one input expression ' ' in MongoDB v4.4 and lower' ) fallback = values[-1] for field in fields: try: out_value = self.parse(field) if out_value is not None: return out_value except KeyError: pass return self.parse(fallback) if operator == '$cond': if isinstance(values, list): condition, true_case, false_case = values elif isinstance(values, dict): condition = values['if'] true_case = values['then'] false_case = values['else'] condition_value = self._parse_to_bool(condition) expression = true_case if condition_value else false_case return self.parse(expression) # This should never happen: it is only a safe fallback if something went wrong. raise NotImplementedError( # pragma: no cover "Although '%s' is a valid conditional operator for the " 'aggregation pipeline, it is currently not implemented ' ' in Mongomock.' % operator) def _handle_control_flow_operator(self, operator, values): if operator == '$switch': if not isinstance(values, dict): raise OperationFailure( '$switch requires an object as an argument, ' 'found: %s' % type(values) ) branches = values.get('branches', []) if not isinstance(branches, (list, tuple)): raise OperationFailure( "$switch expected an array for 'branches', " 'found: %s' % type(branches) ) if not branches: raise OperationFailure( '$switch requires at least one branch.' ) for branch in branches: if not isinstance(branch, dict): raise OperationFailure( '$switch expected each branch to be an object, ' 'found: %s' % type(branch) ) if 'case' not in branch: raise OperationFailure( "$switch requires each branch have a 'case' expression" ) if 'then' not in branch: raise OperationFailure( "$switch requires each branch have a 'then' expression." 
) for branch in branches: if self._parse_to_bool(branch['case']): return self.parse(branch['then']) if 'default' not in values: raise OperationFailure( '$switch could not find a matching branch for an input, ' 'and no default was specified.' ) return self.parse(values['default']) # This should never happen: it is only a safe fallback if something went wrong. raise NotImplementedError( # pragma: no cover "Although '%s' is a valid control flow operator for the " 'aggregation pipeline, it is currently not implemented ' 'in Mongomock.' % operator) def _handle_set_operator(self, operator, values): if operator == '$in': expression, array = values return self.parse(expression) in self.parse(array) if operator == '$setUnion': result = [] for set_value in values: for value in self.parse(set_value): if value not in result: result.append(value) return result if operator == '$setEquals': set_values = [set(self.parse(value)) for value in values] for set1, set2 in itertools.combinations(set_values, 2): if set1 != set2: return False return True raise NotImplementedError( "Although '%s' is a valid set operator for the aggregation " 'pipeline, it is currently not implemented in Mongomock.' % operator) def _parse_expression(expression, doc_dict, ignore_missing_keys=False): """Parse an expression. Args: expression: an Aggregate Expression, see https://docs.mongodb.com/manual/meta/aggregation-quick-reference/#aggregation-expressions. doc_dict: the document on which to evaluate the expression. ignore_missing_keys: if True, missing keys evaluated by the expression are ignored silently if it is possible. 
""" return _Parser(doc_dict, ignore_missing_keys=ignore_missing_keys).parse(expression) filtering.register_parse_expression(_parse_expression) def _accumulate_group(output_fields, group_list): doc_dict = {} for field, value in output_fields.items(): if field == '_id': continue for operator, key in value.items(): values = [] for doc in group_list: try: values.append(_parse_expression(key, doc)) except KeyError: continue if operator in _GROUPING_OPERATOR_MAP: doc_dict[field] = _GROUPING_OPERATOR_MAP[operator](values) elif operator == '$addToSet': value = [] val_it = (val or None for val in values) # Don't use set in case elt in not hashable (like dicts). for elt in val_it: if elt not in value: value.append(elt) doc_dict[field] = value elif operator == '$push': if field not in doc_dict: doc_dict[field] = values else: doc_dict[field].extend(values) elif operator in group_operators: raise NotImplementedError( 'Although %s is a valid group operator for the ' 'aggregation pipeline, it is currently not implemented ' 'in Mongomock.' % operator) else: raise NotImplementedError( '%s is not a valid group operator for the aggregation ' 'pipeline. See http://docs.mongodb.org/manual/meta/' 'aggregation-quick-reference/ for a complete list of ' 'valid operators.' % operator) return doc_dict def _fix_sort_key(key_getter): def fixed_getter(doc): key = key_getter(doc) # Convert dictionaries to make sorted() work in Python 3. if isinstance(key, dict): return [(k, v) for (k, v) in sorted(key.items())] return key return fixed_getter def _handle_lookup_stage(in_collection, database, options): for operator in ('let', 'pipeline'): if operator in options: raise NotImplementedError( "Although '%s' is a valid lookup operator for the " 'aggregation pipeline, it is currently not ' 'implemented in Mongomock.' 
% operator) for operator in ('from', 'localField', 'foreignField', 'as'): if operator not in options: raise OperationFailure( "Must specify '%s' field for a $lookup" % operator) if not isinstance(options[operator], str): raise OperationFailure( 'Arguments to $lookup must be strings') if operator in ('as', 'localField', 'foreignField') and \ options[operator].startswith('$'): raise OperationFailure( "FieldPath field names may not start with '$'") if operator == 'as' and \ '.' in options[operator]: raise NotImplementedError( "Although '.' is valid in the 'as' " 'parameters for the lookup stage of the aggregation ' 'pipeline, it is currently not implemented in Mongomock.') foreign_name = options['from'] local_field = options['localField'] foreign_field = options['foreignField'] local_name = options['as'] foreign_collection = database.get_collection(foreign_name) for doc in in_collection: try: query = helpers.get_value_by_dot(doc, local_field) except KeyError: query = None if isinstance(query, list): query = {'$in': query} matches = foreign_collection.find({foreign_field: query}) doc[local_name] = [foreign_doc for foreign_doc in matches] return in_collection def _recursive_get(match, nested_fields): head = match.get(nested_fields[0]) remaining_fields = nested_fields[1:] if not remaining_fields: # Final/last field reached. yield head return # More fields to go, must be list, tuple, or dict. if isinstance(head, (list, tuple)): for m in head: # Yield from _recursive_get(m, remaining_fields). for answer in _recursive_get(m, remaining_fields): yield answer elif isinstance(head, dict): # Yield from _recursive_get(head, remaining_fields). 
for answer in _recursive_get(head, remaining_fields): yield answer def _handle_graph_lookup_stage(in_collection, database, options): if not isinstance(options.get('maxDepth', 0), int): raise OperationFailure( "Argument 'maxDepth' to $graphLookup must be a number") if not isinstance(options.get('restrictSearchWithMatch', {}), dict): raise OperationFailure( "Argument 'restrictSearchWithMatch' to $graphLookup must be a Dictionary") if not isinstance(options.get('depthField', ''), str): raise OperationFailure( "Argument 'depthField' to $graphlookup must be a string") if 'startWith' not in options: raise OperationFailure( "Must specify 'startWith' field for a $graphLookup") for operator in ('as', 'connectFromField', 'connectToField', 'from'): if operator not in options: raise OperationFailure( "Must specify '%s' field for a $graphLookup" % operator) if not isinstance(options[operator], str): raise OperationFailure( "Argument '%s' to $graphLookup must be string" % operator) if options[operator].startswith('$'): raise OperationFailure("FieldPath field names may not start with '$'") if operator == 'as' and '.' in options[operator]: raise NotImplementedError( "Although '.' is valid in the '%s' " 'parameter for the $graphLookup stage of the aggregation ' 'pipeline, it is currently not implemented in Mongomock.' 
% operator) foreign_name = options['from'] start_with = options['startWith'] connect_from_field = options['connectFromField'] connect_to_field = options['connectToField'] local_name = options['as'] max_depth = options.get('maxDepth', None) depth_field = options.get('depthField', None) restrict_search_with_match = options.get('restrictSearchWithMatch', {}) foreign_collection = database.get_collection(foreign_name) out_doc = copy.deepcopy(in_collection) # TODO(pascal): speed the deep copy def _find_matches_for_depth(query): if isinstance(query, list): query = {'$in': query} matches = foreign_collection.find({connect_to_field: query}) new_matches = [] for new_match in matches: if filtering.filter_applies(restrict_search_with_match, new_match) \ and new_match['_id'] not in found_items: if depth_field is not None: new_match = collections.OrderedDict(new_match, **{depth_field: depth}) new_matches.append(new_match) found_items.add(new_match['_id']) return new_matches for doc in out_doc: found_items = set() depth = 0 try: result = _parse_expression(start_with, doc) except KeyError: continue origin_matches = doc[local_name] = _find_matches_for_depth(result) while origin_matches and (max_depth is None or depth < max_depth): depth += 1 newly_discovered_matches = [] for match in origin_matches: nested_fields = connect_from_field.split('.') for match_target in _recursive_get(match, nested_fields): newly_discovered_matches += _find_matches_for_depth(match_target) doc[local_name] += newly_discovered_matches origin_matches = newly_discovered_matches return out_doc def _handle_group_stage(in_collection, unused_database, options): grouped_collection = [] _id = options['_id'] if _id: def _key_getter(doc): try: return _parse_expression(_id, doc, ignore_missing_keys=True) except KeyError: return None def _sort_key_getter(doc): return filtering.BsonComparable(_key_getter(doc)) # Sort the collection only for the itertools.groupby. # $group does not order its output document. 
sorted_collection = sorted(in_collection, key=_sort_key_getter) grouped = itertools.groupby(sorted_collection, _key_getter) else: grouped = [(None, in_collection)] for doc_id, group in grouped: group_list = ([x for x in group]) doc_dict = _accumulate_group(options, group_list) doc_dict['_id'] = doc_id grouped_collection.append(doc_dict) return grouped_collection def _handle_bucket_stage(in_collection, unused_database, options): unknown_options = set(options) - {'groupBy', 'boundaries', 'output', 'default'} if unknown_options: raise OperationFailure( 'Unrecognized option to $bucket: %s.' % unknown_options.pop()) if 'groupBy' not in options or 'boundaries' not in options: raise OperationFailure( "$bucket requires 'groupBy' and 'boundaries' to be specified.") group_by = options['groupBy'] boundaries = options['boundaries'] if not isinstance(boundaries, list): raise OperationFailure( "The $bucket 'boundaries' field must be an array, but found type: %s" % type(boundaries)) if len(boundaries) < 2: raise OperationFailure( "The $bucket 'boundaries' field must have at least 2 values, but " 'found %d value(s).' % len(boundaries)) if sorted(boundaries) != boundaries: raise OperationFailure( "The 'boundaries' option to $bucket must be sorted in ascending order") output_fields = options.get('output', {'count': {'$sum': 1}}) default_value = options.get('default', None) try: is_default_last = default_value >= boundaries[-1] except TypeError: is_default_last = True def _get_default_bucket(): try: return options['default'] except KeyError as err: raise OperationFailure( '$bucket could not find a matching branch for ' 'an input, and no default was specified.') from err def _get_bucket_id(doc): """Get the bucket ID for a document. Note that it actually returns a tuple with the first param being a sort key to sort the default bucket even if it's not the same type as the boundaries. 
""" try: value = _parse_expression(group_by, doc) except KeyError: return (is_default_last, _get_default_bucket()) index = bisect.bisect_right(boundaries, value) if index and index < len(boundaries): return (False, boundaries[index - 1]) return (is_default_last, _get_default_bucket()) in_collection = ((_get_bucket_id(doc), doc) for doc in in_collection) out_collection = sorted(in_collection, key=lambda kv: kv[0]) grouped = itertools.groupby(out_collection, lambda kv: kv[0]) out_collection = [] for (unused_key, doc_id), group in grouped: group_list = [kv[1] for kv in group] doc_dict = _accumulate_group(output_fields, group_list) doc_dict['_id'] = doc_id out_collection.append(doc_dict) return out_collection def _handle_sample_stage(in_collection, unused_database, options): if not isinstance(options, dict): raise OperationFailure('the $sample stage specification must be an object') size = options.pop('size', None) if size is None: raise OperationFailure('$sample stage must specify a size') if options: raise OperationFailure('unrecognized option to $sample: %s' % set(options).pop()) shuffled = list(in_collection) _random.shuffle(shuffled) return shuffled[:size] def _handle_sort_stage(in_collection, unused_database, options): sort_array = reversed([{x: y} for x, y in options.items()]) sorted_collection = in_collection for sort_pair in sort_array: for sortKey, sortDirection in sort_pair.items(): sorted_collection = sorted( sorted_collection, key=lambda x: filtering.resolve_sort_key(sortKey, x), reverse=sortDirection < 0) return sorted_collection def _handle_unwind_stage(in_collection, unused_database, options): if not isinstance(options, dict): options = {'path': options} path = options['path'] if not isinstance(path, str) or path[0] != '$': raise ValueError( '$unwind failed: exception: field path references must be prefixed ' "with a '$' '%s'" % path) path = path[1:] should_preserve_null_and_empty = options.get('preserveNullAndEmptyArrays') include_array_index = 
options.get('includeArrayIndex') unwound_collection = [] for doc in in_collection: try: array_value = helpers.get_value_by_dot(doc, path) except KeyError: if should_preserve_null_and_empty: unwound_collection.append(doc) continue if array_value is None: if should_preserve_null_and_empty: unwound_collection.append(doc) continue if array_value == []: if should_preserve_null_and_empty: new_doc = copy.deepcopy(doc) # We just ran a get_value_by_dot so we know the value exists. helpers.delete_value_by_dot(new_doc, path) unwound_collection.append(new_doc) continue if isinstance(array_value, list): iter_array = enumerate(array_value) else: iter_array = [(None, array_value)] for index, field_item in iter_array: new_doc = copy.deepcopy(doc) new_doc = helpers.set_value_by_dot(new_doc, path, field_item) if include_array_index: new_doc = helpers.set_value_by_dot(new_doc, include_array_index, index) unwound_collection.append(new_doc) return unwound_collection # TODO(pascal): Combine with the equivalent function in collection but check # what are the allowed overriding. def _combine_projection_spec(filter_list, original_filter, prefix=''): """Re-format a projection fields spec into a nested dictionary. e.g: ['a', 'b.c', 'b.d'] => {'a': 1, 'b': {'c': 1, 'd': 1}} """ if not isinstance(filter_list, list): return filter_list filter_dict = collections.OrderedDict() for key in filter_list: field, separator, subkey = key.partition('.') if not separator: if isinstance(filter_dict.get(field), list): other_key = field + '.' + filter_dict[field][0] raise OperationFailure( 'Invalid $project :: caused by :: specification contains two conflicting paths.' ' Cannot specify both %s and %s: %s' % ( repr(prefix + field), repr(prefix + other_key), original_filter)) filter_dict[field] = 1 continue if not isinstance(filter_dict.get(field, []), list): raise OperationFailure( 'Invalid $project :: caused by :: specification contains two conflicting paths.' 
' Cannot specify both %s and %s: %s' % ( repr(prefix + field), repr(prefix + key), original_filter)) filter_dict[field] = filter_dict.get(field, []) + [subkey] return collections.OrderedDict( (k, _combine_projection_spec(v, original_filter, prefix='%s%s.' % (prefix, k))) for k, v in filter_dict.items() ) def _project_by_spec(doc, proj_spec, is_include): output = {} for key, value in doc.items(): if key not in proj_spec: if not is_include: output[key] = value continue if not isinstance(proj_spec[key], dict): if is_include: output[key] = value continue if isinstance(value, dict): output[key] = _project_by_spec(value, proj_spec[key], is_include) elif isinstance(value, list): output[key] = [_project_by_spec(array_value, proj_spec[key], is_include) for array_value in value if isinstance(array_value, dict)] elif not is_include: output[key] = value return output def _handle_replace_root_stage(in_collection, unused_database, options): if 'newRoot' not in options: raise OperationFailure("Parameter 'newRoot' is missing for $replaceRoot operation.") new_root = options['newRoot'] out_collection = [] for doc in in_collection: try: new_doc = _parse_expression(new_root, doc, ignore_missing_keys=True) except KeyError: new_doc = NOTHING if not isinstance(new_doc, dict): raise OperationFailure( "'newRoot' expression must evaluate to an object, but resulting value was: {}" .format(new_doc)) out_collection.append(new_doc) return out_collection def _handle_project_stage(in_collection, unused_database, options): filter_list = [] method = None include_id = options.get('_id') # Compute new values for each field, except inclusion/exclusions that are # handled in one final step. 
new_fields_collection = None for field, value in options.items(): if method is None and (field != '_id' or value): method = 'include' if value else 'exclude' elif method == 'include' and not value and field != '_id': raise OperationFailure( 'Bad projection specification, cannot exclude fields ' "other than '_id' in an inclusion projection: %s" % options) elif method == 'exclude' and value: raise OperationFailure( 'Bad projection specification, cannot include fields ' 'or add computed fields during an exclusion projection: %s' % options) if value in (0, 1, True, False): if field != '_id': filter_list.append(field) continue if not new_fields_collection: new_fields_collection = [{} for unused_doc in in_collection] for in_doc, out_doc in zip(in_collection, new_fields_collection): try: out_doc[field] = _parse_expression(value, in_doc, ignore_missing_keys=True) except KeyError: # Ignore missing key. pass if (method == 'include') == (include_id is not False and include_id != 0): filter_list.append('_id') if not filter_list: return new_fields_collection # Final steps: include or exclude fields and merge with newly created fields. 
projection_spec = _combine_projection_spec(filter_list, original_filter=options) out_collection = [ _project_by_spec(doc, projection_spec, is_include=(method == 'include')) for doc in in_collection ] if new_fields_collection: return [ dict(a, **b) for a, b in zip(out_collection, new_fields_collection) ] return out_collection def _handle_add_fields_stage(in_collection, unused_database, options): if not options: raise OperationFailure( 'Invalid $addFields :: caused by :: specification must have at least one field') out_collection = [dict(doc) for doc in in_collection] for field, value in options.items(): for in_doc, out_doc in zip(in_collection, out_collection): try: out_value = _parse_expression(value, in_doc, ignore_missing_keys=True) except KeyError: continue parts = field.split('.') for subfield in parts[:-1]: out_doc[subfield] = out_doc.get(subfield, {}) if not isinstance(out_doc[subfield], dict): out_doc[subfield] = {} out_doc = out_doc[subfield] out_doc[parts[-1]] = out_value return out_collection def _handle_out_stage(in_collection, database, options): # TODO(MetrodataTeam): should leave the origin collection unchanged out_collection = database.get_collection(options) if out_collection.find_one(): out_collection.drop() if in_collection: out_collection.insert_many(in_collection) return in_collection def _handle_count_stage(in_collection, database, options): if not isinstance(options, str) or options == '': raise OperationFailure('the count field must be a non-empty string') elif options.startswith('$'): raise OperationFailure('the count field cannot be a $-prefixed path') elif '.' 
in options: raise OperationFailure("the count field cannot contain '.'") return [{options: len(in_collection)}] def _handle_facet_stage(in_collection, database, options): out_collection_by_pipeline = {} for pipeline_title, pipeline in options.items(): out_collection_by_pipeline[pipeline_title] = list(process_pipeline( in_collection, database, pipeline, None)) return [out_collection_by_pipeline] def _handle_match_stage(in_collection, database, options): spec = helpers.patch_datetime_awareness_in_document(options) return [ doc for doc in in_collection if filtering.filter_applies(spec, helpers.patch_datetime_awareness_in_document(doc)) ] _PIPELINE_HANDLERS = { '$addFields': _handle_add_fields_stage, '$bucket': _handle_bucket_stage, '$bucketAuto': None, '$collStats': None, '$count': _handle_count_stage, '$currentOp': None, '$facet': _handle_facet_stage, '$geoNear': None, '$graphLookup': _handle_graph_lookup_stage, '$group': _handle_group_stage, '$indexStats': None, '$limit': lambda c, d, o: c[:o], '$listLocalSessions': None, '$listSessions': None, '$lookup': _handle_lookup_stage, '$match': _handle_match_stage, '$merge': None, '$out': _handle_out_stage, '$planCacheStats': None, '$project': _handle_project_stage, '$redact': None, '$replaceRoot': _handle_replace_root_stage, '$replaceWith': None, '$sample': _handle_sample_stage, '$set': _handle_add_fields_stage, '$skip': lambda c, d, o: c[o:], '$sort': _handle_sort_stage, '$sortByCount': None, '$unset': None, '$unwind': _handle_unwind_stage, } def process_pipeline(collection, database, pipeline, session): if session: raise NotImplementedError('Mongomock does not handle sessions yet') for stage in pipeline: for operator, options in stage.items(): try: handler = _PIPELINE_HANDLERS[operator] except KeyError as err: raise NotImplementedError( '%s is not a valid operator for the aggregation pipeline. ' 'See http://docs.mongodb.org/manual/meta/aggregation-quick-reference/ ' 'for a complete list of valid operators.' 
% operator) from err if not handler: raise NotImplementedError( "Although '%s' is a valid operator for the aggregation pipeline, it is " 'currently not implemented in Mongomock.' % operator) collection = handler(collection, database, options) return command_cursor.CommandCursor(collection) mongomock-4.1.3/mongomock/codec_options.py000066400000000000000000000053321456276026200207200ustar00rootroot00000000000000"""Tools for specifying BSON codec options.""" import collections from packaging import version from mongomock import helpers try: from bson import codec_options except ImportError: codec_options = None class TypeRegistry(object): pass _FIELDS = ( 'document_class', 'tz_aware', 'uuid_representation', 'unicode_decode_error_handler', 'tzinfo', ) if codec_options and helpers.PYMONGO_VERSION >= version.parse('3.8'): _DEFAULT_TYPE_REGISTRY = codec_options.TypeRegistry() _FIELDS = _FIELDS + ('type_registry',) else: _DEFAULT_TYPE_REGISTRY = TypeRegistry() # New default in Pymongo v4: # https://pymongo.readthedocs.io/en/stable/examples/uuid.html#unspecified if helpers.PYMONGO_VERSION >= version.parse('4.0'): _DEFAULT_UUID_REPRESENTATION = 0 else: _DEFAULT_UUID_REPRESENTATION = 3 class CodecOptions(collections.namedtuple('CodecOptions', _FIELDS)): def __new__(cls, document_class=dict, tz_aware=False, uuid_representation=None, unicode_decode_error_handler='strict', tzinfo=None, type_registry=None): if document_class != dict: raise NotImplementedError( 'Mongomock does not implement custom document_class yet: %r' % document_class) if not isinstance(tz_aware, bool): raise TypeError('tz_aware must be True or False') if uuid_representation is None: uuid_representation = _DEFAULT_UUID_REPRESENTATION if uuid_representation != _DEFAULT_UUID_REPRESENTATION: raise NotImplementedError('Mongomock does not handle custom uuid_representation yet') if unicode_decode_error_handler not in ('strict', None): raise NotImplementedError( 'Mongomock does not handle custom 
unicode_decode_error_handler yet') if tzinfo: raise NotImplementedError('Mongomock does not handle custom tzinfo yet') values = ( document_class, tz_aware, uuid_representation, unicode_decode_error_handler, tzinfo) if 'type_registry' in _FIELDS: if not type_registry: type_registry = _DEFAULT_TYPE_REGISTRY elif not type_registry == _DEFAULT_TYPE_REGISTRY: raise NotImplementedError( 'Mongomock does not handle custom type_registry yet %r' % type_registry) values = values + (type_registry,) return tuple.__new__(cls, values) def with_options(self, **kwargs): opts = self._asdict() opts.update(kwargs) return CodecOptions(**opts) def is_supported(custom_codec_options): if not custom_codec_options: return None return CodecOptions(**custom_codec_options._asdict()) mongomock-4.1.3/mongomock/collection.py000066400000000000000000002600371456276026200202300ustar00rootroot00000000000000from __future__ import division import collections from collections import OrderedDict from collections.abc import Iterable, Mapping, MutableMapping import copy import functools import itertools import json import math from packaging import version import time import warnings try: from bson import json_util, SON, BSON from bson.errors import InvalidDocument except ImportError: json_utils = SON = BSON = None try: import execjs except ImportError: execjs = None try: from pymongo.operations import IndexModel from pymongo import ReadPreference from pymongo import ReturnDocument _READ_PREFERENCE_PRIMARY = ReadPreference.PRIMARY except ImportError: class IndexModel(object): pass class ReturnDocument(object): BEFORE = False AFTER = True from mongomock.read_preferences import PRIMARY as _READ_PREFERENCE_PRIMARY from sentinels import NOTHING import mongomock # Used for utcnow - please see https://github.com/mongomock/mongomock#utcnow from mongomock import aggregate from mongomock import codec_options as mongomock_codec_options from mongomock import ConfigurationError, DuplicateKeyError, BulkWriteError from 
mongomock import filtering from mongomock.filtering import filter_applies from mongomock import helpers from mongomock import InvalidOperation from mongomock.not_implemented import raise_for_feature as raise_not_implemented from mongomock import ObjectId from mongomock import OperationFailure from mongomock.results import BulkWriteResult from mongomock.results import DeleteResult from mongomock.results import InsertManyResult from mongomock.results import InsertOneResult from mongomock.results import UpdateResult from mongomock.write_concern import WriteConcern from mongomock import WriteError try: from pymongo.read_concern import ReadConcern except ImportError: from mongomock.read_concern import ReadConcern _KwargOption = collections.namedtuple('KwargOption', ['typename', 'default', 'attrs']) _WITH_OPTIONS_KWARGS = { 'read_preference': _KwargOption( 'pymongo.read_preference.ReadPreference', _READ_PREFERENCE_PRIMARY, ('document', 'mode', 'mongos_mode', 'max_staleness')), 'write_concern': _KwargOption( 'pymongo.write_concern.WriteConcern', WriteConcern(), ('acknowledged', 'document')), } def validate_is_mapping(option, value): if not isinstance(value, Mapping): raise TypeError('%s must be an instance of dict, bson.son.SON, or ' 'other type that inherits from ' 'collections.Mapping' % (option,)) def validate_is_mutable_mapping(option, value): if not isinstance(value, MutableMapping): raise TypeError('%s must be an instance of dict, bson.son.SON, or ' 'other type that inherits from ' 'collections.MutableMapping' % (option,)) def validate_ok_for_replace(replacement): validate_is_mapping('replacement', replacement) if replacement: first = next(iter(replacement)) if first.startswith('$'): raise ValueError('replacement can not include $ operators') def validate_ok_for_update(update): validate_is_mapping('update', update) if not update: raise ValueError('update only works with $ operators') first = next(iter(update)) if not first.startswith('$'): raise ValueError('update 
only works with $ operators') def validate_write_concern_params(**params): if params: WriteConcern(**params) class BulkWriteOperation(object): def __init__(self, builder, selector, is_upsert=False): self.builder = builder self.selector = selector self.is_upsert = is_upsert def upsert(self): assert not self.is_upsert return BulkWriteOperation(self.builder, self.selector, is_upsert=True) def register_remove_op(self, multi, hint=None): collection = self.builder.collection selector = self.selector def exec_remove(): if multi: op_result = collection.delete_many(selector, hint=hint).raw_result else: op_result = collection.delete_one(selector, hint=hint).raw_result if op_result.get('ok'): return {'nRemoved': op_result.get('n')} err = op_result.get('err') if err: return {'writeErrors': [err]} return {} self.builder.executors.append(exec_remove) def remove(self): assert not self.is_upsert self.register_remove_op(multi=True) def remove_one(self,): assert not self.is_upsert self.register_remove_op(multi=False) def register_update_op(self, document, multi, **extra_args): if not extra_args.get('remove'): validate_ok_for_update(document) collection = self.builder.collection selector = self.selector def exec_update(): result = collection._update(spec=selector, document=document, multi=multi, upsert=self.is_upsert, **extra_args) ret_val = {} if result.get('upserted'): ret_val['upserted'] = result.get('upserted') ret_val['nUpserted'] = result.get('n') else: matched = result.get('n') if matched is not None: ret_val['nMatched'] = matched modified = result.get('nModified') if modified is not None: ret_val['nModified'] = modified if result.get('err'): ret_val['err'] = result.get('err') return ret_val self.builder.executors.append(exec_update) def update(self, document, hint=None): self.register_update_op(document, multi=True, hint=hint) def update_one(self, document, hint=None): self.register_update_op(document, multi=False, hint=hint) def replace_one(self, document, hint=None): 
self.register_update_op(document, multi=False, remove=True, hint=hint) def _combine_projection_spec(projection_fields_spec): """Re-format a projection fields spec into a nested dictionary. e.g: {'a': 1, 'b.c': 1, 'b.d': 1} => {'a': 1, 'b': {'c': 1, 'd': 1}} """ tmp_spec = OrderedDict() for f, v in projection_fields_spec.items(): if '.' not in f: if isinstance(tmp_spec.get(f), dict): if not v: raise NotImplementedError( 'Mongomock does not support overriding excluding projection: %s' % projection_fields_spec) raise OperationFailure('Path collision at %s' % f) tmp_spec[f] = v else: split_field = f.split('.', 1) base_field, new_field = tuple(split_field) if not isinstance(tmp_spec.get(base_field), dict): if base_field in tmp_spec: raise OperationFailure( 'Path collision at %s remaining portion %s' % (f, new_field)) tmp_spec[base_field] = OrderedDict() tmp_spec[base_field][new_field] = v combined_spec = OrderedDict() for f, v in tmp_spec.items(): if isinstance(v, dict): combined_spec[f] = _combine_projection_spec(v) else: combined_spec[f] = v return combined_spec def _project_by_spec(doc, combined_projection_spec, is_include, container): if '$' in combined_projection_spec: if is_include: raise NotImplementedError('Positional projection is not implemented in mongomock') raise OperationFailure('Cannot exclude array elements with the positional operator') doc_copy = container() for key, val in doc.items(): spec = combined_projection_spec.get(key, NOTHING) if isinstance(spec, dict): if isinstance(val, (list, tuple)): doc_copy[key] = [_project_by_spec(sub_doc, spec, is_include, container) for sub_doc in val] elif isinstance(val, dict): doc_copy[key] = _project_by_spec(val, spec, is_include, container) elif (is_include and spec is not NOTHING) or (not is_include and spec is NOTHING): doc_copy[key] = _copy_field(val, container) return doc_copy def _copy_field(obj, container): if isinstance(obj, list): new = [] for item in obj: new.append(_copy_field(item, container)) return 
new if isinstance(obj, dict): new = container() for key, value in obj.items(): new[key] = _copy_field(value, container) return new return copy.copy(obj) def _recursive_key_check_null_character(data): for key, value in data.items(): if '\0' in key: raise InvalidDocument(f'Field names cannot contain the null character (found: {key})') if isinstance(value, Mapping): _recursive_key_check_null_character(value) def _validate_data_fields(data): _recursive_key_check_null_character(data) for key in data.keys(): if key.startswith('$'): raise InvalidDocument(f'Top-level field names cannot start with the "$" sign ' f'(found: {key})') class BulkOperationBuilder(object): def __init__(self, collection, ordered=False, bypass_document_validation=False): self.collection = collection self.ordered = ordered self.results = {} self.executors = [] self.done = False self._insert_returns_nModified = True self._update_returns_nModified = True self._bypass_document_validation = bypass_document_validation def find(self, selector): return BulkWriteOperation(self, selector) def insert(self, doc): def exec_insert(): self.collection.insert_one( doc, bypass_document_validation=self._bypass_document_validation) return {'nInserted': 1} self.executors.append(exec_insert) def __aggregate_operation_result(self, total_result, key, value): agg_val = total_result.get(key) assert agg_val is not None, 'Unknow operation result %s=%s' \ ' (unrecognized key)' % (key, value) if isinstance(agg_val, int): total_result[key] += value elif isinstance(agg_val, list): if key == 'upserted': new_element = {'index': len(agg_val), '_id': value} agg_val.append(new_element) else: agg_val.append(value) else: assert False, 'Fixme: missed aggreation rule for type: %s for' \ ' key {%s=%s}' % (type(agg_val), key, agg_val) def _set_nModified_policy(self, insert, update): self._insert_returns_nModified = insert self._update_returns_nModified = update def execute(self, write_concern=None): if not self.executors: raise 
InvalidOperation('Bulk operation empty!') if self.done: raise InvalidOperation('Bulk operation already executed!') self.done = True result = {'nModified': 0, 'nUpserted': 0, 'nMatched': 0, 'writeErrors': [], 'upserted': [], 'writeConcernErrors': [], 'nRemoved': 0, 'nInserted': 0} has_update = False has_insert = False broken_nModified_info = False for index, execute_func in enumerate(self.executors): exec_name = execute_func.__name__ try: op_result = execute_func() except WriteError as error: result['writeErrors'].append({ 'index': index, 'code': error.code, 'errmsg': str(error), }) if self.ordered: break continue for (key, value) in op_result.items(): self.__aggregate_operation_result(result, key, value) if exec_name == 'exec_update': has_update = True if 'nModified' not in op_result: broken_nModified_info = True has_insert |= exec_name == 'exec_insert' if broken_nModified_info: result.pop('nModified') elif has_insert and self._insert_returns_nModified: pass elif has_update and self._update_returns_nModified: pass elif self._update_returns_nModified and self._insert_returns_nModified: pass else: result.pop('nModified') if result.get('writeErrors'): raise BulkWriteError(result) return result def add_insert(self, doc): self.insert(doc) def add_update(self, selector, doc, multi=False, upsert=False, collation=None, array_filters=None, hint=None): if array_filters: raise_not_implemented( 'array_filters', 'Array filters are not implemented in mongomock yet.') write_operation = BulkWriteOperation(self, selector, is_upsert=upsert) write_operation.register_update_op(doc, multi, hint=hint) def add_replace(self, selector, doc, upsert, collation=None, hint=None): write_operation = BulkWriteOperation(self, selector, is_upsert=upsert) write_operation.replace_one(doc, hint=hint) def add_delete(self, selector, just_one, collation=None, hint=None): write_operation = BulkWriteOperation(self, selector, is_upsert=False) write_operation.register_remove_op(not just_one, hint=hint) class 
Collection(object): def __init__( self, database, name, _db_store, write_concern=None, read_concern=None, read_preference=None, codec_options=None): self.database = database self._name = name self._db_store = _db_store self._write_concern = write_concern or WriteConcern() if read_concern and not isinstance(read_concern, ReadConcern): raise TypeError('read_concern must be an instance of pymongo.read_concern.ReadConcern') self._read_concern = read_concern or ReadConcern() self._read_preference = read_preference or _READ_PREFERENCE_PRIMARY self._codec_options = codec_options or mongomock_codec_options.CodecOptions() def __repr__(self): return "Collection({0}, '{1}')".format(self.database, self.name) def __getitem__(self, name): return self.database[self.name + '.' + name] def __getattr__(self, attr): if attr.startswith('_'): raise AttributeError( "%s has no attribute '%s'. To access the %s.%s collection, use database['%s.%s']." % (self.__class__.__name__, attr, self.name, attr, self.name, attr)) return self.__getitem__(attr) def __call__(self, *args, **kwargs): name = self._name if '.' not in self._name else self._name.split('.')[-1] raise TypeError( "'Collection' object is not callable. If you meant to call the '%s' method on a " "'Collection' object it is failing because no such method exists." 
% name) def __eq__(self, other): if isinstance(other, self.__class__): return self.database == other.database and self.name == other.name return NotImplemented if helpers.PYMONGO_VERSION >= version.parse('3.12'): def __hash__(self): return hash((self.database, self.name)) @property def full_name(self): return '{0}.{1}'.format(self.database.name, self._name) @property def name(self): return self._name @property def write_concern(self): return self._write_concern @property def read_concern(self): return self._read_concern @property def read_preference(self): return self._read_preference @property def codec_options(self): return self._codec_options def initialize_unordered_bulk_op(self, bypass_document_validation=False): return BulkOperationBuilder( self, ordered=False, bypass_document_validation=bypass_document_validation) def initialize_ordered_bulk_op(self, bypass_document_validation=False): return BulkOperationBuilder( self, ordered=True, bypass_document_validation=bypass_document_validation) if helpers.PYMONGO_VERSION < version.parse('4.0'): def insert(self, data, manipulate=True, check_keys=True, continue_on_error=False, **kwargs): warnings.warn('insert is deprecated. 
Use insert_one or insert_many ' 'instead.', DeprecationWarning, stacklevel=2) validate_write_concern_params(**kwargs) return self._insert(data) def insert_one(self, document, bypass_document_validation=False, session=None): if not bypass_document_validation: validate_is_mutable_mapping('document', document) return InsertOneResult(self._insert(document, session), acknowledged=True) def insert_many(self, documents, ordered=True, bypass_document_validation=False, session=None): if not isinstance(documents, Iterable) or not documents: raise TypeError('documents must be a non-empty list') documents = list(documents) if not bypass_document_validation: for document in documents: validate_is_mutable_mapping('document', document) return InsertManyResult( self._insert(documents, session, ordered=ordered), acknowledged=True) @property def _store(self): return self._db_store[self._name] def _insert(self, data, session=None, ordered=True): if session: raise_not_implemented('session', 'Mongomock does not handle sessions yet') if not isinstance(data, Mapping): results = [] write_errors = [] num_inserted = 0 for index, item in enumerate(data): try: results.append(self._insert(item)) except WriteError as error: write_errors.append({ 'index': index, 'code': error.code, 'errmsg': str(error), 'op': item, }) if ordered: break else: continue num_inserted += 1 if write_errors: raise BulkWriteError({ 'writeErrors': write_errors, 'nInserted': num_inserted, }) return results if not all(isinstance(k, str) for k in data): raise ValueError('Document keys must be strings') if BSON: # bson validation check_keys = helpers.PYMONGO_VERSION < version.parse('3.6') if not check_keys: _validate_data_fields(data) BSON.encode(data, check_keys=check_keys) # Like pymongo, we should fill the _id in the inserted dict (odd behavior, # but we need to stick to it), so we must patch in-place the data dict if '_id' not in data: data['_id'] = ObjectId() object_id = data['_id'] if isinstance(object_id, dict): 
object_id = helpers.hashdict(object_id) if object_id in self._store: raise DuplicateKeyError('E11000 Duplicate Key Error', 11000) data = helpers.patch_datetime_awareness_in_document(data) self._store[object_id] = data try: self._ensure_uniques(data) except DuplicateKeyError: # Rollback del self._store[object_id] raise return data['_id'] def _ensure_uniques(self, new_data): # Note we consider new_data is already inserted in db for index in self._store.indexes.values(): if not index.get('unique'): continue unique = index.get('key') is_sparse = index.get('sparse') partial_filter_expression = index.get('partialFilterExpression') find_kwargs = {} for key, _ in unique: try: find_kwargs[key] = helpers.get_value_by_dot(new_data, key) except KeyError: find_kwargs[key] = None if is_sparse and set(find_kwargs.values()) == {None}: continue if partial_filter_expression is not None: find_kwargs = {'$and': [partial_filter_expression, find_kwargs]} answer_count = len(list(self._iter_documents(find_kwargs))) if answer_count > 1: raise DuplicateKeyError('E11000 Duplicate Key Error', 11000) def _internalize_dict(self, d): return {k: copy.deepcopy(v) for k, v in d.items()} def _has_key(self, doc, key): key_parts = key.split('.') sub_doc = doc for part in key_parts: if part not in sub_doc: return False sub_doc = sub_doc[part] return True def update_one( self, filter, update, upsert=False, bypass_document_validation=False, collation=None, array_filters=None, hint=None, session=None, let=None): if not bypass_document_validation: validate_ok_for_update(update) return UpdateResult( self._update( filter, update, upsert=upsert, hint=hint, session=session, collation=collation, array_filters=array_filters, let=let), acknowledged=True) def update_many( self, filter, update, upsert=False, array_filters=None, bypass_document_validation=False, collation=None, hint=None, session=None, let=None): if not bypass_document_validation: validate_ok_for_update(update) return UpdateResult( self._update( 
filter, update, upsert=upsert, multi=True, hint=hint, session=session, collation=collation, array_filters=array_filters, let=let), acknowledged=True) def replace_one( self, filter, replacement, upsert=False, bypass_document_validation=False, session=None, hint=None): if not bypass_document_validation: validate_ok_for_replace(replacement) return UpdateResult( self._update(filter, replacement, upsert=upsert, hint=hint, session=session), acknowledged=True) if helpers.PYMONGO_VERSION < version.parse('4.0'): def update(self, spec, document, upsert=False, manipulate=False, multi=False, check_keys=False, **kwargs): warnings.warn('update is deprecated. Use replace_one, update_one or ' 'update_many instead.', DeprecationWarning, stacklevel=2) return self._update(spec, document, upsert, manipulate, multi, check_keys, **kwargs) def _update(self, spec, document, upsert=False, manipulate=False, multi=False, check_keys=False, hint=None, session=None, collation=None, let=None, array_filters=None, **kwargs): if session: raise_not_implemented('session', 'Mongomock does not handle sessions yet') if hint: raise NotImplementedError( 'The hint argument of update is valid but has not been implemented in ' 'mongomock yet') if collation: raise_not_implemented( 'collation', 'The collation argument of update is valid but has not been implemented in ' 'mongomock yet') if array_filters: raise_not_implemented( 'array_filters', 'Array filters are not implemented in mongomock yet.') if let: raise_not_implemented( 'let', 'The let argument of update is valid but has not been implemented in mongomock ' 'yet') spec = helpers.patch_datetime_awareness_in_document(spec) document = helpers.patch_datetime_awareness_in_document(document) validate_is_mapping('spec', spec) validate_is_mapping('document', document) if self.database.client.server_info()['versionArray'] < [5]: for operator in _updaters: if not document.get(operator, True): raise WriteError( "'%s' is empty. 
You must specify a field like so: {%s: {: ...}}" % (operator, operator), ) updated_existing = False upserted_id = None num_updated = 0 num_matched = 0 for existing_document in itertools.chain(self._iter_documents(spec), [None]): # we need was_insert for the setOnInsert update operation was_insert = False # the sentinel document means we should do an upsert if existing_document is None: if not upsert or num_matched: continue # For upsert operation we have first to create a fake existing_document, # update it like a regular one, then finally insert it if spec.get('_id') is not None: _id = spec['_id'] elif document.get('_id') is not None: _id = document['_id'] else: _id = ObjectId() to_insert = dict(spec, _id=_id) to_insert = self._expand_dots(to_insert) to_insert, _ = self._discard_operators(to_insert) existing_document = to_insert was_insert = True else: original_document_snapshot = copy.deepcopy(existing_document) updated_existing = True num_matched += 1 first = True subdocument = None for k, v in document.items(): if k in _updaters: updater = _updaters[k] subdocument = self._update_document_fields_with_positional_awareness( existing_document, v, spec, updater, subdocument) elif k == '$rename': for src, dst in v.items(): if '.' in src or '.' 
in dst: raise NotImplementedError( 'Using the $rename operator with dots is a valid MongoDB ' 'operation, but it is not yet supported by mongomock' ) if self._has_key(existing_document, src): existing_document[dst] = existing_document.pop(src) elif k == '$setOnInsert': if not was_insert: continue subdocument = self._update_document_fields_with_positional_awareness( existing_document, v, spec, _set_updater, subdocument) elif k == '$currentDate': subdocument = self._update_document_fields_with_positional_awareness( existing_document, v, spec, _current_date_updater, subdocument) elif k == '$addToSet': for field, value in v.items(): nested_field_list = field.rsplit('.') if len(nested_field_list) == 1: if field not in existing_document: existing_document[field] = [] # document should be a list append to it if isinstance(value, dict): if '$each' in value: # append the list to the field existing_document[field] += [ obj for obj in list(value['$each']) if obj not in existing_document[field]] continue if value not in existing_document[field]: existing_document[field].append(value) continue # push to array in a nested attribute else: # create nested attributes if they do not exist subdocument = existing_document for field_part in nested_field_list[:-1]: if field_part == '$': break if field_part not in subdocument: subdocument[field_part] = {} subdocument = subdocument[field_part] # get subdocument with $ oprator support subdocument, _ = self._get_subdocument( existing_document, spec, nested_field_list) # we're pushing a list push_results = [] if nested_field_list[-1] in subdocument: # if the list exists, then use that list push_results = subdocument[ nested_field_list[-1]] if isinstance(value, dict) and '$each' in value: push_results += [ obj for obj in list(value['$each']) if obj not in push_results] elif value not in push_results: push_results.append(value) subdocument[nested_field_list[-1]] = push_results elif k == '$pull': for field, value in v.items(): nested_field_list 
= field.rsplit('.') # nested fields includes a positional element # need to find that element if '$' in nested_field_list: if not subdocument: subdocument, _ = self._get_subdocument( existing_document, spec, nested_field_list) # value should be a dictionary since we're pulling pull_results = [] # and the last subdoc should be an array for obj in subdocument[nested_field_list[-1]]: if isinstance(obj, dict): for pull_key, pull_value in value.items(): if obj[pull_key] != pull_value: pull_results.append(obj) continue if obj != value: pull_results.append(obj) # cannot write to doc directly as it doesn't save to # existing_document subdocument[nested_field_list[-1]] = pull_results else: arr = existing_document for field_part in nested_field_list: if field_part not in arr: break arr = arr[field_part] if not isinstance(arr, list): continue arr_copy = copy.deepcopy(arr) if isinstance(value, dict): for obj in arr_copy: try: is_matching = filter_applies(value, obj) except OperationFailure: is_matching = False if is_matching: arr.remove(obj) continue if filter_applies({'field': value}, {'field': obj}): arr.remove(obj) else: for obj in arr_copy: if value == obj: arr.remove(obj) elif k == '$pullAll': for field, value in v.items(): nested_field_list = field.rsplit('.') if len(nested_field_list) == 1: if field in existing_document: arr = existing_document[field] existing_document[field] = [ obj for obj in arr if obj not in value] continue else: subdocument, _ = self._get_subdocument( existing_document, spec, nested_field_list) if nested_field_list[-1] in subdocument: arr = subdocument[nested_field_list[-1]] subdocument[nested_field_list[-1]] = [ obj for obj in arr if obj not in value] elif k == '$push': for field, value in v.items(): # Find the place where to push. nested_field_list = field.rsplit('.') subdocument, field = self._get_subdocument( existing_document, spec, nested_field_list) # Push the new element or elements. 
if isinstance(subdocument, dict) and field not in subdocument: subdocument[field] = [] push_results = subdocument[field] if isinstance(value, dict) and '$each' in value: if '$position' in value: push_results = \ push_results[0:value['$position']] + \ list(value['$each']) + \ push_results[value['$position']:] else: push_results += list(value['$each']) if '$sort' in value: sort_spec = value['$sort'] if isinstance(sort_spec, dict): sort_key = set(sort_spec.keys()).pop() push_results = sorted( push_results, key=lambda d: helpers.get_value_by_dot(d, sort_key), reverse=set(sort_spec.values()).pop() < 0) else: push_results = sorted(push_results, reverse=sort_spec < 0) if '$slice' in value: slice_value = value['$slice'] if slice_value < 0: push_results = push_results[slice_value:] elif slice_value == 0: push_results = [] else: push_results = push_results[:slice_value] unused_modifiers = \ set(value.keys()) - {'$each', '$slice', '$position', '$sort'} if unused_modifiers: raise WriteError( 'Unrecognized clause in $push: ' + unused_modifiers.pop()) else: push_results.append(value) subdocument[field] = push_results else: if first: # replace entire document for key in document.keys(): if key.startswith('$'): # can't mix modifiers with non-modifiers in # update raise ValueError('field names cannot start with $ [{}]'.format(k)) _id = spec.get('_id', existing_document.get('_id')) existing_document.clear() if _id is not None: existing_document['_id'] = _id if BSON: # bson validation check_keys = helpers.PYMONGO_VERSION < version.parse('3.6') if not check_keys: _validate_data_fields(document) BSON.encode(document, check_keys=check_keys) existing_document.update(self._internalize_dict(document)) if existing_document['_id'] != _id: raise OperationFailure( 'The _id field cannot be changed from {0} to {1}' .format(existing_document['_id'], _id)) break else: # can't mix modifiers with non-modifiers in update raise ValueError( 'Invalid modifier specified: {}'.format(k)) first = False # if 
empty document comes if not document: _id = spec.get('_id', existing_document.get('_id')) existing_document.clear() if _id: existing_document['_id'] = _id if was_insert: upserted_id = self._insert(existing_document) num_updated += 1 elif existing_document != original_document_snapshot: # Document has been modified in-place. # Make sure the ID was not change. if original_document_snapshot.get('_id') != existing_document.get('_id'): # Rollback. self._store[original_document_snapshot['_id']] = original_document_snapshot raise WriteError( "After applying the update, the (immutable) field '_id' was found to have " 'been altered to _id: {}'.format(existing_document.get('_id'))) # Make sure it still respect the unique indexes and, if not, to # revert modifications try: self._ensure_uniques(existing_document) num_updated += 1 except DuplicateKeyError: # Rollback. self._store[original_document_snapshot['_id']] = original_document_snapshot raise if not multi: break return { 'connectionId': self.database.client._id, 'err': None, 'n': num_matched, 'nModified': num_updated if updated_existing else 0, 'ok': 1, 'upserted': upserted_id, 'updatedExisting': updated_existing, } def _get_subdocument(self, existing_document, spec, nested_field_list): """This method retrieves the subdocument of the existing_document.nested_field_list. It uses the spec to filter through the items. It will continue to grab nested documents until it can go no further. It will then return the subdocument that was last saved. '$' is the positional operator, so we use the $elemMatch in the spec to find the right subdocument in the array. """ # Current document in view. doc = existing_document # Previous document in view. parent_doc = existing_document # Current spec in view. subspec = spec # Whether spec is following the document. is_following_spec = True # Walk down the dictionary. 
for index, subfield in enumerate(nested_field_list): if subfield == '$': if not is_following_spec: raise WriteError( 'The positional operator did not find the match needed from the query') # Positional element should have the equivalent elemMatch in the query. subspec = subspec['$elemMatch'] is_following_spec = False # Iterate through. for spec_index, item in enumerate(doc): if filter_applies(subspec, item): subfield = spec_index break else: raise WriteError( 'The positional operator did not find the match needed from the query') parent_doc = doc if isinstance(parent_doc, list): subfield = int(subfield) if is_following_spec and (subfield < 0 or subfield >= len(subspec)): is_following_spec = False if index == len(nested_field_list) - 1: return parent_doc, subfield if not isinstance(parent_doc, list): if subfield not in parent_doc: parent_doc[subfield] = {} if is_following_spec and subfield not in subspec: is_following_spec = False doc = parent_doc[subfield] if is_following_spec: subspec = subspec[subfield] def _expand_dots(self, doc): expanded = {} paths = {} for k, v in doc.items(): def _raise_incompatible(subkey): raise WriteError( "cannot infer query fields to set, both paths '%s' and '%s' are matched" % (k, paths[subkey])) if k in paths: _raise_incompatible(k) key_parts = k.split('.') sub_expanded = expanded paths[k] = k for i, key_part in enumerate(key_parts[:-1]): if key_part not in sub_expanded: sub_expanded[key_part] = {} sub_expanded = sub_expanded[key_part] key = '.'.join(key_parts[:i + 1]) if not isinstance(sub_expanded, dict): _raise_incompatible(key) paths[key] = k sub_expanded[key_parts[-1]] = v return expanded def _discard_operators(self, doc): if not doc or not isinstance(doc, dict): return doc, False new_doc = OrderedDict() for k, v in doc.items(): if k == '$eq': return v, False if k.startswith('$'): continue new_v, discarded = self._discard_operators(v) if not discarded: new_doc[k] = new_v return new_doc, not bool(new_doc) def find(self, 
filter=None, projection=None, skip=0, limit=0, no_cursor_timeout=False, cursor_type=None, sort=None, allow_partial_results=False, oplog_replay=False, modifiers=None, batch_size=0, manipulate=True, collation=None, session=None, max_time_ms=None, allow_disk_use=False, **kwargs): spec = filter if spec is None: spec = {} validate_is_mapping('filter', spec) for kwarg, value in kwargs.items(): if value: raise OperationFailure("Unrecognized field '%s'" % kwarg) return Cursor(self, spec, sort, projection, skip, limit, collation=collation).max_time_ms(max_time_ms).allow_disk_use(allow_disk_use) def _get_dataset(self, spec, sort, fields, as_class): dataset = self._iter_documents(spec) if sort: for sort_key, sort_direction in reversed(sort): if sort_key == '$natural': if sort_direction < 0: dataset = iter(reversed(list(dataset))) continue if sort_key.startswith('$'): raise NotImplementedError( 'Sorting by {} is not implemented in mongomock yet'.format(sort_key)) dataset = iter(sorted( dataset, key=lambda x: filtering.resolve_sort_key(sort_key, x), reverse=sort_direction < 0)) for document in dataset: yield self._copy_only_fields(document, fields, as_class) def _extract_projection_operators(self, fields): """Removes and returns fields with projection operators.""" result = {} allowed_projection_operators = {'$elemMatch', '$slice'} for key, value in fields.items(): if isinstance(value, dict): for op in value: if op not in allowed_projection_operators: raise ValueError('Unsupported projection option: {}'.format(op)) result[key] = value for key in result: del fields[key] return result def _apply_projection_operators(self, ops, doc, doc_copy): """Applies projection operators to copied document.""" for field, op in ops.items(): if field not in doc_copy: if field in doc: # field was not copied yet (since we are in include mode) doc_copy[field] = doc[field] else: # field doesn't exist in original document, no work to do continue if '$slice' in op: if not isinstance(doc_copy[field], 
list): raise OperationFailure( 'Unsupported type {} for slicing operation: {}'.format( type(doc_copy[field]), op)) op_value = op['$slice'] slice_ = None if isinstance(op_value, list): if len(op_value) != 2: raise OperationFailure( 'Unsupported slice format {} for slicing operation: {}'.format( op_value, op)) skip, limit = op_value if skip < 0: skip = len(doc_copy[field]) + skip last = min(skip + limit, len(doc_copy[field])) slice_ = slice(skip, last) elif isinstance(op_value, int): count = op_value start = 0 end = len(doc_copy[field]) if count < 0: start = max(0, len(doc_copy[field]) + count) else: end = min(count, len(doc_copy[field])) slice_ = slice(start, end) if slice_: doc_copy[field] = doc_copy[field][slice_] else: raise OperationFailure( 'Unsupported slice value {} for slicing operation: {}'.format( op_value, op)) if '$elemMatch' in op: if isinstance(doc_copy[field], list): # find the first item that matches matched = False for item in doc_copy[field]: if filter_applies(op['$elemMatch'], item): matched = True doc_copy[field] = [item] break # nothing have matched if not matched: del doc_copy[field] else: # remove the field since there is nothing to iterate del doc_copy[field] def _copy_only_fields(self, doc, fields, container): """Copy only the specified fields.""" # https://pymongo.readthedocs.io/en/stable/migrate-to-pymongo4.html#collection-find-returns-entire-document-with-empty-projection if fields is None or not fields and helpers.PYMONGO_VERSION >= version.parse('4.0'): return _copy_field(doc, container) if not fields: fields = {'_id': 1} if not isinstance(fields, dict): fields = helpers.fields_list_to_dict(fields) # we can pass in something like {'_id':0, 'field':1}, so pull the id # value out and hang on to it until later id_value = fields.pop('_id', 1) # filter out fields with projection operators, we will take care of them later projection_operators = self._extract_projection_operators(fields) # other than the _id field, all fields must be either 
includes or # excludes, this can evaluate to 0 if len(set(list(fields.values()))) > 1: raise ValueError( 'You cannot currently mix including and excluding fields.') # if we have novalues passed in, make a doc_copy based on the # id_value if not fields: if id_value == 1: doc_copy = container() else: doc_copy = _copy_field(doc, container) else: doc_copy = _project_by_spec( doc, _combine_projection_spec(fields), is_include=list(fields.values())[0], container=container) # set the _id value if we requested it, otherwise remove it if id_value == 0: doc_copy.pop('_id', None) else: if '_id' in doc: doc_copy['_id'] = doc['_id'] fields['_id'] = id_value # put _id back in fields # time to apply the projection operators and put back their fields self._apply_projection_operators(projection_operators, doc, doc_copy) for field, op in projection_operators.items(): fields[field] = op return doc_copy def _update_document_fields(self, doc, fields, updater): """Implements the $set behavior on an existing document""" for k, v in fields.items(): self._update_document_single_field(doc, k, v, updater) def _update_document_fields_positional(self, doc, fields, spec, updater, subdocument=None): """Implements the $set behavior on an existing document""" for k, v in fields.items(): if '$' in k: field_name_parts = k.split('.') if not subdocument: current_doc = doc subspec = spec for part in field_name_parts[:-1]: if part == '$': subspec_dollar = subspec.get('$elemMatch', subspec) for item in current_doc: if filter_applies(subspec_dollar, item): current_doc = item break continue new_spec = {} for el in subspec: if el.startswith(part): if len(el.split('.')) > 1: new_spec['.'.join( el.split('.')[1:])] = subspec[el] else: new_spec = subspec[el] subspec = new_spec current_doc = current_doc[part] subdocument = current_doc if field_name_parts[-1] == '$' and isinstance(subdocument, list): for i, doc in enumerate(subdocument): subspec_dollar = subspec.get('$elemMatch', subspec) if 
filter_applies(subspec_dollar, doc): subdocument[i] = v break continue updater(subdocument, field_name_parts[-1], v) continue # otherwise, we handle it the standard way self._update_document_single_field(doc, k, v, updater) return subdocument def _update_document_fields_with_positional_awareness(self, existing_document, v, spec, updater, subdocument): positional = any('$' in key for key in v.keys()) if positional: return self._update_document_fields_positional( existing_document, v, spec, updater, subdocument) self._update_document_fields(existing_document, v, updater) return subdocument def _update_document_single_field(self, doc, field_name, field_value, updater): field_name_parts = field_name.split('.') for part in field_name_parts[:-1]: if isinstance(doc, list): try: if part == '$': doc = doc[0] else: doc = doc[int(part)] continue except ValueError: pass elif isinstance(doc, dict): if updater is _unset_updater and part not in doc: # If the parent doesn't exists, so does it child. return doc = doc.setdefault(part, {}) else: return field_name = field_name_parts[-1] updater(doc, field_name, field_value) def _iter_documents(self, filter): # Validate the filter even if no documents can be returned. if self._store.is_empty: filter_applies(filter, {}) return (document for document in list(self._store.documents) if filter_applies(filter, document)) def find_one(self, filter=None, *args, **kwargs): # pylint: disable=keyword-arg-before-vararg # Allow calling find_one with a non-dict argument that gets used as # the id for the query. 
if filter is None: filter = {} if not isinstance(filter, Mapping): filter = {'_id': filter} try: return next(self.find(filter, *args, **kwargs)) except StopIteration: return None def find_one_and_delete(self, filter, projection=None, sort=None, **kwargs): kwargs['remove'] = True validate_is_mapping('filter', filter) return self._find_and_modify(filter, projection, sort=sort, **kwargs) def find_one_and_replace(self, filter, replacement, projection=None, sort=None, upsert=False, return_document=ReturnDocument.BEFORE, **kwargs): validate_is_mapping('filter', filter) validate_ok_for_replace(replacement) return self._find_and_modify(filter, projection, replacement, upsert, sort, return_document, **kwargs) def find_one_and_update(self, filter, update, projection=None, sort=None, upsert=False, return_document=ReturnDocument.BEFORE, **kwargs): validate_is_mapping('filter', filter) validate_ok_for_update(update) return self._find_and_modify(filter, projection, update, upsert, sort, return_document, **kwargs) if helpers.PYMONGO_VERSION < version.parse('4.0'): def find_and_modify(self, query={}, update=None, upsert=False, sort=None, full_response=False, manipulate=False, fields=None, **kwargs): warnings.warn('find_and_modify is deprecated, use find_one_and_delete' ', find_one_and_replace, or find_one_and_update instead', DeprecationWarning, stacklevel=2) if 'projection' in kwargs: raise TypeError("find_and_modify() got an unexpected keyword argument 'projection'") return self._find_and_modify(query, update=update, upsert=upsert, sort=sort, projection=fields, **kwargs) def _find_and_modify(self, query, projection=None, update=None, upsert=False, sort=None, return_document=ReturnDocument.BEFORE, session=None, **kwargs): if session: raise_not_implemented('session', 'Mongomock does not handle sessions yet') remove = kwargs.get('remove', False) if kwargs.get('new', False) and remove: # message from mongodb raise OperationFailure("remove and returnNew can't co-exist") if not 
(remove or update): raise ValueError('Must either update or remove') if remove and update: raise ValueError("Can't do both update and remove") old = self.find_one(query, projection=projection, sort=sort) if not old and not upsert: return if old and '_id' in old: query = {'_id': old['_id']} if remove: self.delete_one(query) else: updated = self._update(query, update, upsert) if updated['upserted']: query = {'_id': updated['upserted']} if return_document is ReturnDocument.AFTER or kwargs.get('new'): return self.find_one(query, projection) return old if helpers.PYMONGO_VERSION < version.parse('4.0'): def save(self, to_save, manipulate=True, check_keys=True, **kwargs): warnings.warn('save is deprecated. Use insert_one or replace_one ' 'instead', DeprecationWarning, stacklevel=2) validate_is_mutable_mapping('to_save', to_save) validate_write_concern_params(**kwargs) if '_id' not in to_save: return self.insert(to_save) self._update( {'_id': to_save['_id']}, to_save, True, manipulate, check_keys=True, **kwargs) return to_save.get('_id', None) def delete_one(self, filter, collation=None, hint=None, session=None): validate_is_mapping('filter', filter) return DeleteResult( self._delete(filter, collation=collation, hint=hint, session=session), True) def delete_many(self, filter, collation=None, hint=None, session=None): validate_is_mapping('filter', filter) return DeleteResult( self._delete(filter, collation=collation, hint=hint, multi=True, session=session), True) def _delete(self, filter, collation=None, hint=None, multi=False, session=None): if hint: raise NotImplementedError( 'The hint argument of delete is valid but has not been implemented in ' 'mongomock yet') if collation: raise_not_implemented( 'collation', 'The collation argument of delete is valid but has not been ' 'implemented in mongomock yet') if session: raise_not_implemented('session', 'Mongomock does not handle sessions yet') filter = helpers.patch_datetime_awareness_in_document(filter) if filter is None: 
filter = {} if not isinstance(filter, Mapping): filter = {'_id': filter} to_delete = list(self.find(filter)) deleted_count = 0 for doc in to_delete: doc_id = doc['_id'] if isinstance(doc_id, dict): doc_id = helpers.hashdict(doc_id) del self._store[doc_id] deleted_count += 1 if not multi: break return { 'connectionId': self.database.client._id, 'n': deleted_count, 'ok': 1.0, 'err': None, } if helpers.PYMONGO_VERSION < version.parse('4.0'): def remove(self, spec_or_id=None, multi=True, **kwargs): warnings.warn('remove is deprecated. Use delete_one or delete_many ' 'instead.', DeprecationWarning, stacklevel=2) validate_write_concern_params(**kwargs) return self._delete(spec_or_id, multi=multi) def count(self, filter=None, **kwargs): warnings.warn( 'count is deprecated. Use estimated_document_count or ' 'count_documents instead. Please note that $where must be replaced ' 'by $expr, $near must be replaced by $geoWithin with $center, and ' '$nearSphere must be replaced by $geoWithin with $centerSphere', DeprecationWarning, stacklevel=2) if kwargs.pop('session', None): raise_not_implemented('session', 'Mongomock does not handle sessions yet') if filter is None: return len(self._store) spec = helpers.patch_datetime_awareness_in_document(filter) return len(list(self._iter_documents(spec))) def count_documents(self, filter, **kwargs): if kwargs.pop('collation', None): raise_not_implemented( 'collation', 'The collation argument of count_documents is valid but has not been ' 'implemented in mongomock yet') if kwargs.pop('session', None): raise_not_implemented('session', 'Mongomock does not handle sessions yet') skip = kwargs.pop('skip', 0) if 'limit' in kwargs: limit = kwargs.pop('limit') if not isinstance(limit, (int, float)): raise OperationFailure('the limit must be specified as a number') if limit <= 0: raise OperationFailure('the limit must be positive') limit = math.floor(limit) else: limit = None unknown_kwargs = set(kwargs) - {'maxTimeMS', 'hint'} if unknown_kwargs: 
raise OperationFailure("unrecognized field '%s'" % unknown_kwargs.pop()) spec = helpers.patch_datetime_awareness_in_document(filter) doc_num = len(list(self._iter_documents(spec))) count = max(doc_num - skip, 0) return count if limit is None else min(count, limit) def estimated_document_count(self, **kwargs): if kwargs.pop('session', None): raise ConfigurationError('estimated_document_count does not support sessions') unknown_kwargs = set(kwargs) - {'limit', 'maxTimeMS', 'hint'} if self.database.client.server_info()['versionArray'] < [5]: unknown_kwargs.discard('skip') if unknown_kwargs: raise OperationFailure( "BSON field 'count.%s' is an unknown field." % list(unknown_kwargs)[0]) return self.count_documents({}, **kwargs) def drop(self, session=None): if session: raise_not_implemented('session', 'Mongomock does not handle sessions yet') self.database.drop_collection(self.name) if helpers.PYMONGO_VERSION < version.parse('4.0'): def ensure_index(self, key_or_list, cache_for=300, **kwargs): return self.create_index(key_or_list, cache_for, **kwargs) def create_index(self, key_or_list, cache_for=300, session=None, **kwargs): if session: raise_not_implemented('session', 'Mongomock does not handle sessions yet') index_list = helpers.create_index_list(key_or_list) is_unique = kwargs.pop('unique', False) is_sparse = kwargs.pop('sparse', False) index_name = kwargs.pop('name', helpers.gen_index_name(index_list)) index_dict = {'key': index_list} if is_sparse: index_dict['sparse'] = True if is_unique: index_dict['unique'] = True if 'expireAfterSeconds' in kwargs and kwargs['expireAfterSeconds'] is not None: index_dict['expireAfterSeconds'] = kwargs.pop('expireAfterSeconds') if 'partialFilterExpression' in kwargs and kwargs['partialFilterExpression'] is not None: index_dict['partialFilterExpression'] = kwargs.pop('partialFilterExpression') existing_index = self._store.indexes.get(index_name) if existing_index and index_dict != existing_index: raise OperationFailure( 'Index with 
name: %s already exists with different options' % index_name) # Check that documents already verify the uniquess of this new index. if is_unique: indexed = set() indexed_list = [] documents_gen = self._store.documents for doc in documents_gen: index = [] for key, unused_order in index_list: try: index.append(helpers.get_value_by_dot(doc, key)) except KeyError: if is_sparse: continue index.append(None) if is_sparse and not index: continue index = tuple(index) try: if index in indexed: # Need to throw this inside the generator so it can clean the locks documents_gen.throw( DuplicateKeyError('E11000 Duplicate Key Error', 11000), None, None) indexed.add(index) except TypeError as err: # index is not hashable. if index in indexed_list: documents_gen.throw( DuplicateKeyError('E11000 Duplicate Key Error', 11000), None, err) indexed_list.append(index) self._store.create_index(index_name, index_dict) return index_name def create_indexes(self, indexes, session=None): for index in indexes: if not isinstance(index, IndexModel): raise TypeError( '%s is not an instance of pymongo.operations.IndexModel' % index) return [ self.create_index( index.document['key'].items(), session=session, expireAfterSeconds=index.document.get('expireAfterSeconds'), unique=index.document.get('unique', False), sparse=index.document.get('sparse', False), name=index.document.get('name')) for index in indexes ] def drop_index(self, index_or_name, session=None): if session: raise_not_implemented('session', 'Mongomock does not handle sessions yet') if isinstance(index_or_name, list): name = helpers.gen_index_name(index_or_name) else: name = index_or_name try: self._store.drop_index(name) except KeyError as err: raise OperationFailure('index not found with name [%s]' % name) from err def drop_indexes(self, session=None): if session: raise_not_implemented('session', 'Mongomock does not handle sessions yet') self._store.indexes = {} if helpers.PYMONGO_VERSION < version.parse('4.0'): def reindex(self, 
session=None): if session: raise_not_implemented('session', 'Mongomock does not handle sessions yet') def _list_all_indexes(self): if not self._store.is_created: return yield '_id_', {'key': [('_id', 1)]} for name, information in self._store.indexes.items(): yield name, information def list_indexes(self, session=None): if session: raise_not_implemented('session', 'Mongomock does not handle sessions yet') for name, information in self._list_all_indexes(): yield dict( information, key=dict(information['key']), name=name, v=2) def index_information(self, session=None): if session: raise_not_implemented('session', 'Mongomock does not handle sessions yet') return { name: dict(index, v=2) for name, index in self._list_all_indexes() } if helpers.PYMONGO_VERSION < version.parse('4.0'): def map_reduce(self, map_func, reduce_func, out, full_response=False, query=None, limit=0, session=None): if execjs is None: raise NotImplementedError( 'PyExecJS is required in order to run Map-Reduce. ' "Use 'pip install pyexecjs pymongo' to support Map-Reduce mock." 
) if session: raise_not_implemented('session', 'Mongomock does not handle sessions yet') if limit == 0: limit = None start_time = time.perf_counter() out_collection = None reduced_rows = None full_dict = { 'counts': { 'input': 0, 'reduce': 0, 'emit': 0, 'output': 0}, 'timeMillis': 0, 'ok': 1.0, 'result': None} map_ctx = execjs.compile(''' function doMap(fnc, docList) { var mappedDict = {}; function emit(key, val) { if (key['$oid']) { mapped_key = '$oid' + key['$oid']; } else { mapped_key = key; } if(!mappedDict[mapped_key]) { mappedDict[mapped_key] = []; } mappedDict[mapped_key].push(val); } mapper = eval('('+fnc+')'); var mappedList = new Array(); for(var i=0; i 1: full_dict['counts']['reduce'] += 1 full_dict['counts']['output'] = len(reduced_rows) if isinstance(out, (str, bytes)): out_collection = getattr(self.database, out) out_collection.drop() out_collection.insert(reduced_rows) ret_val = out_collection full_dict['result'] = out elif isinstance(out, SON) and out.get('replace') and out.get('db'): # Must be of the format SON([('replace','results'),('db','outdb')]) out_db = getattr(self.database._client, out['db']) out_collection = getattr(out_db, out['replace']) out_collection.insert(reduced_rows) ret_val = out_collection full_dict['result'] = {'db': out['db'], 'collection': out['replace']} elif isinstance(out, dict) and out.get('inline'): ret_val = reduced_rows full_dict['result'] = reduced_rows else: raise TypeError("'out' must be an instance of string, dict or bson.SON") time_millis = (time.perf_counter() - start_time) * 1000 full_dict['timeMillis'] = int(round(time_millis)) if full_response: ret_val = full_dict return ret_val def inline_map_reduce(self, map_func, reduce_func, full_response=False, query=None, limit=0, session=None): return self.map_reduce( map_func, reduce_func, {'inline': 1}, full_response, query, limit, session=session) def distinct(self, key, filter=None, session=None): if session: raise_not_implemented('session', 'Mongomock does not 
handle sessions yet') return self.find(filter).distinct(key) if helpers.PYMONGO_VERSION < version.parse('4.0'): def group(self, key, condition, initial, reduce, finalize=None): if helpers.PYMONGO_VERSION >= version.parse('3.6'): raise OperationFailure("no such command: 'group'") if execjs is None: raise NotImplementedError( 'PyExecJS is required in order to use group. ' "Use 'pip install pyexecjs pymongo' to support group mock." ) reduce_ctx = execjs.compile(''' function doReduce(fnc, docList) { reducer = eval('('+fnc+')'); for(var i=0, l=docList.length; i 0: doc += [None] * len_diff doc[field_index] = value def _unset_updater(doc, field_name, value): if isinstance(doc, dict): doc.pop(field_name, None) def _inc_updater(doc, field_name, value): if isinstance(doc, dict): doc[field_name] = doc.get(field_name, 0) + value if isinstance(doc, list): field_index = int(field_name) if field_index < 0: raise WriteError('Negative index provided') try: doc[field_index] += value except IndexError: len_diff = field_index - (len(doc) - 1) doc += [None] * len_diff doc[field_index] = value def _max_updater(doc, field_name, value): if isinstance(doc, dict): doc[field_name] = max(doc.get(field_name, value), value) def _min_updater(doc, field_name, value): if isinstance(doc, dict): doc[field_name] = min(doc.get(field_name, value), value) def _pop_updater(doc, field_name, value): if value not in {1, -1}: raise WriteError('$pop expects 1 or -1, found: ' + str(value)) if isinstance(doc, dict): if isinstance(doc[field_name], (tuple, list)): doc[field_name] = list(doc[field_name]) _pop_from_list(doc[field_name], value) return raise WriteError('Path contains element of non-array type') if isinstance(doc, list): field_index = int(field_name) if field_index < 0: raise WriteError('Negative index provided') if field_index >= len(doc): return _pop_from_list(doc[field_index], value) def _pop_from_list(list_instance, mongo_pop_value): if not list_instance: return if mongo_pop_value == 1: 
list_instance.pop() elif mongo_pop_value == -1: list_instance.pop(0) def _current_date_updater(doc, field_name, value): if isinstance(doc, dict): if value == {'$type': 'timestamp'}: # TODO(juannyg): get_current_timestamp should also be using helpers utcnow, # as it currently using time.time internally doc[field_name] = helpers.get_current_timestamp() else: doc[field_name] = mongomock.utcnow() _updaters = { '$set': _set_updater, '$unset': _unset_updater, '$inc': _inc_updater, '$max': _max_updater, '$min': _min_updater, '$pop': _pop_updater } mongomock-4.1.3/mongomock/command_cursor.py000066400000000000000000000014341456276026200211020ustar00rootroot00000000000000 class CommandCursor(object): def __init__(self, collection, curser_info=None, address=None, retrieved=0): self._collection = iter(collection) self._id = None self._address = address self._data = {} self._retrieved = retrieved self._batch_size = 0 self._killed = (self._id == 0) @property def address(self): return self._address def close(self): pass def batch_size(self, batch_size): return self @property def alive(self): return True def __iter__(self): return self def next(self): return next(self._collection) __next__ = next def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): return mongomock-4.1.3/mongomock/database.py000066400000000000000000000233251456276026200176360ustar00rootroot00000000000000from packaging import version import warnings from . import CollectionInvalid from . import InvalidName from . 
import OperationFailure from .collection import Collection from .filtering import filter_applies from mongomock import codec_options as mongomock_codec_options from mongomock import helpers from mongomock import read_preferences from mongomock import store try: from pymongo import ReadPreference _READ_PREFERENCE_PRIMARY = ReadPreference.PRIMARY except ImportError: _READ_PREFERENCE_PRIMARY = read_preferences.PRIMARY try: from pymongo.read_concern import ReadConcern except ImportError: from .read_concern import ReadConcern _LIST_COLLECTION_FILTER_ALLOWED_OPERATORS = frozenset(['$regex', '$eq', '$ne']) def _verify_list_collection_supported_op(keys): if set(keys) - _LIST_COLLECTION_FILTER_ALLOWED_OPERATORS: raise NotImplementedError( 'list collection names filter operator {0} is not implemented yet in mongomock ' 'allowed operators are {1}'.format(keys, _LIST_COLLECTION_FILTER_ALLOWED_OPERATORS)) class Database(object): def __init__( self, client, name, _store, read_preference=None, codec_options=None, read_concern=None ): self.name = name self._client = client self._collection_accesses = {} self._store = _store or store.DatabaseStore() self._read_preference = read_preference or _READ_PREFERENCE_PRIMARY mongomock_codec_options.is_supported(codec_options) self._codec_options = codec_options or mongomock_codec_options.CodecOptions() if read_concern and not isinstance(read_concern, ReadConcern): raise TypeError('read_concern must be an instance of pymongo.read_concern.ReadConcern') self._read_concern = read_concern or ReadConcern() def __getitem__(self, coll_name): return self.get_collection(coll_name) def __getattr__(self, attr): if attr.startswith('_'): raise AttributeError( "%s has no attribute '%s'. To access the %s collection, use database['%s']." 
% (self.__class__.__name__, attr, attr, attr)) return self[attr] def __repr__(self): return "Database({0}, '{1}')".format(self._client, self.name) def __eq__(self, other): if isinstance(other, self.__class__): return self._client == other._client and self.name == other.name return NotImplemented if helpers.PYMONGO_VERSION >= version.parse('3.12'): def __hash__(self): return hash((self._client, self.name)) @property def client(self): return self._client @property def read_preference(self): return self._read_preference @property def codec_options(self): return self._codec_options @property def read_concern(self): return self._read_concern def _get_created_collections(self): return self._store.list_created_collection_names() if helpers.PYMONGO_VERSION < version.parse('4.0'): def collection_names(self, include_system_collections=True, session=None): warnings.warn('collection_names is deprecated. Use list_collection_names instead.') if include_system_collections: return list(self._get_created_collections()) return self.list_collection_names(session=session) def list_collections(self, filter=None, session=None, nameOnly=False): raise NotImplementedError( 'list_collections is a valid method of Database but has not been implemented in ' 'mongomock yet.') def list_collection_names(self, filter=None, session=None): """filter: only name field type with eq,ne or regex operator session: not supported for supported operator please see _LIST_COLLECTION_FILTER_ALLOWED_OPERATORS """ field_name = 'name' if session: raise NotImplementedError('Mongomock does not handle sessions yet') if filter: if not filter.get('name'): raise NotImplementedError('list collection {0} might be valid but is not ' 'implemented yet in mongomock'.format(filter)) filter = {field_name: {'$eq': filter.get(field_name)}} \ if isinstance(filter.get(field_name), str) else filter _verify_list_collection_supported_op(filter.get(field_name).keys()) return [ name for name in list(self._store._collections) if 
filter_applies(filter, {field_name: name}) and not name.startswith('system.') ] return [ name for name in self._get_created_collections() if not name.startswith('system.') ] def get_collection(self, name, codec_options=None, read_preference=None, write_concern=None, read_concern=None): if read_preference is not None: read_preferences.ensure_read_preference_type('read_preference', read_preference) mongomock_codec_options.is_supported(codec_options) try: return self._collection_accesses[name].with_options( codec_options=codec_options or self._codec_options, read_preference=read_preference or self.read_preference, read_concern=read_concern, write_concern=write_concern) except KeyError: self._ensure_valid_collection_name(name) collection = self._collection_accesses[name] = Collection( self, name=name, read_concern=read_concern, write_concern=write_concern, read_preference=read_preference or self.read_preference, codec_options=codec_options or self._codec_options, _db_store=self._store, ) return collection def drop_collection(self, name_or_collection, session=None): if session: raise NotImplementedError('Mongomock does not handle sessions yet') if isinstance(name_or_collection, Collection): name_or_collection._store.drop() else: self._store[name_or_collection].drop() def _ensure_valid_collection_name(self, name): # These are the same checks that are done in pymongo. if not isinstance(name, str): raise TypeError('name must be an instance of str') if not name or '..' in name: raise InvalidName('collection names cannot be empty') if name[0] == '.' 
or name[-1] == '.': raise InvalidName("collection names must not start or end with '.'") if '$' in name: raise InvalidName("collection names must not contain '$'") if '\x00' in name: raise InvalidName('collection names must not contain the null character') def create_collection(self, name, **kwargs): self._ensure_valid_collection_name(name) if name in self.list_collection_names(): raise CollectionInvalid('collection %s already exists' % name) if kwargs: raise NotImplementedError('Special options not supported') self._store.create_collection(name) return self[name] def rename_collection(self, name, new_name, dropTarget=False): """Changes the name of an existing collection.""" self._ensure_valid_collection_name(new_name) # Reference for server implementation: # https://docs.mongodb.com/manual/reference/command/renameCollection/ if not self._store[name].is_created: raise OperationFailure( 'The collection "{0}" does not exist.'.format(name), 10026) if new_name in self._store: if dropTarget: self.drop_collection(new_name) else: raise OperationFailure( 'The target collection "{0}" already exists'.format(new_name), 10027) self._store.rename(name, new_name) return {'ok': 1} def dereference(self, dbref, session=None): if session: raise NotImplementedError('Mongomock does not handle sessions yet') if not hasattr(dbref, 'collection') or not hasattr(dbref, 'id'): raise TypeError('cannot dereference a %s' % type(dbref)) if dbref.database is not None and dbref.database != self.name: raise ValueError('trying to dereference a DBRef that points to ' 'another database (%r not %r)' % (dbref.database, self.name)) return self[dbref.collection].find_one({'_id': dbref.id}) def command(self, command, **unused_kwargs): if isinstance(command, str): command = {command: 1} if 'ping' in command: return {'ok': 1.} # TODO(pascal): Differentiate NotImplementedError for valid commands # and OperationFailure if the command is not valid. 
raise NotImplementedError( 'command is a valid Database method but is not implemented in Mongomock yet') def with_options( self, codec_options=None, read_preference=None, write_concern=None, read_concern=None): mongomock_codec_options.is_supported(codec_options) if write_concern: raise NotImplementedError( 'write_concern is a valid parameter for with_options but is not implemented yet in ' 'mongomock') if read_preference is None or read_preference == self._read_preference: return self return Database( self._client, self.name, self._store, read_preference=read_preference or self._read_preference, codec_options=codec_options or self._codec_options, read_concern=read_concern or self._read_concern, ) mongomock-4.1.3/mongomock/filtering.py000066400000000000000000000461161456276026200200600ustar00rootroot00000000000000from datetime import datetime import itertools import uuid from .helpers import ObjectId, RE_TYPE from . import OperationFailure import numbers import operator import re from sentinels import NOTHING try: from types import NoneType except ImportError: NoneType = type(None) try: from bson import Regex, DBRef _RE_TYPES = (RE_TYPE, Regex) except ImportError: DBRef = None _RE_TYPES = (RE_TYPE,) try: from bson.decimal128 import Decimal128 except ImportError: Decimal128 = None _TOP_LEVEL_OPERATORS = {'$expr', '$text', '$where', '$jsonSchema'} _NOT_IMPLEMENTED_OPERATORS = { '$bitsAllClear', '$bitsAllSet', '$bitsAnyClear', '$bitsAnySet', '$geoIntersects', '$geoWithin', '$maxDistance', '$minDistance', '$near', '$nearSphere', } def filter_applies(search_filter, document): """Applies given filter This function implements MongoDB's matching strategy over documents in the find() method and other related scenarios (like $elemMatch) """ return _filterer_inst.apply(search_filter, document) class _Filterer(object): """An object to help applying a filter, using the MongoDB query language.""" # This is populated using register_parse_expression further down. 
parse_expression = [] def __init__(self): self._operator_map = dict({ '$eq': _list_expand(operator_eq), '$ne': _list_expand(lambda dv, sv: not operator_eq(dv, sv), negative=True), '$all': self._all_op, '$in': _in_op, '$nin': lambda dv, sv: not _in_op(dv, sv), '$exists': lambda dv, sv: bool(sv) == (dv is not NOTHING), '$regex': _not_nothing_and(_regex), '$elemMatch': self._elem_match_op, '$size': _size_op, '$type': _type_op }, **{ key: _not_nothing_and(_list_expand(_compare_objects(op))) for key, op in SORTING_OPERATOR_MAP.items() }) def apply(self, search_filter, document): if not isinstance(search_filter, dict): raise OperationFailure('the match filter must be an expression in an object') for key, search in search_filter.items(): # Top level operators. if key == '$comment': continue if key in LOGICAL_OPERATOR_MAP: if not search: raise OperationFailure('BadValue $and/$or/$nor must be a nonempty array') if not LOGICAL_OPERATOR_MAP[key](document, search, self.apply): return False continue if key == '$expr': parse_expression = self.parse_expression[0] if not parse_expression(search, document, ignore_missing_keys=True): return False continue if key in _TOP_LEVEL_OPERATORS: raise NotImplementedError( 'The {} operator is not implemented in mongomock yet'.format(key)) if key.startswith('$'): raise OperationFailure('unknown top level operator: ' + key) is_match = False is_checking_negative_match = \ isinstance(search, dict) and {'$ne', '$nin'} & set(search.keys()) is_checking_positive_match = \ not isinstance(search, dict) or (set(search.keys()) - {'$ne', '$nin'}) has_candidates = False if search == {'$exists': False} and not iter_key_candidates(key, document): continue if isinstance(search, dict) and '$all' in search: if not self._all_op(iter_key_candidates(key, document), search['$all']): return False # if there are no query operators then continue if len(search) == 1: continue for doc_val in iter_key_candidates(key, document): has_candidates |= doc_val is not NOTHING 
is_ops_filter = search and isinstance(search, dict) and \ all(key.startswith('$') for key in search.keys()) if is_ops_filter: if '$options' in search and '$regex' in search: search = _combine_regex_options(search) unknown_operators = set(search) - set(self._operator_map) - {'$not'} if unknown_operators: not_implemented_operators = unknown_operators & _NOT_IMPLEMENTED_OPERATORS if not_implemented_operators: raise NotImplementedError( "'%s' is a valid operation but it is not supported by Mongomock " 'yet.' % list(not_implemented_operators)[0]) raise OperationFailure('unknown operator: ' + list(unknown_operators)[0]) is_match = all( operator_string in self._operator_map and self._operator_map[operator_string](doc_val, search_val) or operator_string == '$not' and self._not_op(document, key, search_val) for operator_string, search_val in search.items() ) and search elif isinstance(search, _RE_TYPES) and isinstance(doc_val, (str, list)): is_match = _regex(doc_val, search) elif key in LOGICAL_OPERATOR_MAP: if not search: raise OperationFailure('BadValue $and/$or/$nor must be a nonempty array') is_match = LOGICAL_OPERATOR_MAP[key](document, search, self.apply) elif isinstance(doc_val, (list, tuple)): is_match = (search in doc_val or search == doc_val) if isinstance(search, ObjectId): is_match |= (str(search) in doc_val) else: is_match = (doc_val == search) or (search is None and doc_val is NOTHING) # When checking negative match, all the elements should match. if is_checking_negative_match and not is_match: return False # If not checking negative matches, the first match is enouh for this criteria. 
if is_match and not is_checking_negative_match: break if not is_match and (has_candidates or is_checking_positive_match): return False return True def _not_op(self, d, k, s): if isinstance(s, dict): for key in s.keys(): if key not in self._operator_map and key not in LOGICAL_OPERATOR_MAP: raise OperationFailure('unknown operator: %s' % key) elif isinstance(s, _RE_TYPES): pass else: raise OperationFailure('$not needs a regex or a document') return not self.apply({k: s}, d) def _elem_match_op(self, doc_val, query): if not isinstance(doc_val, list): return False if not isinstance(query, dict): raise OperationFailure('$elemMatch needs an Object') for item in doc_val: try: if self.apply(query, item): return True except OperationFailure: if self.apply({'field': query}, {'field': item}): return True return False def _all_op(self, doc_val, search_val): if isinstance(doc_val, list) and doc_val and isinstance(doc_val[0], list): doc_val = list(itertools.chain.from_iterable(doc_val)) dv = _force_list(doc_val) matches = [] for x in search_val: if isinstance(x, dict) and '$elemMatch' in x: matches.append(self._elem_match_op(doc_val, x['$elemMatch'])) else: matches.append(x in dv) return all(matches) def iter_key_candidates(key, doc): """Get possible subdocuments or lists that are referred to by the key in question Returns the appropriate nested value if the key includes dot notation. 
""" if not key: return [doc] if doc is None: return () if isinstance(doc, list): return _iter_key_candidates_sublist(key, doc) if not isinstance(doc, dict): return () key_parts = key.split('.') if len(key_parts) == 1: return [doc.get(key, NOTHING)] sub_key = '.'.join(key_parts[1:]) sub_doc = doc.get(key_parts[0], {}) return iter_key_candidates(sub_key, sub_doc) def _iter_key_candidates_sublist(key, doc): """Iterates of candidates :param doc: a list to be searched for candidates for our key :param key: the string key to be matched """ key_parts = key.split('.') sub_key = key_parts.pop(0) key_remainder = '.'.join(key_parts) try: sub_key_int = int(sub_key) except ValueError: sub_key_int = None if sub_key_int is None: # subkey is not an integer... ret = [] for sub_doc in doc: if isinstance(sub_doc, dict): if sub_key in sub_doc: ret.extend(iter_key_candidates(key_remainder, sub_doc[sub_key])) else: ret.append(NOTHING) return ret # subkey is an index if sub_key_int >= len(doc): return () # dead end sub_doc = doc[sub_key_int] if key_parts: return iter_key_candidates('.'.join(key_parts), sub_doc) return [sub_doc] def _force_list(v): return v if isinstance(v, (list, tuple)) else [v] def _in_op(doc_val, search_val): if not isinstance(search_val, (list, tuple)): raise OperationFailure('$in needs an array') if doc_val is NOTHING and None in search_val: return True doc_val = _force_list(doc_val) is_regex_list = [isinstance(x, _RE_TYPES) for x in search_val] if not any(is_regex_list): return any(x in search_val for x in doc_val) for x, is_regex in zip(search_val, is_regex_list): if (is_regex and _regex(doc_val, x)) or (x in doc_val): return True return False def _not_nothing_and(f): """wrap an operator to return False if the first arg is NOTHING""" return lambda v, l: v is not NOTHING and f(v, l) def _compare_objects(op): """Wrap an operator to also compare objects following BSON comparison. 
See https://docs.mongodb.com/manual/reference/bson-type-comparison-order/#objects """ def _wrapped(a, b): # Do not compare uncomparable types, see Type Bracketing: # https://docs.mongodb.com/manual/reference/method/db.collection.find/#type-bracketing return bson_compare(op, a, b, can_compare_types=False) return _wrapped def bson_compare(op, a, b, can_compare_types=True): """Compare two elements using BSON comparison. Args: op: the basic operation to compare (e.g. operator.lt, operator.ge). a: the first operand b: the second operand can_compare_types: if True, according to BSON's definition order between types is used, otherwise always return False when types are different. """ a_type = _get_compare_type(a) b_type = _get_compare_type(b) if a_type != b_type: return can_compare_types and op(a_type, b_type) # Compare DBRefs as dicts if type(a).__name__ == 'DBRef' and hasattr(a, 'as_doc'): a = a.as_doc() if type(b).__name__ == 'DBRef' and hasattr(b, 'as_doc'): b = b.as_doc() if isinstance(a, dict): # MongoDb server compares the type before comparing the keys # https://github.com/mongodb/mongo/blob/f10f214/src/mongo/bson/bsonelement.cpp#L516 # even though the documentation does not say anything about that. 
a = [(_get_compare_type(v), k, v) for k, v in a.items()] b = [(_get_compare_type(v), k, v) for k, v in b.items()] if isinstance(a, (tuple, list)): for item_a, item_b in zip(a, b): if item_a != item_b: return bson_compare(op, item_a, item_b) return bson_compare(op, len(a), len(b)) if isinstance(a, NoneType): return op(0, 0) # bson handles bytes as binary in python3+: # https://api.mongodb.com/python/current/api/bson/index.html if isinstance(a, bytes): # Performs the same operation as described by: # https://docs.mongodb.com/manual/reference/bson-type-comparison-order/#bindata if len(a) != len(b): return op(len(a), len(b)) # bytes is always treated as subtype 0 by the bson library return op(a, b) def _get_compare_type(val): """Get a number representing the base type of the value used for comparison. See https://docs.mongodb.com/manual/reference/bson-type-comparison-order/ also https://github.com/mongodb/mongo/blob/46b28bb/src/mongo/bson/bsontypes.h#L175 for canonical values. """ if isinstance(val, NoneType): return 5 if isinstance(val, bool): return 40 if isinstance(val, numbers.Number): return 10 if isinstance(val, str): return 15 if isinstance(val, dict): return 20 if isinstance(val, (tuple, list)): return 25 if isinstance(val, uuid.UUID): return 30 if isinstance(val, bytes): return 30 if isinstance(val, ObjectId): return 35 if isinstance(val, datetime): return 45 if isinstance(val, _RE_TYPES): return 50 if DBRef and isinstance(val, DBRef): # According to the C++ code, this should be 55 but apparently sending a DBRef through # pymongo is stored as a dict. 
return 20 raise NotImplementedError( "Mongomock does not know how to sort '%s' of type '%s'" % (val, type(val))) def _regex(doc_val, regex): if not (isinstance(doc_val, (str, list)) or isinstance(doc_val, RE_TYPE)): return False if isinstance(regex, str): regex = re.compile(regex) if not isinstance(regex, RE_TYPE): # bson.Regex regex = regex.try_compile() return any( regex.search(item) for item in _force_list(doc_val) if isinstance(item, str)) def _size_op(doc_val, search_val): if isinstance(doc_val, (list, tuple, dict)): return search_val == len(doc_val) return search_val == 1 if doc_val and doc_val is not NOTHING else 0 def _list_expand(f, negative=False): def func(doc_val, search_val): if isinstance(doc_val, (list, tuple)) and not isinstance(search_val, (list, tuple)): if negative: return all(f(val, search_val) for val in doc_val) return any(f(val, search_val) for val in doc_val) return f(doc_val, search_val) return func def _type_op(doc_val, search_val, in_array=False): if search_val not in TYPE_MAP: raise OperationFailure('%r is not a valid $type' % search_val) elif TYPE_MAP[search_val] is None: raise NotImplementedError('%s is a valid $type but not implemented' % search_val) if TYPE_MAP[search_val](doc_val): return True if isinstance(doc_val, (list, tuple)) and not in_array: return any(_type_op(val, search_val, in_array=True) for val in doc_val) return False def _combine_regex_options(search): if not isinstance(search['$options'], str): raise OperationFailure('$options has to be a string') options = None for option in search['$options']: if option not in 'imxs': continue re_option = getattr(re, option.upper()) if options is None: options = re_option else: options |= re_option search_copy = dict(search) del search_copy['$options'] if options is None: return search_copy if isinstance(search['$regex'], _RE_TYPES): if isinstance(search['$regex'], RE_TYPE): search_copy['$regex'] = re.compile( search['$regex'].pattern, search['$regex'].flags | options) else: # 
bson.Regex regex = search['$regex'] search_copy['$regex'] = regex.__class__(regex.pattern, regex.flags | options) else: search_copy['$regex'] = re.compile(search['$regex'], options) return search_copy def operator_eq(doc_val, search_val): if doc_val is NOTHING and search_val is None: return True return operator.eq(doc_val, search_val) SORTING_OPERATOR_MAP = { '$gt': operator.gt, '$gte': operator.ge, '$lt': operator.lt, '$lte': operator.le, } LOGICAL_OPERATOR_MAP = { '$or': lambda d, subq, filter_func: any(filter_func(q, d) for q in subq), '$and': lambda d, subq, filter_func: all(filter_func(q, d) for q in subq), '$nor': lambda d, subq, filter_func: all(not filter_func(q, d) for q in subq), '$not': lambda d, subq, filter_func: (not filter_func(q, d) for q in subq), } TYPE_MAP = { 'double': lambda v: isinstance(v, float), 'string': lambda v: isinstance(v, str), 'object': lambda v: isinstance(v, dict), 'array': lambda v: isinstance(v, list), 'binData': lambda v: isinstance(v, bytes), 'undefined': None, 'objectId': lambda v: isinstance(v, ObjectId), 'bool': lambda v: isinstance(v, bool), 'date': lambda v: isinstance(v, datetime), 'null': None, 'regex': None, 'dbPointer': None, 'javascript': None, 'symbol': None, 'javascriptWithScope': None, 'int': lambda v: ( isinstance(v, int) and not isinstance(v, bool) and v.bit_length() <= 32 ), 'timestamp': None, 'long': lambda v: ( isinstance(v, int) and not isinstance(v, bool) and v.bit_length() > 32 ), 'decimal': (lambda v: isinstance(v, Decimal128)) if Decimal128 else None, 'number': lambda v: ( # pylint: disable-next=isinstance-second-argument-not-valid-type isinstance(v, (int, float) + ((Decimal128,) if Decimal128 else ())) and not isinstance(v, bool) ), 'minKey': None, 'maxKey': None, } def resolve_key(key, doc): return next(iter(iter_key_candidates(key, doc)), NOTHING) def resolve_sort_key(key, doc): value = resolve_key(key, doc) # see http://docs.mongodb.org/manual/reference/method/cursor.sort/#ascending-descending-sort 
if value is NOTHING: return 1, BsonComparable(None) # List or tuples are sorted solely by their first value. if isinstance(value, (tuple, list)): if not value: return 0, BsonComparable(None) return 1, BsonComparable(value[0]) return 1, BsonComparable(value) class BsonComparable(object): """Wraps a value in an BSON like object that can be compared one to another.""" def __init__(self, obj): self.obj = obj def __lt__(self, other): return bson_compare(operator.lt, self.obj, other.obj) _filterer_inst = _Filterer() # Developer note: to avoid a cross-modules dependency (filtering requires aggregation, that requires # filtering), the aggregation module needs to register its parse_expression function here. def register_parse_expression(parse_expression): """Register the parse_expression function from the aggregate module.""" del _Filterer.parse_expression[:] _Filterer.parse_expression.append(parse_expression) mongomock-4.1.3/mongomock/gridfs.py000066400000000000000000000044531456276026200173510ustar00rootroot00000000000000from unittest import mock from mongomock import Database as MongoMockDatabase, Collection as MongoMockCollection from mongomock.collection import Cursor as MongoMockCursor try: from pymongo.collection import Collection as PyMongoCollection from pymongo.database import Database as PyMongoDatabase from gridfs.grid_file import GridOut as PyMongoGridOut, GridOutCursor as PyMongoGridOutCursor _HAVE_PYMONGO = True except ImportError: _HAVE_PYMONGO = False # This is a copy of GridOutCursor but with a different base. Note that we # need both classes as one might want to access both mongomock and real # MongoDb. 
class _MongoMockGridOutCursor(MongoMockCursor): def __init__(self, collection, *args, **kwargs): self.__root_collection = collection super(_MongoMockGridOutCursor, self).__init__(collection.files, *args, **kwargs) def next(self): next_file = super(_MongoMockGridOutCursor, self).next() return PyMongoGridOut( self.__root_collection, file_document=next_file, session=self.session) __next__ = next def add_option(self, *args, **kwargs): raise NotImplementedError() def remove_option(self, *args, **kwargs): raise NotImplementedError() def _clone_base(self, session): return _MongoMockGridOutCursor(self.__root_collection, session=session) def _create_grid_out_cursor(collection, *args, **kwargs): if isinstance(collection, MongoMockCollection): return _MongoMockGridOutCursor(collection, *args, **kwargs) return PyMongoGridOutCursor(collection, *args, **kwargs) def enable_gridfs_integration(): """This function enables the use of mongomock Database's and Collection's inside gridfs Gridfs library use `isinstance` to make sure the passed elements are valid `pymongo.Database/Collection` so we monkey patch those types in the gridfs modules (luckily in the modules they are used, they are only used with isinstance). """ if not _HAVE_PYMONGO: raise NotImplementedError('gridfs mocking requires pymongo to work') mock.patch('gridfs.Database', (PyMongoDatabase, MongoMockDatabase)).start() mock.patch('gridfs.grid_file.Collection', (PyMongoCollection, MongoMockCollection)).start() mock.patch('gridfs.GridOutCursor', _create_grid_out_cursor).start() mongomock-4.1.3/mongomock/helpers.py000066400000000000000000000331061456276026200175320ustar00rootroot00000000000000from collections import abc from collections import OrderedDict from datetime import datetime, timedelta, tzinfo from mongomock import InvalidURI from packaging import version import re import time from urllib.parse import unquote_plus import warnings # Get ObjectId from bson if available or import a crafted one. 
This is not used # in this module but is made available for callers of this module. try: from bson import ObjectId # pylint: disable=unused-import from bson import Timestamp from pymongo import version as pymongo_version PYMONGO_VERSION = version.parse(pymongo_version) HAVE_PYMONGO = True except ImportError: from mongomock.object_id import ObjectId # noqa Timestamp = None # Default Pymongo version if not present. PYMONGO_VERSION = version.parse('4.0') HAVE_PYMONGO = False # Cache the RegExp pattern type. RE_TYPE = type(re.compile('')) _HOST_MATCH = re.compile(r'^([^@]+@)?([^:]+|\[[^\]]+\])(:([^:]+))?$') _SIMPLE_HOST_MATCH = re.compile(r'^([^:]+|\[[^\]]+\])(:([^:]+))?$') try: from bson.tz_util import utc except ImportError: class _FixedOffset(tzinfo): def __init__(self, offset, name): self.__offset = timedelta(minutes=offset) self.__name = name def __getinitargs__(self): return self.__offset, self.__name def utcoffset(self, dt): return self.__offset def tzname(self, dt): return self.__name def dst(self, dt): return timedelta(0) utc = _FixedOffset(0, 'UTC') ASCENDING = 1 DESCENDING = -1 def utcnow(): """Simple wrapper for datetime.utcnow This provides a centralized definition of "now" in the mongomock realm, allowing users to transform the value of "now" to the future or the past, based on their testing needs. For example: ```python def test_x(self): with mock.patch("mongomock.utcnow") as mm_utc: mm_utc = datetime.utcnow() + timedelta(hours=100) # Test some things "100 hours" in the future ``` """ return datetime.utcnow() def print_deprecation_warning(old_param_name, new_param_name): warnings.warn( "'%s' has been deprecated to be in line with pymongo implementation, a new parameter '%s' " 'should be used instead. the old parameter will be kept for backward compatibility ' 'purposes.' % (old_param_name, new_param_name), DeprecationWarning) def create_index_list(key_or_list, direction=None): """Helper to generate a list of (key, direction) pairs. 
It takes such a list, or a single key, or a single key and direction. """ if isinstance(key_or_list, str): return [(key_or_list, direction or ASCENDING)] if not isinstance(key_or_list, (list, tuple, abc.Iterable)): raise TypeError('if no direction is specified, ' 'key_or_list must be an instance of list') return key_or_list def gen_index_name(index_list): """Generate an index name based on the list of keys with directions.""" return u'_'.join(['%s_%s' % item for item in index_list]) class hashdict(dict): """hashable dict implementation, suitable for use as a key into other dicts. >>> h1 = hashdict({'apples': 1, 'bananas':2}) >>> h2 = hashdict({'bananas': 3, 'mangoes': 5}) >>> h1+h2 hashdict(apples=1, bananas=3, mangoes=5) >>> d1 = {} >>> d1[h1] = 'salad' >>> d1[h1] 'salad' >>> d1[h2] Traceback (most recent call last): ... KeyError: hashdict(bananas=3, mangoes=5) based on answers from http://stackoverflow.com/questions/1151658/python-hashable-dicts """ def __key(self): return frozenset((k, hashdict(v) if isinstance(v, dict) else tuple(v) if isinstance(v, list) else v) for k, v in self.items()) def __repr__(self): return '{0}({1})'.format( self.__class__.__name__, ', '.join('{0}={1}'.format(str(i[0]), repr(i[1])) for i in sorted(self.__key()))) def __hash__(self): return hash(self.__key()) def __setitem__(self, key, value): raise TypeError('{0} does not support item assignment' .format(self.__class__.__name__)) def __delitem__(self, key): raise TypeError('{0} does not support item assignment' .format(self.__class__.__name__)) def clear(self): raise TypeError('{0} does not support item assignment' .format(self.__class__.__name__)) def pop(self, *args, **kwargs): raise TypeError('{0} does not support item assignment' .format(self.__class__.__name__)) def popitem(self, *args, **kwargs): raise TypeError('{0} does not support item assignment' .format(self.__class__.__name__)) def setdefault(self, *args, **kwargs): raise TypeError('{0} does not support item assignment' 
.format(self.__class__.__name__)) def update(self, *args, **kwargs): raise TypeError('{0} does not support item assignment' .format(self.__class__.__name__)) def __add__(self, right): result = hashdict(self) dict.update(result, right) return result def fields_list_to_dict(fields): """Takes a list of field names and returns a matching dictionary. ['a', 'b'] becomes {'a': 1, 'b': 1} and ['a.b.c', 'd', 'a.c'] becomes {'a.b.c': 1, 'd': 1, 'a.c': 1} """ as_dict = {} for field in fields: if not isinstance(field, str): raise TypeError('fields must be a list of key names, each an instance of str') as_dict[field] = 1 return as_dict def parse_uri(uri, default_port=27017, warn=False): """A simplified version of pymongo.uri_parser.parse_uri. Returns a dict with: - nodelist, a tuple of (host, port) - database the name of the database or None if no database is provided in the URI. An invalid MongoDB connection URI may raise an InvalidURI exception, however, the URI is not fully parsed and some invalid URIs may not result in an exception. 'mongodb://host1/database' becomes 'host1', 27017, 'database' and 'mongodb://host1' becomes 'host1', 27017, None """ SCHEME = 'mongodb://' if not uri.startswith(SCHEME): raise InvalidURI('Invalid URI scheme: URI ' "must begin with '%s'" % (SCHEME,)) scheme_free = uri[len(SCHEME):] if not scheme_free: raise InvalidURI('Must provide at least one hostname or IP.') dbase = None # Check for unix domain sockets in the uri if '.sock' in scheme_free: host_part, _, path_part = scheme_free.rpartition('/') if not host_part: host_part = path_part path_part = '' if '/' in host_part: raise InvalidURI("Any '/' in a unix domain socket must be" ' URL encoded: %s' % host_part) path_part = unquote_plus(path_part) else: host_part, _, path_part = scheme_free.partition('/') if not path_part and '?' 
in host_part: raise InvalidURI("A '/' is required between " 'the host list and any options.') nodelist = [] if ',' in host_part: hosts = host_part.split(',') else: hosts = [host_part] for host in hosts: match = _HOST_MATCH.match(host) if not match: raise ValueError( "Reserved characters such as ':' must be escaped according RFC " "2396. An IPv6 address literal must be enclosed in '[' and ']' " 'according to RFC 2732.') host = match.group(2) if host.startswith('[') and host.endswith(']'): host = host[1:-1] port = match.group(4) if port: try: port = int(port) if port < 0 or port > 65535: raise ValueError() except ValueError as err: raise ValueError('Port must be an integer between 0 and 65535:', port) from err else: port = default_port nodelist.append((host, port)) if path_part and path_part[0] != '?': dbase, _, _ = path_part.partition('?') if '.' in dbase: dbase, _ = dbase.split('.', 1) if dbase is not None: dbase = unquote_plus(dbase) return {'nodelist': tuple(nodelist), 'database': dbase} def split_hosts(hosts, default_port=27017): """Split the entity into a list of tuples of host and port.""" nodelist = [] for entity in hosts.split(','): port = default_port if entity.endswith('.sock'): port = None match = _SIMPLE_HOST_MATCH.match(entity) if not match: raise ValueError( "Reserved characters such as ':' must be escaped according RFC " "2396. An IPv6 address literal must be enclosed in '[' and ']' " 'according to RFC 2732.') host = match.group(1) if host.startswith('[') and host.endswith(']'): host = host[1:-1] if match.group(3): try: port = int(match.group(3)) if port < 0 or port > 65535: raise ValueError() except ValueError as err: raise ValueError('Port must be an integer between 0 and 65535:', port) from err nodelist.append((host, port)) return nodelist _LAST_TIMESTAMP_INC = [] def get_current_timestamp(): """Get the current timestamp as a bson Timestamp object.""" if not Timestamp: raise NotImplementedError('timestamp is not supported. 
Import pymongo to use it.') now = int(time.time()) if _LAST_TIMESTAMP_INC and _LAST_TIMESTAMP_INC[0] == now: _LAST_TIMESTAMP_INC[1] += 1 else: del _LAST_TIMESTAMP_INC[:] _LAST_TIMESTAMP_INC.extend([now, 1]) return Timestamp(now, _LAST_TIMESTAMP_INC[1]) def patch_datetime_awareness_in_document(value): # MongoDB is supposed to stock everything as timezone naive utc date # Hence we have to convert incoming datetimes to avoid errors while # mixing tz aware and naive. # On top of that, MongoDB date precision is up to millisecond, where Python # datetime use microsecond, so we must lower the precision to mimic mongo. for best_type in (OrderedDict, dict): if isinstance(value, best_type): return best_type((k, patch_datetime_awareness_in_document(v)) for k, v in value.items()) if isinstance(value, (tuple, list)): return [patch_datetime_awareness_in_document(item) for item in value] if isinstance(value, datetime): mongo_us = (value.microsecond // 1000) * 1000 if value.tzinfo: return (value - value.utcoffset()).replace(tzinfo=None, microsecond=mongo_us) return value.replace(microsecond=mongo_us) if Timestamp and isinstance(value, Timestamp) and not value.time and not value.inc: return get_current_timestamp() return value def make_datetime_timezone_aware_in_document(value): # MongoClient support tz_aware=True parameter to return timezone-aware # datetime objects. Given the date is stored internally without timezone # information, all returned datetime have utc as timezone. 
if isinstance(value, dict): return {k: make_datetime_timezone_aware_in_document(v) for k, v in value.items()} if isinstance(value, (tuple, list)): return [make_datetime_timezone_aware_in_document(item) for item in value] if isinstance(value, datetime): return value.replace(tzinfo=utc) return value def get_value_by_dot(doc, key, can_generate_array=False): """Get dictionary value using dotted key""" result = doc key_items = key.split('.') for key_index, key_item in enumerate(key_items): if isinstance(result, dict): result = result[key_item] elif isinstance(result, (list, tuple)): try: int_key = int(key_item) except ValueError as err: if not can_generate_array: raise KeyError(key_index) from err remaining_key = '.'.join(key_items[key_index:]) return [get_value_by_dot(subdoc, remaining_key) for subdoc in result] try: result = result[int_key] except (ValueError, IndexError) as err: raise KeyError(key_index) from err else: raise KeyError(key_index) return result def set_value_by_dot(doc, key, value): """Set dictionary value using dotted key""" try: parent_key, child_key = key.rsplit('.', 1) parent = get_value_by_dot(doc, parent_key) except ValueError: child_key = key parent = doc if isinstance(parent, dict): parent[child_key] = value elif isinstance(parent, (list, tuple)): try: parent[int(child_key)] = value except (ValueError, IndexError) as err: raise KeyError() from err else: raise KeyError() return doc def delete_value_by_dot(doc, key): """Delete dictionary value using dotted key. This function assumes that the value exists. 
""" try: parent_key, child_key = key.rsplit('.', 1) parent = get_value_by_dot(doc, parent_key) except ValueError: child_key = key parent = doc del parent[child_key] return doc def mongodb_to_bool(value): """Converts any value to bool the way MongoDB does it""" return value not in [False, None, 0] mongomock-4.1.3/mongomock/mongo_client.py000066400000000000000000000135101456276026200205420ustar00rootroot00000000000000from .database import Database from .store import ServerStore import itertools import mongomock from mongomock import codec_options as mongomock_codec_options from mongomock import ConfigurationError from mongomock import helpers from mongomock import read_preferences from packaging import version import warnings try: from pymongo.uri_parser import parse_uri, split_hosts from pymongo import ReadPreference _READ_PREFERENCE_PRIMARY = ReadPreference.PRIMARY except ImportError: from .helpers import parse_uri, split_hosts _READ_PREFERENCE_PRIMARY = read_preferences.PRIMARY def _convert_version_to_list(version_str): pieces = [int(part) for part in version_str.split('.')] return pieces + [0] * (4 - len(pieces)) class MongoClient(object): HOST = 'localhost' PORT = 27017 _CONNECTION_ID = itertools.count() def __init__(self, host=None, port=None, document_class=dict, tz_aware=False, connect=True, _store=None, read_preference=None, **kwargs): if host: self.host = host[0] if isinstance(host, (list, tuple)) else host else: self.host = self.HOST self.port = port or self.PORT self._tz_aware = tz_aware self._codec_options = mongomock_codec_options.CodecOptions(tz_aware=tz_aware) self._database_accesses = {} self._store = _store or ServerStore() self._id = next(self._CONNECTION_ID) self._document_class = document_class if read_preference is not None: read_preferences.ensure_read_preference_type('read_preference', read_preference) self._read_preference = read_preference or _READ_PREFERENCE_PRIMARY dbase = None if '://' in self.host: res = parse_uri(self.host, 
default_port=self.port, warn=True) self.host, self.port = res['nodelist'][0] dbase = res['database'] else: self.host, self.port = split_hosts(self.host, default_port=self.port)[0] self.__default_database_name = dbase self._server_version = mongomock.SERVER_VERSION def __getitem__(self, db_name): return self.get_database(db_name) def __getattr__(self, attr): return self[attr] def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.close() def __repr__(self): return "mongomock.MongoClient('{0}', {1})".format(self.host, self.port) def __eq__(self, other): if isinstance(other, self.__class__): return self.address == other.address return NotImplemented if helpers.PYMONGO_VERSION >= version.parse('3.12'): def __hash__(self): return hash(self.address) def close(self): pass @property def is_mongos(self): return True @property def is_primary(self): return True @property def address(self): return self.host, self.port @property def read_preference(self): return self._read_preference @property def codec_options(self): return self._codec_options def server_info(self): return { 'version': self._server_version, 'sysInfo': 'Mock', 'versionArray': _convert_version_to_list(self._server_version), 'bits': 64, 'debug': False, 'maxBsonObjectSize': 16777216, 'ok': 1 } if helpers.PYMONGO_VERSION < version.parse('4.0'): def database_names(self): warnings.warn('database_names is deprecated. 
Use list_database_names instead.') return self.list_database_names() def list_database_names(self): return self._store.list_created_database_names() def drop_database(self, name_or_db): def drop_collections_for_db(_db): db_store = self._store[_db.name] for col_name in db_store.list_created_collection_names(): _db.drop_collection(col_name) if isinstance(name_or_db, Database): db = next(db for db in self._database_accesses.values() if db is name_or_db) if db: drop_collections_for_db(db) elif name_or_db in self._store: db = self.get_database(name_or_db) drop_collections_for_db(db) def get_database(self, name=None, codec_options=None, read_preference=None, write_concern=None, read_concern=None): if name is None: db = self.get_default_database( codec_options=codec_options, read_preference=read_preference, write_concern=write_concern, read_concern=read_concern, ) else: db = self._database_accesses.get(name) if db is None: db_store = self._store[name] db = self._database_accesses[name] = Database( self, name, read_preference=read_preference or self.read_preference, codec_options=codec_options or self._codec_options, _store=db_store, read_concern=read_concern) return db def get_default_database(self, default=None, **kwargs): name = self.__default_database_name name = name if name is not None else default if name is None: raise ConfigurationError('No default database name defined or provided.') return self.get_database(name=name, **kwargs) def alive(self): """The original MongoConnection.alive method checks the status of the server. In our case as we mock the actual server, we should always return True. 
""" return True def start_session(self, causal_consistency=True, default_transaction_options=None): """Start a logical session.""" raise NotImplementedError('Mongomock does not support sessions yet') mongomock-4.1.3/mongomock/not_implemented.py000066400000000000000000000020231456276026200212450ustar00rootroot00000000000000"""Module to handle features that are not implemented yet.""" _IGNORED_FEATURES = { 'array_filters': False, 'collation': False, 'let': False, 'session': False, } def _ensure_ignorable_feature(feature): if feature not in _IGNORED_FEATURES: raise KeyError( '%s is not an error that can be ignored: maybe it has been implemented in Mongomock. ' 'Here is the list of features that can be ignored: %s' % ( feature, _IGNORED_FEATURES.keys())) def ignore_feature(feature): """Ignore a feature instead of raising a NotImplementedError.""" _ensure_ignorable_feature(feature) _IGNORED_FEATURES[feature] = True def warn_on_feature(feature): """Rasie a NotImplementedError the next times a feature is used.""" _ensure_ignorable_feature(feature) _IGNORED_FEATURES[feature] = False def raise_for_feature(feature, reason): _ensure_ignorable_feature(feature) if _IGNORED_FEATURES[feature]: return False raise NotImplementedError(reason) mongomock-4.1.3/mongomock/object_id.py000066400000000000000000000010651456276026200200110ustar00rootroot00000000000000import uuid class ObjectId(object): def __init__(self, id=None): super(ObjectId, self).__init__() if id is None: self._id = uuid.uuid1() else: self._id = uuid.UUID(id) def __eq__(self, other): return isinstance(other, ObjectId) and other._id == self._id def __ne__(self, other): return not self == other def __hash__(self): return hash(self._id) def __repr__(self): return 'ObjectId({0})'.format(self._id) def __str__(self): return str(self._id) mongomock-4.1.3/mongomock/patch.py000066400000000000000000000065661456276026200172010ustar00rootroot00000000000000from .mongo_client import MongoClient import time try: from unittest import 
mock _IMPORT_MOCK_ERROR = None except ImportError: try: import mock _IMPORT_MOCK_ERROR = None except ImportError as error: _IMPORT_MOCK_ERROR = error try: import pymongo from pymongo.uri_parser import parse_uri, split_hosts _IMPORT_PYMONGO_ERROR = None except ImportError as error: from .helpers import parse_uri, split_hosts _IMPORT_PYMONGO_ERROR = error def _parse_any_host(host, default_port=27017): if isinstance(host, tuple): return _parse_any_host(host[0], host[1]) if '://' in host: return parse_uri(host, warn=True)['nodelist'] return split_hosts(host, default_port=default_port) def patch(servers='localhost', on_new='error'): """Patch pymongo.MongoClient. This will patch the class MongoClient and use mongomock to mock MongoDB servers. It keeps a consistant state of servers across multiple clients so you can do: ``` client = pymongo.MongoClient(host='localhost', port=27017) client.db.coll.insert_one({'name': 'Pascal'}) other_client = pymongo.MongoClient('mongodb://localhost:27017') client.db.coll.find_one() ``` The data is persisted as long as the patch lives. Args: on_new: Behavior when accessing a new server (not in servers): 'create': mock a new empty server, accept any client connection. 'error': raise a ValueError immediately when trying to access. 'timeout': behave as pymongo when a server does not exist, raise an error after a timeout. 'pymongo': use an actual pymongo client. servers: a list of server that are avaiable. 
""" if _IMPORT_MOCK_ERROR: raise _IMPORT_MOCK_ERROR # pylint: disable=raising-bad-type if _IMPORT_PYMONGO_ERROR: PyMongoClient = None else: PyMongoClient = pymongo.MongoClient persisted_clients = {} parsed_servers = set() for server in servers if isinstance(servers, (list, tuple)) else [servers]: parsed_servers.update(_parse_any_host(server)) def _create_persistent_client(*args, **kwargs): if _IMPORT_PYMONGO_ERROR: raise _IMPORT_PYMONGO_ERROR # pylint: disable=raising-bad-type client = MongoClient(*args, **kwargs) try: persisted_client = persisted_clients[client.address] client._store = persisted_client._store return client except KeyError: pass if client.address in parsed_servers or on_new == 'create': persisted_clients[client.address] = client return client if on_new == 'timeout': # TODO(pcorpet): Only wait when trying to access the server's data. time.sleep(kwargs.get('serverSelectionTimeoutMS', 30000)) raise pymongo.errors.ServerSelectionTimeoutError( '%s:%d: [Errno 111] Connection refused' % client.address) if on_new == 'pymongo': return PyMongoClient(*args, **kwargs) raise ValueError( 'MongoDB server %s:%d does not exist.\n' % client.address + '%s' % parsed_servers) class _PersistentClient: def __new__(cls, *args, **kwargs): return _create_persistent_client(*args, **kwargs) return mock.patch('pymongo.MongoClient', _PersistentClient) mongomock-4.1.3/mongomock/py.typed000066400000000000000000000000001456276026200172000ustar00rootroot00000000000000mongomock-4.1.3/mongomock/read_concern.py000066400000000000000000000007161456276026200205130ustar00rootroot00000000000000class ReadConcern(object): def __init__(self, level=None): self._document = {} if level is not None: self._document['level'] = level @property def level(self): return self._document.get('level') @property def ok_for_legacy(self): return True @property def document(self): return self._document.copy() def __eq__(self, other): return other.document == self.document 
mongomock-4.1.3/mongomock/read_preferences.py000066400000000000000000000015301456276026200213600ustar00rootroot00000000000000class _Primary(object): @property def mongos_mode(self): return 'primary' @property def mode(self): return 0 @property def name(self): return 'Primary' @property def document(self): return {'mode': 'primary'} @property def tag_sets(self): return [{}] @property def max_staleness(self): return -1 @property def min_wire_version(self): return 0 def ensure_read_preference_type(key, value): """Raise a TypeError if the value is not a type compatible for ReadPreference.""" for attr in ('document', 'mode', 'mongos_mode', 'max_staleness'): if not hasattr(value, attr): raise TypeError('{} must be an instance of {}'.format( key, 'pymongo.read_preference.ReadPreference')) PRIMARY = _Primary() mongomock-4.1.3/mongomock/results.py000066400000000000000000000067121456276026200175740ustar00rootroot00000000000000try: from pymongo.results import BulkWriteResult from pymongo.results import DeleteResult from pymongo.results import InsertManyResult from pymongo.results import InsertOneResult from pymongo.results import UpdateResult except ImportError: class _WriteResult(object): def __init__(self, acknowledged=True): self.__acknowledged = acknowledged @property def acknowledged(self): return self.__acknowledged class InsertOneResult(_WriteResult): __slots__ = ('__inserted_id', '__acknowledged') def __init__(self, inserted_id, acknowledged=True): self.__inserted_id = inserted_id super(InsertOneResult, self).__init__(acknowledged) @property def inserted_id(self): return self.__inserted_id class InsertManyResult(_WriteResult): __slots__ = ('__inserted_ids', '__acknowledged') def __init__(self, inserted_ids, acknowledged=True): self.__inserted_ids = inserted_ids super(InsertManyResult, self).__init__(acknowledged) @property def inserted_ids(self): return self.__inserted_ids class UpdateResult(_WriteResult): __slots__ = ('__raw_result', '__acknowledged') def 
__init__(self, raw_result, acknowledged=True): self.__raw_result = raw_result super(UpdateResult, self).__init__(acknowledged) @property def raw_result(self): return self.__raw_result @property def matched_count(self): if self.upserted_id is not None: return 0 return self.__raw_result.get('n', 0) @property def modified_count(self): return self.__raw_result.get('nModified') @property def upserted_id(self): return self.__raw_result.get('upserted') class DeleteResult(_WriteResult): __slots__ = ('__raw_result', '__acknowledged') def __init__(self, raw_result, acknowledged=True): self.__raw_result = raw_result super(DeleteResult, self).__init__(acknowledged) @property def raw_result(self): return self.__raw_result @property def deleted_count(self): return self.__raw_result.get('n', 0) class BulkWriteResult(_WriteResult): __slots__ = ('__bulk_api_result', '__acknowledged') def __init__(self, bulk_api_result, acknowledged): self.__bulk_api_result = bulk_api_result super(BulkWriteResult, self).__init__(acknowledged) @property def bulk_api_result(self): return self.__bulk_api_result @property def inserted_count(self): return self.__bulk_api_result.get('nInserted') @property def matched_count(self): return self.__bulk_api_result.get('nMatched') @property def modified_count(self): return self.__bulk_api_result.get('nModified') @property def deleted_count(self): return self.__bulk_api_result.get('nRemoved') @property def upserted_count(self): return self.__bulk_api_result.get('nUpserted') @property def upserted_ids(self): if self.__bulk_api_result: return dict((upsert['index'], upsert['_id']) for upsert in self.bulk_api_result['upserted']) mongomock-4.1.3/mongomock/store.py000066400000000000000000000124721456276026200172270ustar00rootroot00000000000000import collections import datetime import functools import mongomock from mongomock.thread import RWLock class ServerStore(object): """Object holding the data for a whole server (many databases).""" def __init__(self): 
self._databases = {} def __getitem__(self, db_name): try: return self._databases[db_name] except KeyError: db = self._databases[db_name] = DatabaseStore() return db def __contains__(self, db_name): return self[db_name].is_created def list_created_database_names(self): return [name for name, db in self._databases.items() if db.is_created] class DatabaseStore(object): """Object holding the data for a database (many collections).""" def __init__(self): self._collections = {} def __getitem__(self, col_name): try: return self._collections[col_name] except KeyError: col = self._collections[col_name] = CollectionStore(col_name) return col def __contains__(self, col_name): return self[col_name].is_created def list_created_collection_names(self): return [name for name, col in self._collections.items() if col.is_created] def create_collection(self, name): col = self[name] col.create() return col def rename(self, name, new_name): col = self._collections.pop(name, CollectionStore(new_name)) col.name = new_name self._collections[new_name] = col @property def is_created(self): return any(col.is_created for col in self._collections.values()) class CollectionStore(object): """Object holding the data for a collection.""" def __init__(self, name): self._documents = collections.OrderedDict() self.indexes = {} self._is_force_created = False self.name = name self._ttl_indexes = {} # 694 - Lock for safely iterating and mutating OrderedDicts self._rwlock = RWLock() def create(self): self._is_force_created = True @property def is_created(self): return self._documents or self.indexes or self._is_force_created def drop(self): self._documents = collections.OrderedDict() self.indexes = {} self._ttl_indexes = {} self._is_force_created = False def create_index(self, index_name, index_dict): self.indexes[index_name] = index_dict if index_dict.get('expireAfterSeconds') is not None: self._ttl_indexes[index_name] = index_dict def drop_index(self, index_name): self._remove_expired_documents() # The 
main index object should raise a KeyError, but the # TTL indexes have no meaning to the outside. del self.indexes[index_name] self._ttl_indexes.pop(index_name, None) @property def is_empty(self): self._remove_expired_documents() return not self._documents def __contains__(self, key): self._remove_expired_documents() with self._rwlock.reader(): return key in self._documents def __getitem__(self, key): self._remove_expired_documents() with self._rwlock.reader(): return self._documents[key] def __setitem__(self, key, val): with self._rwlock.writer(): self._documents[key] = val def __delitem__(self, key): with self._rwlock.writer(): del self._documents[key] def __len__(self): self._remove_expired_documents() with self._rwlock.reader(): return len(self._documents) @property def documents(self): self._remove_expired_documents() with self._rwlock.reader(): for doc in self._documents.values(): yield doc def _remove_expired_documents(self): for index in self._ttl_indexes.values(): self._expire_documents(index) def _expire_documents(self, index): # TODO(juannyg): use a caching mechanism to avoid re-expiring the documents if # we just did and no document was added / updated # Ignore non-integer values try: expiry = int(index['expireAfterSeconds']) except ValueError: return # Ignore commpound keys if len(index['key']) > 1: return # "key" structure = list of (field name, direction) tuples ttl_field_name = next(iter(index['key']))[0] ttl_now = mongomock.utcnow() with self._rwlock.reader(): expired_ids = [ doc['_id'] for doc in self._documents.values() if self._value_meets_expiry(doc.get(ttl_field_name), expiry, ttl_now) ] for exp_id in expired_ids: del self[exp_id] def _value_meets_expiry(self, val, expiry, ttl_now): val_to_compare = _get_min_datetime_from_value(val) try: return (ttl_now - val_to_compare).total_seconds() >= expiry except TypeError: return False def _get_min_datetime_from_value(val): if not val: return datetime.datetime.max if isinstance(val, list): return 
functools.reduce(_min_dt, [datetime.datetime.max] + val) return val def _min_dt(dt1, dt2): try: return dt1 if dt1 < dt2 else dt2 except TypeError: return dt1 mongomock-4.1.3/mongomock/thread.py000066400000000000000000000050401456276026200173330ustar00rootroot00000000000000from contextlib import contextmanager import threading class RWLock: """Lock enabling multiple readers but only 1 exclusive writer Source: https://cutt.ly/Ij70qaq """ def __init__(self): self._read_switch = _LightSwitch() self._write_switch = _LightSwitch() self._no_readers = threading.Lock() self._no_writers = threading.Lock() self._readers_queue = threading.RLock() @contextmanager def reader(self): self._reader_acquire() try: yield except Exception: # pylint: disable=W0706 raise finally: self._reader_release() @contextmanager def writer(self): self._writer_acquire() try: yield except Exception: # pylint: disable=W0706 raise finally: self._writer_release() def _reader_acquire(self): """Readers should block whenever a writer has acquired""" self._readers_queue.acquire() self._no_readers.acquire() self._read_switch.acquire(self._no_writers) self._no_readers.release() self._readers_queue.release() def _reader_release(self): self._read_switch.release(self._no_writers) def _writer_acquire(self): """Acquire the writer lock. Only the first writer will lock the readtry and then all subsequent writers can simply use the resource as it gets freed by the previous writer. The very last writer must release the readtry semaphore, thus opening the gate for readers to try reading. No reader can engage in the entry section if the readtry semaphore has been set by a writer previously """ self._write_switch.acquire(self._no_readers) self._no_writers.acquire() def _writer_release(self): self._no_writers.release() self._write_switch.release(self._no_readers) class _LightSwitch: """An auxiliary "light switch"-like object The first thread turns on the "switch", the last one turns it off. 
Source: https://cutt.ly/Ij70qaq """ def __init__(self): self._counter = 0 self._mutex = threading.RLock() def acquire(self, lock): self._mutex.acquire() self._counter += 1 if self._counter == 1: lock.acquire() self._mutex.release() def release(self, lock): self._mutex.acquire() self._counter -= 1 if self._counter == 0: lock.release() self._mutex.release() mongomock-4.1.3/mongomock/write_concern.py000066400000000000000000000021741456276026200207320ustar00rootroot00000000000000def _with_default_values(document): if 'w' in document: return document return dict(document, w=1) class WriteConcern(object): def __init__(self, w=None, wtimeout=None, j=None, fsync=None): self._document = {} if w is not None: self._document['w'] = w if wtimeout is not None: self._document['wtimeout'] = wtimeout if j is not None: self._document['j'] = j if fsync is not None: self._document['fsync'] = fsync def __eq__(self, other): try: return _with_default_values(other.document) == _with_default_values(self.document) except AttributeError: return NotImplemented def __ne__(self, other): try: return _with_default_values(other.document) != _with_default_values(self.document) except AttributeError: return NotImplemented @property def acknowledged(self): return True @property def document(self): return self._document.copy() @property def is_server_default(self): return not self._document mongomock-4.1.3/requirements.txt000066400000000000000000000000311456276026200170000ustar00rootroot00000000000000sentinels packaging pytz mongomock-4.1.3/setup.cfg000066400000000000000000000017751456276026200153550ustar00rootroot00000000000000[metadata] author = Rotem Yaari author_email = vmalloc@gmail.com name = mongomock description = Fake pymongo stub for testing simple MongoDB-dependent code long_description = file:README.rst description_file = README.rst license = BSD project_urls = Source = https://github.com/mongomock/mongomock classifiers = Development Status :: 5 - Production/Stable Intended Audience :: 
Developers Operating System :: MacOS :: MacOS X Operating System :: Microsoft :: Windows Operating System :: POSIX Programming Language :: Python :: 3 Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: Implementation :: PyPy Topic :: Database [files] packages = mongomock package-data = mongomock = py.typed, *.pyi [bdist_wheel] universal = True mongomock-4.1.3/setup.py000066400000000000000000000001071456276026200152320ustar00rootroot00000000000000from setuptools import setup setup(setup_requires=["pbr"], pbr=True) mongomock-4.1.3/tests/000077500000000000000000000000001456276026200146645ustar00rootroot00000000000000mongomock-4.1.3/tests/__init__.py000066400000000000000000000000001456276026200167630ustar00rootroot00000000000000mongomock-4.1.3/tests/connection_string/000077500000000000000000000000001456276026200204115ustar00rootroot00000000000000mongomock-4.1.3/tests/connection_string/test/000077500000000000000000000000001456276026200213705ustar00rootroot00000000000000mongomock-4.1.3/tests/connection_string/test/valid-auth.json000077500000000000000000000240021456276026200243220ustar00rootroot00000000000000{ "tests": [ { "auth": { "db": null, "password": "foo", "username": "alice" }, "description": "User info for single IPv4 host without database", "hosts": [ { "host": "127.0.0.1", "port": null, "type": "ipv4" } ], "options": null, "uri": "mongodb://alice:foo@127.0.0.1", "valid": true, "warning": false }, { "auth": { "db": "test", "password": "foo", "username": "alice" }, "description": "User info for single IPv4 host with database", "hosts": [ { "host": "127.0.0.1", "port": null, "type": "ipv4" } ], "options": null, "uri": "mongodb://alice:foo@127.0.0.1/test", "valid": true, "warning": false }, { "auth": { "db": "t\u0000est", "password": 
"f\u0000oo", "username": "a\u0000lice" }, "description": "User info for single IPv4 host with database (escaped null bytes)", "hosts": [ { "host": "127.0.0.1", "port": null, "type": "ipv4" } ], "options": null, "uri": "mongodb://a%00lice:f%00oo@127.0.0.1/t%00est", "valid": true, "warning": false }, { "auth": { "db": null, "password": "bar", "username": "bob" }, "description": "User info for single IP literal host without database", "hosts": [ { "host": "::1", "port": 27018, "type": "ip_literal" } ], "options": null, "uri": "mongodb://bob:bar@[::1]:27018", "valid": true, "warning": false }, { "auth": { "db": "admin", "password": "bar", "username": "bob" }, "description": "User info for single IP literal host with database", "hosts": [ { "host": "::1", "port": 27018, "type": "ip_literal" } ], "options": null, "uri": "mongodb://bob:bar@[::1]:27018/admin", "valid": true, "warning": false }, { "auth": { "db": null, "password": "baz", "username": "eve" }, "description": "User info for single hostname without database", "hosts": [ { "host": "example.com", "port": null, "type": "hostname" } ], "options": null, "uri": "mongodb://eve:baz@example.com", "valid": true, "warning": false }, { "auth": { "db": "db2", "password": "baz", "username": "eve" }, "description": "User info for single hostname with database", "hosts": [ { "host": "example.com", "port": null, "type": "hostname" } ], "options": null, "uri": "mongodb://eve:baz@example.com/db2", "valid": true, "warning": false }, { "auth": { "db": null, "password": "secret", "username": "alice" }, "description": "User info for multiple hosts without database", "hosts": [ { "host": "127.0.0.1", "port": null, "type": "ipv4" }, { "host": "example.com", "port": 27018, "type": "hostname" } ], "options": null, "uri": "mongodb://alice:secret@127.0.0.1,example.com:27018", "valid": true, "warning": false }, { "auth": { "db": "admin", "password": "secret", "username": "alice" }, "description": "User info for multiple hosts with 
database", "hosts": [ { "host": "example.com", "port": null, "type": "hostname" }, { "host": "::1", "port": 27019, "type": "ip_literal" } ], "options": null, "uri": "mongodb://alice:secret@example.com,[::1]:27019/admin", "valid": true, "warning": false }, { "auth": { "db": null, "password": null, "username": "alice" }, "description": "Username without password", "hosts": [ { "host": "127.0.0.1", "port": null, "type": "ipv4" } ], "options": null, "uri": "mongodb://alice@127.0.0.1", "valid": true, "warning": false }, { "auth": { "db": null, "password": "", "username": "alice" }, "description": "Username with empty password", "hosts": [ { "host": "127.0.0.1", "port": null, "type": "ipv4" } ], "options": null, "uri": "mongodb://alice:@127.0.0.1", "valid": true, "warning": false }, { "auth": { "db": "my=db", "password": null, "username": "@l:ce" }, "description": "Escaped username and database without password", "hosts": [ { "host": "example.com", "port": null, "type": "hostname" } ], "options": null, "uri": "mongodb://%40l%3Ace@example.com/my%3Ddb", "valid": true, "warning": false }, { "auth": { "db": "admin?", "password": "f:zzb@zz", "username": "$am" }, "description": "Escaped user info and database (MONGODB-CR)", "hosts": [ { "host": "127.0.0.1", "port": null, "type": "ipv4" } ], "options": { "authmechanism": "MONGODB-CR" }, "uri": "mongodb://%24am:f%3Azzb%40zz@127.0.0.1/admin%3F?authMechanism=MONGODB-CR", "valid": true, "warning": false }, { "auth": { "db": null, "password": null, "username": "CN=myName,OU=myOrgUnit,O=myOrg,L=myLocality,ST=myState,C=myCountry" }, "description": "Escaped username (MONGODB-X509)", "hosts": [ { "host": "localhost", "port": null, "type": "hostname" } ], "options": { "authmechanism": "MONGODB-X509" }, "uri": "mongodb://CN%3DmyName%2COU%3DmyOrgUnit%2CO%3DmyOrg%2CL%3DmyLocality%2CST%3DmyState%2CC%3DmyCountry@localhost/?authMechanism=MONGODB-X509", "valid": true, "warning": false }, { "auth": { "db": null, "password": "secret", "username": 
"user@EXAMPLE.COM" }, "description": "Escaped username (GSSAPI)", "hosts": [ { "host": "localhost", "port": null, "type": "hostname" } ], "options": { "authmechanism": "GSSAPI", "authmechanismproperties": { "CANONICALIZE_HOST_NAME": true, "SERVICE_NAME": "other" } }, "uri": "mongodb://user%40EXAMPLE.COM:secret@localhost/?authMechanismProperties=SERVICE_NAME:other,CANONICALIZE_HOST_NAME:true&authMechanism=GSSAPI", "valid": true, "warning": false }, { "auth": { "db": "admin", "password": "secret", "username": "alice" }, "description": "At-signs in options aren't part of the userinfo", "hosts": [ { "host": "example.com", "port": null, "type": "hostname" } ], "options": { "replicaset": "my@replicaset" }, "uri": "mongodb://alice:secret@example.com/admin?replicaset=my@replicaset", "valid": true, "warning": false } ] } mongomock-4.1.3/tests/connection_string/test/valid-host_identifiers.json000077500000000000000000000105571456276026200267350ustar00rootroot00000000000000{ "tests": [ { "auth": null, "description": "Single IPv4 host without port", "hosts": [ { "host": "127.0.0.1", "port": null, "type": "ipv4" } ], "options": null, "uri": "mongodb://127.0.0.1", "valid": true, "warning": false }, { "auth": null, "description": "Single IPv4 host with port", "hosts": [ { "host": "127.0.0.1", "port": 27018, "type": "ipv4" } ], "options": null, "uri": "mongodb://127.0.0.1:27018", "valid": true, "warning": false }, { "auth": null, "description": "Single IP literal host without port", "hosts": [ { "host": "::1", "port": null, "type": "ip_literal" } ], "options": null, "uri": "mongodb://[::1]", "valid": true, "warning": false }, { "auth": null, "description": "Single IP literal host with port", "hosts": [ { "host": "::1", "port": 27019, "type": "ip_literal" } ], "options": null, "uri": "mongodb://[::1]:27019", "valid": true, "warning": false }, { "auth": null, "description": "Single hostname without port", "hosts": [ { "host": "example.com", "port": null, "type": "hostname" } ], 
"options": null, "uri": "mongodb://example.com", "valid": true, "warning": false }, { "auth": null, "description": "Single hostname with port", "hosts": [ { "host": "example.com", "port": 27020, "type": "hostname" } ], "options": null, "uri": "mongodb://example.com:27020", "valid": true, "warning": false }, { "auth": null, "description": "Single hostname (resembling IPv4) without port", "hosts": [ { "host": "256.0.0.1", "port": null, "type": "hostname" } ], "options": null, "uri": "mongodb://256.0.0.1", "valid": true, "warning": false }, { "auth": null, "description": "Multiple hosts (mixed formats)", "hosts": [ { "host": "127.0.0.1", "port": null, "type": "ipv4" }, { "host": "::1", "port": 27018, "type": "ip_literal" }, { "host": "example.com", "port": 27019, "type": "hostname" } ], "options": null, "uri": "mongodb://127.0.0.1,[::1]:27018,example.com:27019", "valid": true, "warning": false }, { "auth": null, "description": "UTF-8 hosts", "hosts": [ { "host": "b\u00fccher.example.com", "port": null, "type": "hostname" }, { "host": "uml\u00e4ut.example.com", "port": null, "type": "hostname" } ], "options": null, "uri": "mongodb://b\u00fccher.example.com,uml\u00e4ut.example.com/", "valid": true, "warning": false } ] } mongomock-4.1.3/tests/connection_string/test/valid-options.json000077500000000000000000000022711456276026200250600ustar00rootroot00000000000000{ "tests": [ { "auth": { "db": "admin", "password": "secret", "username": "alice" }, "description": "Option names are normalized to lowercase", "hosts": [ { "host": "example.com", "port": null, "type": "hostname" } ], "options": { "authmechanism": "MONGODB-CR" }, "uri": "mongodb://alice:secret@example.com/admin?AUTHMechanism=MONGODB-CR", "valid": true, "warning": false }, { "auth": null, "description": "Option key and value (escaped null bytes)", "hosts": [ { "host": "example.com", "port": null, "type": "hostname" } ], "options": { "replicaset": "my\u0000rs" }, "uri": "mongodb://example.com/?replicaSet=my%00rs", 
"valid": true, "warning": false } ] } mongomock-4.1.3/tests/connection_string/test/valid-unix_socket-absolute.json000077500000000000000000000202301456276026200275270ustar00rootroot00000000000000{ "tests": [ { "auth": null, "description": "Unix domain socket (absolute path with trailing slash)", "hosts": [ { "host": "/tmp/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://%2Ftmp%2Fmongodb-27017.sock/", "valid": true, "warning": false }, { "auth": null, "description": "Unix domain socket (absolute path without trailing slash)", "hosts": [ { "host": "/tmp/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://%2Ftmp%2Fmongodb-27017.sock", "valid": true, "warning": false }, { "auth": null, "description": "Unix domain socket (absolute path with spaces in path)", "hosts": [ { "host": "/tmp/ /mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://%2Ftmp%2F %2Fmongodb-27017.sock", "valid": true, "warning": false }, { "auth": null, "description": "Multiple Unix domain sockets (absolute paths)", "hosts": [ { "host": "/tmp/mongodb-27017.sock", "port": null, "type": "unix" }, { "host": "/tmp/mongodb-27018.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://%2Ftmp%2Fmongodb-27017.sock,%2Ftmp%2Fmongodb-27018.sock", "valid": true, "warning": false }, { "auth": null, "description": "Multiple hosts (absolute path and ipv4)", "hosts": [ { "host": "127.0.0.1", "port": 27017, "type": "ipv4" }, { "host": "/tmp/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://127.0.0.1:27017,%2Ftmp%2Fmongodb-27017.sock", "valid": true, "warning": false }, { "auth": null, "description": "Multiple hosts (absolute path and hostname resembling relative path)", "hosts": [ { "host": "mongodb-27017.sock", "port": null, "type": "hostname" }, { "host": "/tmp/mongodb-27018.sock", "port": null, "type": "unix" } ], "options": null, "uri": 
"mongodb://mongodb-27017.sock,%2Ftmp%2Fmongodb-27018.sock", "valid": true, "warning": false }, { "auth": { "db": "admin", "password": "foo", "username": "alice" }, "description": "Unix domain socket with auth database (absolute path)", "hosts": [ { "host": "/tmp/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://alice:foo@%2Ftmp%2Fmongodb-27017.sock/admin", "valid": true, "warning": false }, { "auth": null, "description": "Unix domain socket with path resembling socket file (absolute path with trailing slash)", "hosts": [ { "host": "/tmp/path.to.sock/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://%2Ftmp%2Fpath.to.sock%2Fmongodb-27017.sock/", "valid": true, "warning": false }, { "auth": null, "description": "Unix domain socket with path resembling socket file (absolute path without trailing slash)", "hosts": [ { "host": "/tmp/path.to.sock/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://%2Ftmp%2Fpath.to.sock%2Fmongodb-27017.sock", "valid": true, "warning": false }, { "auth": { "db": "admin", "password": "bar", "username": "bob" }, "description": "Unix domain socket with path resembling socket file and auth (absolute path)", "hosts": [ { "host": "/tmp/path.to.sock/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://bob:bar@%2Ftmp%2Fpath.to.sock%2Fmongodb-27017.sock/admin", "valid": true, "warning": false }, { "auth": { "db": "admin.sock", "password": null, "username": null }, "description": "Multiple Unix domain sockets and auth DB resembling a socket (absolute path)", "hosts": [ { "host": "/tmp/mongodb-27017.sock", "port": null, "type": "unix" }, { "host": "/tmp/mongodb-27018.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://%2Ftmp%2Fmongodb-27017.sock,%2Ftmp%2Fmongodb-27018.sock/admin.sock", "valid": true, "warning": false }, { "auth": { "db": "admin.shoe", "password": null, 
"username": null }, "description": "Multiple Unix domain sockets with auth DB resembling a path (absolute path)", "hosts": [ { "host": "/tmp/mongodb-27017.sock", "port": null, "type": "unix" }, { "host": "/tmp/mongodb-27018.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://%2Ftmp%2Fmongodb-27017.sock,%2Ftmp%2Fmongodb-27018.sock/admin.shoe", "valid": true, "warning": false }, { "auth": { "db": "admin", "password": "bar", "username": "bob" }, "description": "Multiple Unix domain sockets with auth and query string (absolute path)", "hosts": [ { "host": "/tmp/mongodb-27017.sock", "port": null, "type": "unix" }, { "host": "/tmp/mongodb-27018.sock", "port": null, "type": "unix" } ], "options": { "w": 1 }, "uri": "mongodb://bob:bar@%2Ftmp%2Fmongodb-27017.sock,%2Ftmp%2Fmongodb-27018.sock/admin?w=1", "valid": true, "warning": false } ] } mongomock-4.1.3/tests/connection_string/test/valid-unix_socket-relative.json000077500000000000000000000213601456276026200275310ustar00rootroot00000000000000{ "tests": [ { "auth": null, "description": "Unix domain socket (relative path with trailing slash)", "hosts": [ { "host": "rel/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://rel%2Fmongodb-27017.sock/", "valid": true, "warning": false }, { "auth": null, "description": "Unix domain socket (relative path without trailing slash)", "hosts": [ { "host": "rel/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://rel%2Fmongodb-27017.sock", "valid": true, "warning": false }, { "auth": null, "description": "Unix domain socket (relative path with spaces)", "hosts": [ { "host": "rel/ /mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://rel%2F %2Fmongodb-27017.sock", "valid": true, "warning": false }, { "auth": null, "description": "Multiple Unix domain sockets (relative paths)", "hosts": [ { "host": "rel/mongodb-27017.sock", "port": null, "type": "unix" }, { 
"host": "rel/mongodb-27018.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://rel%2Fmongodb-27017.sock,rel%2Fmongodb-27018.sock", "valid": true, "warning": false }, { "auth": null, "description": "Multiple Unix domain sockets (relative and absolute paths)", "hosts": [ { "host": "rel/mongodb-27017.sock", "port": null, "type": "unix" }, { "host": "/tmp/mongodb-27018.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://rel%2Fmongodb-27017.sock,%2Ftmp%2Fmongodb-27018.sock", "valid": true, "warning": false }, { "auth": null, "description": "Multiple hosts (relative path and ipv4)", "hosts": [ { "host": "127.0.0.1", "port": 27017, "type": "ipv4" }, { "host": "rel/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://127.0.0.1:27017,rel%2Fmongodb-27017.sock", "valid": true, "warning": false }, { "auth": null, "description": "Multiple hosts (relative path and hostname resembling relative path)", "hosts": [ { "host": "mongodb-27017.sock", "port": null, "type": "hostname" }, { "host": "rel/mongodb-27018.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://mongodb-27017.sock,rel%2Fmongodb-27018.sock", "valid": true, "warning": false }, { "auth": { "db": "admin", "password": "foo", "username": "alice" }, "description": "Unix domain socket with auth database (relative path)", "hosts": [ { "host": "rel/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://alice:foo@rel%2Fmongodb-27017.sock/admin", "valid": true, "warning": false }, { "auth": null, "description": "Unix domain socket with path resembling socket file (relative path with trailing slash)", "hosts": [ { "host": "rel/path.to.sock/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://rel%2Fpath.to.sock%2Fmongodb-27017.sock/", "valid": true, "warning": false }, { "auth": null, "description": "Unix domain socket with path resembling socket file 
(relative path without trailing slash)", "hosts": [ { "host": "rel/path.to.sock/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://rel%2Fpath.to.sock%2Fmongodb-27017.sock", "valid": true, "warning": false }, { "auth": { "db": "admin", "password": "bar", "username": "bob" }, "description": "Unix domain socket with path resembling socket file and auth (relative path)", "hosts": [ { "host": "rel/path.to.sock/mongodb-27017.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://bob:bar@rel%2Fpath.to.sock%2Fmongodb-27017.sock/admin", "valid": true, "warning": false }, { "auth": { "db": "admin.sock", "password": null, "username": null }, "description": "Multiple Unix domain sockets and auth DB resembling a socket (relative path)", "hosts": [ { "host": "rel/mongodb-27017.sock", "port": null, "type": "unix" }, { "host": "rel/mongodb-27018.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://rel%2Fmongodb-27017.sock,rel%2Fmongodb-27018.sock/admin.sock", "valid": true, "warning": false }, { "auth": { "db": "admin.shoe", "password": null, "username": null }, "description": "Multiple Unix domain sockets with auth DB resembling a path (relative path)", "hosts": [ { "host": "rel/mongodb-27017.sock", "port": null, "type": "unix" }, { "host": "rel/mongodb-27018.sock", "port": null, "type": "unix" } ], "options": null, "uri": "mongodb://rel%2Fmongodb-27017.sock,rel%2Fmongodb-27018.sock/admin.shoe", "valid": true, "warning": false }, { "auth": { "db": "admin", "password": "bar", "username": "bob" }, "description": "Multiple Unix domain sockets with auth and query string (relative path)", "hosts": [ { "host": "rel/mongodb-27017.sock", "port": null, "type": "unix" }, { "host": "rel/mongodb-27018.sock", "port": null, "type": "unix" } ], "options": { "w": 1 }, "uri": "mongodb://bob:bar@rel%2Fmongodb-27017.sock,rel%2Fmongodb-27018.sock/admin?w=1", "valid": true, "warning": false } ] } 
mongomock-4.1.3/tests/connection_string/test/valid-warnings.json000077500000000000000000000036731456276026200252240ustar00rootroot00000000000000{ "tests": [ { "auth": null, "description": "Unrecognized option keys are ignored", "hosts": [ { "host": "example.com", "port": null, "type": "hostname" } ], "options": null, "uri": "mongodb://example.com/?foo=bar", "valid": true, "warning": true }, { "auth": null, "description": "Unsupported option values are ignored", "hosts": [ { "host": "example.com", "port": null, "type": "hostname" } ], "options": null, "uri": "mongodb://example.com/?fsync=ifPossible", "valid": true, "warning": true }, { "auth": null, "description": "Repeated option keys", "hosts": [ { "host": "example.com", "port": null, "type": "hostname" } ], "options": { "replicaset": "test" }, "uri": "mongodb://example.com/?replicaSet=test&replicaSet=test", "valid": true, "warning": true }, { "auth": null, "description": "Deprecated (or unknown) options are ignored if replacement exists", "hosts": [ { "host": "example.com", "port": null, "type": "hostname" } ], "options": { "wtimeoutms": 10 }, "uri": "mongodb://example.com/?wtimeout=5&wtimeoutMS=10", "valid": true, "warning": true } ] } mongomock-4.1.3/tests/diff.py000066400000000000000000000056521456276026200161560ustar00rootroot00000000000000import datetime import decimal from platform import python_version import re import uuid try: from bson import decimal128, Regex _HAVE_PYMONGO = True except ImportError: _HAVE_PYMONGO = False class _NO_VALUE(object): pass # we don't use NOTHING because it might be returned from various APIs NO_VALUE = _NO_VALUE() _SUPPORTED_BASE_TYPES = ( float, bool, str, datetime.datetime, type(None), uuid.UUID, int, bytes, type, type(re.compile('')),) if _HAVE_PYMONGO: _SUPPORTED_TYPES = _SUPPORTED_BASE_TYPES + (decimal.Decimal, decimal128.Decimal128) else: _SUPPORTED_TYPES = _SUPPORTED_BASE_TYPES if python_version() < '3.0': dict_type = dict else: from collections import abc dict_type = 
abc.Mapping def diff(a, b, path=None): path = _make_path(path) if isinstance(a, (list, tuple)) and isinstance(b, (list, tuple)): return _diff_sequences(a, b, path) if type(a).__name__ == 'SON': a = dict(a) if type(b).__name__ == 'SON': b = dict(b) if type(a).__name__ == 'DBRef': a = a.as_doc() if type(b).__name__ == 'DBRef': b = b.as_doc() if isinstance(a, dict_type) and isinstance(b, dict_type): return _diff_dicts(a, b, path) if type(a).__name__ == 'ObjectId': a = str(a) if type(b).__name__ == 'ObjectId': b = str(b) if type(a).__name__ == 'Int64': a = int(a) if type(b).__name__ == 'Int64': b = int(b) if _HAVE_PYMONGO and isinstance(a, Regex): a = a.try_compile() if _HAVE_PYMONGO and isinstance(b, Regex): b = b.try_compile() if isinstance(a, (list, tuple)) or isinstance(b, (list, tuple)) or \ isinstance(a, dict_type) or isinstance(b, dict_type): return [(path[:], a, b)] if not isinstance(a, _SUPPORTED_TYPES): raise NotImplementedError( 'Unsupported diff type: {0}'.format(type(a))) # pragma: no cover if not isinstance(b, _SUPPORTED_TYPES): raise NotImplementedError( 'Unsupported diff type: {0}'.format(type(b))) # pragma: no cover if a != b: return [(path[:], a, b)] return [] def _diff_dicts(a, b, path): if not isinstance(a, type(b)): return [(path[:], type(a), type(b))] returned = [] for key in set(a) | set(b): a_value = a.get(key, NO_VALUE) b_value = b.get(key, NO_VALUE) path.append(key) if a_value is NO_VALUE or b_value is NO_VALUE: returned.append((path[:], a_value, b_value)) else: returned.extend(diff(a_value, b_value, path)) path.pop() return returned def _diff_sequences(a, b, path): if len(a) != len(b): return [(path[:], a, b)] returned = [] for i, a_i in enumerate(a): path.append(i) returned.extend(diff(a_i, b[i], path)) path.pop() return returned def _make_path(path): if path is None: return [] return path mongomock-4.1.3/tests/multicollection.py000066400000000000000000000120341456276026200204440ustar00rootroot00000000000000from .diff import diff from 
collections import OrderedDict import copy import functools from mongomock.helpers import RE_TYPE _COMPARE_EXCEPTIONS = 'exceptions' class MultiCollection(object): def __init__(self, conns): super(MultiCollection, self).__init__() self.conns = conns.copy() self.do = Foreach(self.conns, compare=False) self.compare = Foreach(self.conns, compare=True) self.compare_ignore_order = Foreach( self.conns, compare=True, ignore_order=True) self.compare_exceptions = Foreach(self.conns, compare=_COMPARE_EXCEPTIONS) class Foreach(object): def __init__(self, objs, compare, ignore_order=False, method_result_decorators=()): self.___objs = objs self.___compare = compare self.___ignore_order = ignore_order self.___decorators = list(method_result_decorators) self.___sort_by = None def __getattr__(self, method_name): return ForeachMethod( self.___objs, self.___compare, self.___ignore_order, method_name, self.___decorators, self.___sort_by) def sort_by(self, fun): self.___sort_by = fun return self def __call__(self, *decorators): return Foreach( self.___objs, self.___compare, self.___ignore_order, self.___decorators + list(decorators)) class ForeachMethod(object): def __init__(self, objs, compare, ignore_order, method_name, decorators, sort_by): self.___objs = objs self.___compare = compare self.___ignore_order = ignore_order self.___method_name = method_name self.___decorators = decorators self.___sort_by = sort_by def _call(self, obj, args, kwargs): # copying the args and kwargs is important, because pymongo changes # the dicts (fits them with the _id) return self.___apply_decorators( getattr(obj, self.___method_name)(*_deepcopy(args), **_deepcopy(kwargs))) def _get_exception_type(self, obj, args, kwargs, name): try: self._call(obj, args, kwargs) assert False, 'No exception raised for ' + name except Exception as err: return type(err) def __call__(self, *args, **kwargs): if self.___compare == _COMPARE_EXCEPTIONS: results = dict( (name, self._get_exception_type(obj, args, kwargs, 
name=name)) for name, obj in self.___objs.items() ) else: results = dict( (name, self._call(obj, args, kwargs)) for name, obj in self.___objs.items() ) if self.___compare: _assert_no_diff(results, ignore_order=self.___ignore_order, sort_by=self.___sort_by) return results def ___apply_decorators(self, obj): for d in self.___decorators: obj = d(obj) return obj def _assert_no_diff(results, ignore_order, sort_by): if _result_is_cursor(results) or _result_is_command_cursor(results): # If we were given a sort function, use that. if sort_by is not None: value_processor = functools.partial(_expand_cursor, sort=ignore_order, by=sort_by) else: value_processor = functools.partial(_expand_cursor, sort=ignore_order) else: assert not ignore_order value_processor = None prev_name = prev_value = None for index, (name, value) in enumerate(results.items()): if value_processor is not None: value = value_processor(value) if index > 0: d = diff(prev_value, value) assert not d, _format_diff_message(prev_name, name, d) prev_name = name prev_value = value def _result_is_cursor(results): return any(type(result).__name__ == 'Cursor' for result in results.values()) def _result_is_command_cursor(results): return any(type(result).__name__ == 'CommandCursor' for result in results.values()) def by_id(document): return str(document.get('_id', str(document))) def _expand_cursor(cursor, sort, by=by_id): returned = [result.copy() for result in cursor] if sort: returned.sort(key=by) for result in returned: result.pop('_id', None) return returned def _format_diff_message(a_name, b_name, diff_list): msg = 'Unexpected Diff:' for (path, a_value, b_value) in diff_list: a_path = [a_name] + path b_path = [b_name] + path msg += '\n\t{} != {} ({} != {})'.format( '.'.join(map(str, a_path)), '.'.join( map(str, b_path)), a_value, b_value) return msg def _deepcopy(x): """Deepcopy, but ignore regex objects...""" if isinstance(x, RE_TYPE): return x if isinstance(x, list) or isinstance(x, tuple): return 
type(x)(_deepcopy(y) for y in x) if isinstance(x, (dict, OrderedDict)): return type(x)((_deepcopy(k), _deepcopy(v)) for k, v in x.items()) return copy.deepcopy(x) mongomock-4.1.3/tests/test__bulk_operations.py000066400000000000000000000252531456276026200216430ustar00rootroot00000000000000import os import mongomock from mongomock import helpers from packaging import version try: from unittest import mock _HAVE_MOCK = True except ImportError: try: import mock _HAVE_MOCK = True except ImportError: _HAVE_MOCK = False try: import pymongo except ImportError: pymongo = None from tests.multicollection import MultiCollection from unittest import TestCase, skipIf # https://pymongo.readthedocs.io/en/stable/migrate-to-pymongo4.html#collection-initialize-ordered-bulk-op-and-initialize-unordered-bulk-op-is-removed @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 or above') class BulkOperationsTest(TestCase): test_with_pymongo = False def setUp(self): super(BulkOperationsTest, self).setUp() if self.test_with_pymongo: self.client = pymongo.MongoClient(host=os.environ.get('TEST_MONGO_HOST', 'localhost')) else: self.client = mongomock.MongoClient() self.db = self.client['somedb'] self.db.collection.drop() for _i in 'abx': self.db.collection.create_index( _i, unique=False, name='idx' + _i, sparse=True, background=True) self.bulk_op = self.db.collection.initialize_ordered_bulk_op() def __check_document(self, doc, count=1): found_num = self.db.collection.find(doc).count() if found_num != count: all = list(self.db.collection.find()) self.fail('Document %s count()=%s BUT expected count=%s! 
All' ' documents: %s' % (doc, found_num, count, all)) def __check_result(self, result, **expecting_values): for key in ('nModified', 'nUpserted', 'nMatched', 'writeErrors', 'upserted', 'writeConcernErrors', 'nRemoved', 'nInserted'): exp_val = expecting_values.get(key) has_val = result.get(key) if self.test_with_pymongo and key == 'nModified' and has_val is None: # ops, real pymongo did not returned 'nModified' key! continue self.assertFalse(has_val is None, "Missed key '%s' in result: %s" % (key, result)) if exp_val: self.assertEqual( exp_val, has_val, 'Invalid result %s=%s (but expected value=%s)' % ( key, has_val, exp_val)) else: self.assertFalse( bool(has_val), 'Received unexpected value %s = %s' % (key, has_val)) def __execute_and_check_result(self, write_concern=None, **expecting_result): result = self.bulk_op.execute(write_concern=write_concern) self.__check_result(result, **expecting_result) def __check_number_of_elements(self, count): has_count = self.db.collection.count() self.assertEqual( has_count, count, 'There is %s documents but there should be %s' % (has_count, count)) def test__insert(self): self.bulk_op.insert({'a': 1, 'b': 2}) self.bulk_op.insert({'a': 2, 'b': 4}) self.bulk_op.insert({'a': 2, 'b': 6}) self.__check_number_of_elements(0) self.__execute_and_check_result(nInserted=3) self.__check_document({'a': 1, 'b': 2}) self.__check_document({'a': 2, 'b': 4}) self.__check_document({'a': 2, 'b': 6}) def test__bulk_update_must_raise_error_if_missed_operator(self): self.assertRaises(ValueError, self.bulk_op.find({'a': 1}).update, {'b': 20}) def test__bulk_execute_must_raise_error_if_bulk_empty(self): self.assertRaises(mongomock.InvalidOperation, self.bulk_op.execute) def test_update(self): self.bulk_op.find({'a': 1}).update({'$set': {'b': 20}}) self.__execute_and_check_result() self.__check_number_of_elements(0) def test__update_must_update_all_documents(self): self.db.collection.insert_one({'a': 1, 'b': 2}) self.db.collection.insert_one({'a': 2, 'b': 
4}) self.db.collection.insert_one({'a': 2, 'b': 8}) self.bulk_op.find({'a': 1}).update({'$set': {'b': 20}}) self.bulk_op.find({'a': 2}).update({'$set': {'b': 40}}) self.__check_document({'a': 1, 'b': 2}) self.__check_document({'a': 2, 'b': 4}) self.__check_document({'a': 2, 'b': 8}) self.__execute_and_check_result(nMatched=3, nModified=3) self.__check_document({'a': 1, 'b': 20}) self.__check_document({'a': 2, 'b': 40}, 2) def test__ordered_insert_and_update(self): self.bulk_op.insert({'a': 1, 'b': 2}) self.bulk_op.find({'a': 1}).update({'$set': {'b': 3}}) self.__execute_and_check_result(nInserted=1, nMatched=1, nModified=1) self.__check_document({'a': 1, 'b': 3}) def test__update_one(self): self.db.collection.insert_one({'a': 2, 'b': 1}) self.db.collection.insert_one({'a': 2, 'b': 2}) self.bulk_op.find({'a': 2}).update_one({'$set': {'b': 3}}) self.__execute_and_check_result(nMatched=1, nModified=1) self.__check_document({'a': 2}, count=2) self.__check_number_of_elements(2) def test__remove(self): self.db.collection.insert_one({'a': 2, 'b': 1}) self.db.collection.insert_one({'a': 2, 'b': 2}) self.bulk_op.find({'a': 2}).remove() self.__execute_and_check_result(nRemoved=2) self.__check_number_of_elements(0) def test__remove_one(self): self.db.collection.insert_one({'a': 2, 'b': 1}) self.db.collection.insert_one({'a': 2, 'b': 2}) self.bulk_op.find({'a': 2}).remove_one() self.__execute_and_check_result(nRemoved=1) self.__check_document({'a': 2}, 1) self.__check_number_of_elements(1) @skipIf(not _HAVE_MOCK, 'The mock library is not installed') def test_upsert_replace_one_on_empty_set(self): self.bulk_op.find({}).upsert().replace_one({'x': 1}) self.__execute_and_check_result(nUpserted=1, upserted=[{'index': 0, '_id': mock.ANY}]) def test_upsert_replace_one(self): self.db.collection.insert_one({'a': 2, 'b': 1}) self.db.collection.insert_one({'a': 2, 'b': 2}) self.bulk_op.find({'a': 2}).replace_one({'x': 1}) self.__execute_and_check_result(nModified=1, nMatched=1) 
self.__check_document({'a': 2}, 1) self.__check_document({'x': 1}, 1) self.__check_number_of_elements(2) @skipIf(not _HAVE_MOCK, 'The mock library is not installed') def test_upsert_update_on_empty_set(self): self.bulk_op.find({}).upsert().update({'$set': {'a': 1, 'b': 2}}) self.__execute_and_check_result(nUpserted=1, upserted=[{'index': 0, '_id': mock.ANY}]) self.__check_document({'a': 1, 'b': 2}) self.__check_number_of_elements(1) def test_upsert_update(self): self.db.collection.insert_one({'a': 2, 'b': 1}) self.db.collection.insert_one({'a': 2, 'b': 2}) self.bulk_op.find({'a': 2}).upsert().update({'$set': {'b': 3}}) self.__execute_and_check_result(nMatched=2, nModified=2) self.__check_document({'a': 2, 'b': 3}, 2) self.__check_number_of_elements(2) def test_upsert_update_one(self): self.db.collection.insert_one({'a': 2, 'b': 1}) self.db.collection.insert_one({'a': 2, 'b': 1}) self.bulk_op.find({'a': 2}).upsert().update_one({'$inc': {'b': 1, 'x': 1}}) self.__execute_and_check_result(nModified=1, nMatched=1) self.__check_document({'a': 2, 'b': 1}, 1) self.__check_document({'a': 2, 'b': 2, 'x': 1}, 1) self.__check_number_of_elements(2) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') @skipIf(os.getenv('NO_LOCAL_MONGO'), 'No local Mongo server running') class BulkOperationsWithPymongoTest(BulkOperationsTest): test_with_pymongo = True @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') @skipIf(os.getenv('NO_LOCAL_MONGO'), 'No local Mongo server running') # https://pymongo.readthedocs.io/en/stable/migrate-to-pymongo4.html#collection-initialize-ordered-bulk-op-and-initialize-unordered-bulk-op-is-removed @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 or above') class CollectionComparisonTest(TestCase): def setUp(self): super(CollectionComparisonTest, self).setUp() self.fake_conn = mongomock.MongoClient() self.mongo_conn = pymongo.MongoClient(host=os.environ.get('TEST_MONGO_HOST', 'localhost')) self.db_name = 
'mongomock___testing_db' self.collection_name = 'mongomock___testing_collection' self.mongo_conn[self.db_name][self.collection_name].remove() self.cmp = MultiCollection({ 'fake': self.fake_conn[self.db_name][self.collection_name], 'real': self.mongo_conn[self.db_name][self.collection_name], }) self.bulks = MultiCollection({ 'fake': self.cmp.conns['fake'].initialize_ordered_bulk_op(), 'real': self.cmp.conns['real'].initialize_ordered_bulk_op() }) # hacky! Depending on mongo server version 'nModified' is returned or not.. # so let make simple bulk operation to know what's the server behaviour... coll = self.mongo_conn[self.db_name]['mongomock_testing_prepare_test'] bulk = coll.initialize_ordered_bulk_op() bulk.insert({'a': 1}) insert_returns_nmodified = 'nModified' in bulk.execute() bulk = self.cmp.conns['real'].initialize_ordered_bulk_op() bulk.find({'a': 1}).update({'$set': {'a': 2}}) update_returns_nmodified = 'nModified' in bulk.execute() coll.drop() self.bulks.conns['fake']._set_nModified_policy( insert_returns_nmodified, update_returns_nmodified) def test__insert(self): self.bulks.do.insert({'a': 1, 'b': 1}) self.bulks.do.insert({'a': 2, 'b': 2}) self.bulks.do.insert({'a': 2, 'b': 2}) self.bulks.compare.execute() def test__mixed_operations(self): self.cmp.do.insert({'a': 1, 'b': 3}) self.cmp.do.insert({'a': 2, 'c': 1}) self.cmp.do.insert({'a': 2, 'c': 2}) self.cmp.do.insert({'a': 3, 'c': 1}) self.cmp.do.insert({'a': 4, 'd': 2}) self.cmp.do.insert({'a': 5, 'd': 11}) self.cmp.do.insert({'a': 5, 'd': 22}) self.bulks.do.insert({'a': 1, 'b': 1}) for bwo in self.bulks.do.find({'a': 2}).values(): bwo.remove_one() for bwo in self.bulks.do.find({'a': 3}).values(): bwo.update({'$inc': {'b': 1}}) for bwo in self.bulks.do.find({'a': 4}).values(): bwo.upsert().replace_one({'b': 11, 'x': 'y'}) for bwo in self.bulks.do.find({'a': 5}).values(): bwo.upsert().update({'$inc': {'b': 11}}) self.bulks.compare.execute() self.cmp.compare.find(sort=[('a', 1), ('b', 1), ('c', 1), ('d', 
1)]) mongomock-4.1.3/tests/test__client_api.py000066400000000000000000000157771456276026200205640ustar00rootroot00000000000000from packaging import version import sys import unittest from unittest import skipIf, skipUnless try: from unittest import mock _HAVE_MOCK = True except ImportError: try: import mock _HAVE_MOCK = True except ImportError: _HAVE_MOCK = False import mongomock from mongomock import helpers try: from bson import codec_options from pymongo.read_preferences import ReadPreference except ImportError: pass class MongoClientApiTest(unittest.TestCase): def test__read_preference(self): client = mongomock.MongoClient() self.assertEqual('Primary', client.read_preference.name) self.assertEqual(client.read_preference, client.db.read_preference) self.assertEqual(client.read_preference, client.db.coll.read_preference) client2 = mongomock.MongoClient(read_preference=client.read_preference) self.assertEqual(client2.read_preference, client.read_preference) with self.assertRaises(TypeError): mongomock.MongoClient(read_preference=0) @unittest.skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__different_read_preference(self): client = mongomock.MongoClient(read_preference=ReadPreference.NEAREST) self.assertEqual(ReadPreference.NEAREST, client.db.read_preference) self.assertEqual(ReadPreference.NEAREST, client.db.coll.read_preference) @unittest.skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__codec_options_with_pymongo(self): client = mongomock.MongoClient() self.assertEqual(codec_options.CodecOptions(), client.codec_options) self.assertFalse(client.codec_options.tz_aware) def test__codec_options(self): client = mongomock.MongoClient() self.assertFalse(client.codec_options.tz_aware) client = mongomock.MongoClient(tz_aware=True) self.assertTrue(client.codec_options.tz_aware) self.assertTrue(client.db.collection.codec_options.tz_aware) with self.assertRaises(TypeError): mongomock.MongoClient(tz_aware='True') def 
test__parse_url(self): client = mongomock.MongoClient('mongodb://localhost:27017/') self.assertEqual(('localhost', 27017), client.address) client = mongomock.MongoClient('mongodb://localhost:1234,example.com/') self.assertEqual(('localhost', 1234), client.address) client = mongomock.MongoClient('mongodb://example.com,localhost:1234/') self.assertEqual(('example.com', 27017), client.address) client = mongomock.MongoClient('mongodb://[::1]:1234/') self.assertEqual(('::1', 1234), client.address) with self.assertRaises(ValueError): mongomock.MongoClient('mongodb://localhost:1234:456/') with self.assertRaises(ValueError): mongomock.MongoClient('mongodb://localhost:123456/') with self.assertRaises(ValueError): mongomock.MongoClient('mongodb://localhost:mongoport/') def test__equality(self): self.assertEqual( mongomock.MongoClient('mongodb://localhost:27017/'), mongomock.MongoClient('mongodb://localhost:27017/')) self.assertEqual( mongomock.MongoClient('mongodb://localhost:27017/'), mongomock.MongoClient('localhost')) self.assertNotEqual( mongomock.MongoClient('/var/socket/mongo.sock'), mongomock.MongoClient('localhost')) @skipIf(sys.version_info < (3,), 'Older versions of Python do not handle hashing the same way') @skipUnless( helpers.PYMONGO_VERSION < version.parse('3.12'), "older versions of pymongo didn't have proper hashing") def test__not_hashable(self): with self.assertRaises(TypeError): {mongomock.MongoClient('localhost')} # pylint: disable=expression-not-assigned @skipIf(sys.version_info < (3,), 'Older versions of Python do not handle hashing the same way') @skipIf( helpers.PYMONGO_VERSION < version.parse('3.12'), "older versions of pymongo didn't have proper hashing") def test__hashable(self): {mongomock.MongoClient('localhost')} # pylint: disable=expression-not-assigned def test__parse_hosts(self): client = mongomock.MongoClient('localhost') self.assertEqual(('localhost', 27017), client.address) client = mongomock.MongoClient('localhost:1234,example.com') 
self.assertEqual(('localhost', 1234), client.address) client = mongomock.MongoClient('example.com,localhost:1234') self.assertEqual(('example.com', 27017), client.address) client = mongomock.MongoClient('[::1]:1234') self.assertEqual(('::1', 1234), client.address) client = mongomock.MongoClient('/var/socket/mongo.sock') self.assertEqual(('/var/socket/mongo.sock', None), client.address) with self.assertRaises(ValueError): mongomock.MongoClient('localhost:1234:456') with self.assertRaises(ValueError): mongomock.MongoClient('localhost:123456') with self.assertRaises(ValueError): mongomock.MongoClient('localhost:mongoport') @unittest.skipIf(not _HAVE_MOCK, 'mock not installed') def test_database_names(self): client = mongomock.MongoClient() client.one_db.my_collec.insert_one({}) if helpers.PYMONGO_VERSION >= version.parse('4.0'): with self.assertRaises(TypeError): client.database_names() return with mock.patch('warnings.warn') as mock_warn: self.assertEqual(['one_db'], client.database_names()) self.assertEqual(1, mock_warn.call_count) self.assertIn('deprecated', mock_warn.call_args[0][0]) def test_list_database_names(self): client = mongomock.MongoClient() self.assertEqual([], client.list_database_names()) # Query a non existant collection. 
client.one_db.my_collec.find_one() self.assertEqual([], client.list_database_names()) client.one_db.my_collec.insert_one({}) self.assertEqual(['one_db'], client.list_database_names()) def test_client_implements_context_managers(self): with mongomock.MongoClient() as client: client.one_db.my_collec.insert_one({}) result = client.one_db.my_collec.find_one({}) self.assertTrue(result) def test_start_session(self): client = mongomock.MongoClient() with self.assertRaises(NotImplementedError): client.start_session() @mock.patch('mongomock.SERVER_VERSION', '3.6') def test_server_version(self): client = mongomock.MongoClient() server_info = client.server_info() self.assertEqual('3.6', server_info['version']) self.assertEqual([3, 6, 0, 0], server_info['versionArray']) def test_consistent_server_version(self): client = mongomock.MongoClient() server_info = client.server_info() with mock.patch('mongomock.SERVER_VERSION', '3.6'): self.assertEqual(server_info, client.server_info()) mongomock-4.1.3/tests/test__collection_api.py000066400000000000000000010561601456276026200214310ustar00rootroot00000000000000import collections import copy from datetime import datetime, tzinfo, timedelta from packaging import version import platform import random import re import sys from tests.diff import diff import time from unittest import TestCase, skipIf, skipUnless import uuid import warnings import mongomock from mongomock import helpers try: from unittest import mock _HAVE_MOCK = True except ImportError: try: import mock _HAVE_MOCK = True except ImportError: _HAVE_MOCK = False try: from bson import codec_options from bson.errors import InvalidDocument from bson import tz_util, ObjectId, Regex, decimal128, Timestamp, DBRef import pymongo from pymongo.collation import Collation from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference from pymongo import ReturnDocument from pymongo.write_concern import WriteConcern except ImportError: from 
mongomock.collection import ReturnDocument from mongomock import ObjectId from mongomock.read_concern import ReadConcern from mongomock.write_concern import WriteConcern from tests.utils import DBRef warnings.simplefilter('ignore', DeprecationWarning) IS_PYPY = platform.python_implementation() != 'CPython' SERVER_VERSION = version.parse(mongomock.SERVER_VERSION) class UTCPlus2(tzinfo): def fromutc(self, dt): return dt + self.utcoffset(dt) def tzname(self, dt): return '' def utcoffset(self, dt): return timedelta(hours=2) def dst(self, dt): return timedelta() class CollectionAPITest(TestCase): def setUp(self): super(CollectionAPITest, self).setUp() self.client = mongomock.MongoClient() self.db = self.client['somedb'] def test__get_subcollections(self): self.db.create_collection('a.b') self.assertEqual(self.db.a.b.full_name, 'somedb.a.b') self.assertEqual(self.db.a.b.name, 'a.b') self.assertEqual(set(self.db.list_collection_names()), set(['a.b'])) def test__get_subcollections_by_attribute_underscore(self): with self.assertRaises(AttributeError) as err_context: self.db.a._b # pylint: disable=pointless-statement self.assertIn("Collection has no attribute '_b'", str(err_context.exception)) # No problem accessing it through __get_item__. 
self.db.a['_b'].insert_one({'a': 1}) self.assertEqual(1, self.db.a['_b'].find_one().get('a')) def test__get_sibling_collection(self): self.assertEqual(self.db.a.database.b.full_name, 'somedb.b') self.assertEqual(self.db.a.database.b.name, 'b') def test__get_collection_read_concern_option(self): """Ensure read_concern option isn't rejected.""" self.assertTrue(self.db.get_collection('new_collection', read_concern=None)) def test__get_collection_full_name(self): self.assertEqual(self.db.coll.name, 'coll') self.assertEqual(self.db.coll.full_name, 'somedb.coll') def test__cursor_collection(self): self.assertIs(self.db.a.find().collection, self.db.a) def test__cursor_alive(self): self.db.collection.insert_one({'foo': 'bar'}) cursor = self.db.collection.find() self.assertTrue(cursor.alive) next(cursor) self.assertFalse(cursor.alive) def test__cursor_collation(self): self.db.collection.insert_one({'foo': 'bar'}) cursor = self.db.collection.find(collation='fr') self.assertEqual('fr', cursor.collation) def test__drop_collection(self): self.db.create_collection('a') self.db.create_collection('b') self.db.create_collection('c') self.db.drop_collection('b') self.db.drop_collection('b') self.db.drop_collection(self.db.c) self.assertEqual(set(self.db.list_collection_names()), set(['a'])) col = self.db.a r = col.insert_one({'aa': 'bb'}).inserted_id self.assertEqual(col.count_documents({'_id': r}), 1) self.db.drop_collection('a') self.assertEqual(col.count_documents({'_id': r}), 0) col = self.db.a r = col.insert_one({'aa': 'bb'}).inserted_id self.assertEqual(col.count_documents({'_id': r}), 1) self.assertIsInstance(col._store._documents, collections.OrderedDict) self.db.drop_collection(col) self.assertIsInstance(col._store._documents, collections.OrderedDict) self.assertEqual(col.count_documents({'_id': r}), 0) def test__drop_collection_indexes(self): col = self.db.a col.create_index('simple') col.create_index([('value', 1)], unique=True) col.create_index([('sparsed', 1)], 
unique=True, sparse=True) self.db.drop_collection(col) # Make sure indexes' rules no longer apply col.insert_one({'value': 'not_unique_but_ok', 'sparsed': 'not_unique_but_ok'}) col.insert_one({'value': 'not_unique_but_ok'}) col.insert_one({'sparsed': 'not_unique_but_ok'}) self.assertEqual(col.count_documents({}), 3) def test__drop_n_recreate_collection(self): col_a = self.db.create_collection('a') col_a2 = self.db.a col_a.insert_one({'foo': 'bar'}) self.assertEqual(col_a.count_documents({}), 1) self.assertEqual(col_a2.count_documents({}), 1) self.assertEqual(self.db.a.count_documents({}), 1) self.db.drop_collection('a') self.assertEqual(col_a.count_documents({}), 0) self.assertEqual(col_a2.count_documents({}), 0) self.assertEqual(self.db.a.count_documents({}), 0) col_a2.insert_one({'foo2': 'bar2'}) self.assertEqual(col_a.count_documents({}), 1) self.assertEqual(col_a2.count_documents({}), 1) self.assertEqual(self.db.a.count_documents({}), 1) def test__cursor_hint(self): self.db.collection.insert_one({'f1': {'f2': 'v'}}) cursor = self.db.collection.find() self.assertEqual(cursor, cursor.hint(None)) cursor.hint('unknownIndex') self.assertEqual([{'f2': 'v'}], [d['f1'] for d in cursor]) with self.assertRaises(mongomock.InvalidOperation): cursor.hint(None) def test__distinct_nested_field(self): self.db.collection.insert_one({'f1': {'f2': 'v'}}) cursor = self.db.collection.find() self.assertEqual(cursor.distinct('f1.f2'), ['v']) def test__distinct_array_field(self): self.db.collection.insert_many( [{'f1': ['v1', 'v2', 'v1']}, {'f1': ['v2', 'v3']}]) cursor = self.db.collection.find() self.assertEqual(set(cursor.distinct('f1')), set(['v1', 'v2', 'v3'])) def test__distinct_array_nested_field(self): self.db.collection.insert_one({'f1': [{'f2': 'v'}, {'f2': 'w'}]}) cursor = self.db.collection.find() self.assertEqual(set(cursor.distinct('f1.f2')), {'v', 'w'}) def test__distinct_document_field(self): self.db.collection.insert_many([ {'f1': {'f2': 'v2', 'f3': 'v3'}}, {'f1': 
{'f2': 'v2', 'f3': 'v3'}} ]) cursor = self.db.collection.find() self.assertEqual(cursor.distinct('f1'), [{'f2': 'v2', 'f3': 'v3'}]) def test__distinct_array_field_with_dicts(self): self.db.collection.insert_many([ {'f1': [{'f2': 'v2'}, {'f3': 'v3'}]}, {'f1': [{'f3': 'v3'}, {'f4': 'v4'}]}, ]) cursor = self.db.collection.find() self.assertCountEqual(cursor.distinct('f1'), [{'f2': 'v2'}, {'f3': 'v3'}, {'f4': 'v4'}]) def test__distinct_filter_field(self): self.db.collection.insert_many( [{'f1': 'v1', 'k1': 'v1'}, {'f1': 'v2', 'k1': 'v1'}, {'f1': 'v3', 'k1': 'v2'}]) self.assertEqual(set(self.db.collection.distinct('f1', {'k1': 'v1'})), set(['v1', 'v2'])) def test__distinct_error(self): with self.assertRaises(TypeError): self.db.collection.distinct({'f1': 1}) def test__cursor_clone(self): self.db.collection.insert_many([{'a': 'b'}, {'b': 'c'}, {'c': 'd'}]) cursor1 = self.db.collection.find() iterator1 = iter(cursor1) first_item = next(iterator1) cursor2 = cursor1.clone() iterator2 = iter(cursor2) self.assertEqual(next(iterator2), first_item) for item in iterator1: self.assertEqual(item, next(iterator2)) with self.assertRaises(StopIteration): next(iterator2) def test__cursor_clone_keep_limit_skip(self): self.db.collection.insert_many([{'a': 'b'}, {'b': 'c'}, {'c': 'd'}]) cursor1 = self.db.collection.find()[1:2] cursor2 = cursor1.clone() result1 = list(cursor1) result2 = list(cursor2) self.assertEqual(result1, result2) cursor3 = self.db.collection.find(skip=1, limit=1) cursor4 = cursor3.clone() result3 = list(cursor3) result4 = list(cursor4) self.assertEqual(result3, result4) def test_cursor_returns_document_copies(self): obj = {'a': 1, 'b': 2} self.db.collection.insert_one(obj) fetched_obj = self.db.collection.find_one({'a': 1}) self.assertEqual(fetched_obj, obj) fetched_obj['b'] = 3 refetched_obj = self.db.collection.find_one({'a': 1}) self.assertNotEqual(fetched_obj, refetched_obj) def test_cursor_with_projection_returns_value_copies(self): 
self.db.collection.insert_one({'a': ['b']}) fetched_list = self.db.collection.find_one(projection=['a'])['a'] self.assertEqual(fetched_list, ['b']) fetched_list.append('c') refetched_list = self.db.collection.find_one(projection=['a'])['a'] self.assertEqual(refetched_list, ['b']) @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'update was removed in pymongo v4') def test__update_retval(self): self.db.col.insert_one({'a': 1}) retval = self.db.col.update({'a': 1}, {'b': 2}) self.assertIsInstance(retval, dict) self.assertIsInstance(retval['connectionId'], int) self.assertIsNone(retval['err']) self.assertEqual(retval['n'], 1) self.assertTrue(retval['updatedExisting']) self.assertEqual(retval['ok'], 1.0) self.assertEqual(self.db.col.update({'bla': 1}, {'bla': 2})['n'], 0) @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'remove was removed in pymongo v4') def test__remove_retval(self): self.db.col.insert_one({'a': 1}) retval = self.db.col.remove({'a': 1}) self.assertIsInstance(retval, dict) self.assertIsInstance(retval['connectionId'], int) self.assertIsNone(retval['err']) self.assertEqual(retval['n'], 1) self.assertEqual(retval['ok'], 1.0) self.assertEqual(self.db.col.remove({'bla': 1})['n'], 0) @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'remove was removed in pymongo v4') def test__remove_write_concern(self): self.db.col.remove({'a': 1}, w=None, wtimeout=None, j=None, fsync=None) @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'remove was removed in pymongo v4') def test__remove_bad_write_concern(self): with self.assertRaises(TypeError): self.db.col.remove({'a': 1}, bad_kwarg=1) def test__getting_collection_via_getattr(self): col1 = self.db.some_collection_here col2 = self.db.some_collection_here self.assertIs(col1, col2) self.assertIs(col1, self.db['some_collection_here']) self.assertIsInstance(col1, mongomock.Collection) def test__save_class_deriving_from_dict(self): # See https://github.com/vmalloc/mongomock/issues/52 class 
Document(dict): def __init__(self, collection): self.collection = collection super(Document, self).__init__() self.save() def save(self): self.collection.insert_one(self) doc = Document(self.db.collection) self.assertIn('_id', doc) self.assertNotIn('collection', doc) def test__getting_collection_via_getitem(self): col1 = self.db['some_collection_here'] col2 = self.db['some_collection_here'] self.assertIs(col1, col2) self.assertIs(col1, self.db.some_collection_here) self.assertIsInstance(col1, mongomock.Collection) def test__cannot_insert_non_string_keys(self): for key in [2, 2.0, True, object()]: with self.assertRaises(ValueError): self.db.col1.insert_one({key: 'value'}) def assert_document_count(self, count=1): self.assertEqual(len(self.db.collection._store), count) def assert_document_stored(self, doc_id, expected=None): self.assertIn(doc_id, self.db.collection._store) if expected is not None: expected = expected.copy() expected['_id'] = doc_id doc = self.db.collection._store[doc_id] self.assertDictEqual(doc, expected) def assert_documents(self, documents, ignore_ids=True): projection = {'_id': False} if ignore_ids else None self.assertListEqual( list(self.db.collection.find(projection=projection)), documents) @skipIf( helpers.PYMONGO_VERSION and helpers.PYMONGO_VERSION >= version.parse('4.0'), 'insert was removed in pymongo v4') def test__insert(self): self.db.collection.insert({'a': 1}) self.assert_document_count(1) self.db.collection.insert([{'a': 2}, {'a': 3}]) self.assert_document_count(3) self.db.collection.insert( {'a': 4}, check_keys=False, continue_on_error=True) self.assert_document_count(4) self.db.collection.insert({'a': 4}, w=1) self.assert_document_count(5) def test__insert_one(self): document = {'a': 1} result = self.db.collection.insert_one(document) self.assert_document_stored(result.inserted_id, document) def test__insert_one_type_error(self): with self.assertRaises(TypeError): self.db.collection.insert_one([{'a': 1}]) 
self.assert_document_count(0) with self.assertRaises(TypeError): self.db.collection.insert_one('a') self.assert_document_count(0) def test__insert_many(self): documents = [{'a': 1}, {'b': 2}] result = self.db.collection.insert_many(documents) self.assertIsInstance(result.inserted_ids, list) for i, doc_id in enumerate(result.inserted_ids): self.assert_document_stored(doc_id, documents[i]) def test__insert_many_with_generator(self): documents = [{'a': 1}, {'b': 2}] documents_generator = (doc for doc in [{'a': 1}, {'b': 2}]) result = self.db.collection.insert_many(documents_generator) self.assertIsInstance(result.inserted_ids, list) self.assertEqual(2, len(result.inserted_ids), result) for i, doc_id in enumerate(result.inserted_ids): self.assert_document_stored(doc_id, documents[i]) def test__insert_many_type_error(self): with self.assertRaises(TypeError): self.db.collection.insert_many({'a': 1}) self.assert_document_count(0) with self.assertRaises(TypeError): self.db.collection.insert_many('a') self.assert_document_count(0) with self.assertRaises(TypeError): self.db.collection.insert_many(5) self.assert_document_count(0) with self.assertRaises(TypeError): self.db.collection.insert_many([]) self.assert_document_count(0) def test__insert_many_type_error_do_not_insert(self): with self.assertRaises(TypeError): self.db.collection.insert_many([{'a': 1}, 'a']) self.assert_document_count(0) def test__insert_many_write_errors(self): self.db.collection.insert_one({'_id': 'a'}) # Insert many, but the first one is a duplicate. 
with self.assertRaises(mongomock.BulkWriteError) as err_context: self.db.collection.insert_many([{'_id': 'a', 'culprit': True}, {'_id': 'b'}]) error_details = err_context.exception.details self.assertEqual({'nInserted', 'writeErrors'}, set(error_details.keys())) self.assertEqual(0, error_details['nInserted']) self.assertEqual( [{'_id': 'a', 'culprit': True}], [e['op'] for e in error_details['writeErrors']]) # Insert many, and only the second one is a duplicate. with self.assertRaises(mongomock.BulkWriteError) as err_context: self.db.collection.insert_many([{'_id': 'c'}, {'_id': 'a', 'culprit': True}]) error_details = err_context.exception.details self.assertEqual({'nInserted', 'writeErrors'}, set(error_details.keys())) self.assertEqual(1, error_details['nInserted']) self.assertEqual( [{'_id': 'a', 'culprit': True}], [e['op'] for e in error_details['writeErrors']]) # Insert many, with ordered=False. with self.assertRaises(mongomock.BulkWriteError) as err_context: self.db.collection.insert_many([ {'_id': 'a', 'culprit': True}, {'_id': 'b'}, {'_id': 'c', 'culprit': True}, ], ordered=False) error_details = err_context.exception.details self.assertEqual({'nInserted', 'writeErrors'}, set(error_details.keys())) self.assertEqual([0, 2], sorted(e['index'] for e in error_details['writeErrors'])) self.assertEqual(1, error_details['nInserted']) self.assertEqual({'a', 'b', 'c'}, {doc['_id'] for doc in self.db.collection.find()}) @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'count was removed in pymongo v4') def test__count(self): self.db.collection.insert_many([ {'a': 1, 's': 0}, {'a': 2, 's': 0}, {'a': 3, 's': 1} ]) self.assertEqual(self.db.collection.count(), 3) self.assertEqual(self.db.collection.count({'s': 0}), 2) self.assertEqual(self.db.collection.count({'s': 1}), 1) def test__count_documents(self): self.db.collection.insert_many([ {'a': 1, 's': 0}, {'a': 2, 's': 0}, {'_id': 'unique', 'a': 3, 's': 1} ]) self.assertEqual(3, 
self.db.collection.count_documents({})) self.assertEqual(2, self.db.collection.count_documents({'s': 0})) self.assertEqual(1, self.db.collection.count_documents({'s': 1})) self.assertEqual(2, self.db.collection.count_documents({}, skip=1)) self.assertEqual(1, self.db.collection.count_documents({}, skip=1, limit=1)) error_kwargs = [ {'unknownKwarg': None}, {'limit': 'one'}, {'limit': -1}, {'limit': 0}, ] for error_kwarg in error_kwargs: with self.assertRaises(mongomock.OperationFailure): self.db.collection.count_documents({}, **error_kwarg) with self.assertRaises(NotImplementedError): self.db.collection.count_documents({}, collation='fr') with self.assertRaises(mongomock.OperationFailure): self.db.collection.count_documents('unique') def test__find_returns_cursors(self): collection = self.db.collection self.assertEqual(type(collection.find()).__name__, 'Cursor') self.assertNotIsInstance(collection.find(), list) self.assertNotIsInstance(collection.find(), tuple) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__find_with_collation(self): collection = self.db.collection collation = Collation('fr') cursor = collection.find({}, collation=collation) self.assertEqual(cursor._collation, collation) def test__find_removed_and_changed_options(self): """Test that options that have been removed are rejected.""" options = [ {'slave_okay': True}, {'as_class': dict}, {'network_timeout': 10}, {'secondary_acceptable_latency_ms': 10}, {'max_scan': 10}, {'snapshot': True}, {'tailable': True}, {'await_data': True}, {'exhaust': True}, {'fields': {'a': 1}}, {'timeout': 10}, {'partial': True} ] for option in options: with self.assertRaises(mongomock.OperationFailure): self.db.collection.find({}, **option) @skipIf( helpers.PYMONGO_VERSION and helpers.PYMONGO_VERSION >= version.parse('4.0'), 'find_and_modify was removed in pymongo v4') def test__find_and_modify_cannot_remove_and_new(self): with self.assertRaises(mongomock.OperationFailure): 
self.db.collection.find_and_modify({}, remove=True, new=True) @skipIf( helpers.PYMONGO_VERSION and helpers.PYMONGO_VERSION >= version.parse('4.0'), 'find_and_modify was removed in pymongo v4') def test__find_and_modify_cannot_remove_and_update(self): with self.assertRaises(ValueError): # this is also what pymongo raises self.db.collection.find_and_modify({'a': 2}, {'a': 3}, remove=True) def test__find_one_and_update_doc_with_zero_ids(self): ret = self.db.col_a.find_one_and_update( {'_id': 0}, {'$inc': {'counter': 1}}, upsert=True, return_document=ReturnDocument.AFTER) self.assertEqual(ret, {'_id': 0, 'counter': 1}) ret = self.db.col_a.find_one_and_update( {'_id': 0}, {'$inc': {'counter': 1}}, upsert=True, return_document=ReturnDocument.AFTER) self.assertEqual(ret, {'_id': 0, 'counter': 2}) ret = self.db.col_b.find_one_and_update( {'_id': 0}, {'$inc': {'counter': 1}}, upsert=True, return_document=ReturnDocument.BEFORE) self.assertIsNone(ret) ret = self.db.col_b.find_one_and_update( {'_id': 0}, {'$inc': {'counter': 1}}, upsert=True, return_document=ReturnDocument.BEFORE) self.assertEqual(ret, {'_id': 0, 'counter': 1}) def test__find_one_and_replace_return_document_after_upsert(self): collection = self.db.col collection.insert_one({'_id': 123, 'val': 5}) ret = collection.find_one_and_replace( {'val': 1}, {'val': 7}, upsert=True, return_document=ReturnDocument.AFTER) self.assertTrue(ret) self.assertEqual(7, ret['val']) @skipIf( helpers.PYMONGO_VERSION and helpers.PYMONGO_VERSION >= version.parse('4.0'), 'find_and_modify was removed in pymongo v4') def test__find_and_modify_no_projection_kwarg(self): with self.assertRaises(TypeError): # unlike pymongo, we warn about this self.db.collection.find_and_modify({'a': 2}, {'a': 3}, projection=['a']) def test__find_one_and_delete(self): documents = [ {'x': 1, 's': 0}, {'x': 2, 's': 1} ] self.db.collection.insert_many(documents) self.assert_documents(documents, ignore_ids=False) doc = self.db.collection.find_one_and_delete({'x': 
3}) self.assert_documents(documents, ignore_ids=False) self.assertIsNone(doc) doc = self.db.collection.find_one_and_delete({'x': 2}) self.assert_documents(documents[:-1], ignore_ids=False) self.assertDictEqual(doc, documents[1]) doc = self.db.collection.find_one_and_delete( {'s': 0}, {'_id': False, 'x': True}) self.assertEqual(doc, {'x': 1}) def test__find_one_and_replace(self): documents = [ {'x': 1, 's': 0}, {'x': 1, 's': 1} ] self.db.collection.insert_many(documents) self.assert_documents(documents, ignore_ids=False) doc = self.db.collection.find_one_and_replace( {'s': 3}, {'x': 2, 's': 1}) self.assert_documents(documents, ignore_ids=False) self.assertIsNone(doc) doc = self.db.collection.find_one_and_replace( {'s': 1}, {'x': 2, 's': 1}) self.assertDictEqual(doc, documents[1]) self.assert_document_count(2) doc = self.db.collection.find_one_and_replace( {'s': 2}, {'x': 3, 's': 0}, upsert=True) self.assertIsNone(doc) self.assertIsNotNone(self.db.collection.find_one({'x': 3})) self.assert_document_count(3) replacement = {'x': 4, 's': 1} doc = self.db.collection.find_one_and_replace( {'s': 1}, replacement, return_document=ReturnDocument.AFTER) doc.pop('_id') self.assertDictEqual(doc, replacement) def test__find_one_and_update(self): documents = [ {'x': 1, 's': 0}, {'x': 1, 's': 1} ] self.db.collection.insert_many(documents) self.assert_documents(documents, ignore_ids=False) doc = self.db.collection.find_one_and_update( {'s': 3}, {'$set': {'x': 2}}) self.assertIsNone(doc) self.assert_documents(documents, ignore_ids=False) doc = self.db.collection.find_one_and_update( {'s': 1}, {'$set': {'x': 2}}) self.assertDictEqual(doc, documents[1]) doc = self.db.collection.find_one_and_update( {'s': 3}, {'$set': {'x': 3, 's': 2}}, upsert=True) self.assertIsNone(doc) self.assertIsNotNone(self.db.collection.find_one({'x': 3})) update = {'x': 4, 's': 1} doc = self.db.collection.find_one_and_update( {'s': 1}, {'$set': update}, return_document=ReturnDocument.AFTER) doc.pop('_id') 
self.assertDictEqual(doc, update) def test__find_in_empty_collection(self): self.db.collection.drop() # Valid filter. self.db.collection.find_one({'a.b': 3}) # Invalid filter. with self.assertRaises(mongomock.OperationFailure): self.db.collection.find_one({'$or': []}) # Do not raise when creating the cursor. cursor = self.db.collection.find({'$or': []}) # Only raise when using it. with self.assertRaises(mongomock.OperationFailure): next(cursor) def test__regex_options(self): self.db.collection.drop() self.db.collection.insert_one({'a': 'TADA'}) self.db.collection.insert_one({'a': 'TA\nDA'}) self.assertFalse(self.db.collection.find_one({'a': {'$regex': 'tada'}})) self.assertTrue(self.db.collection.find_one({'a': { '$regex': re.compile('tada', re.IGNORECASE), }})) self.assertTrue(self.db.collection.find_one({'a': {'$regex': 'tada', '$options': 'i'}})) self.assertTrue(self.db.collection.find_one({'a': {'$regex': '^da', '$options': 'im'}})) self.assertFalse(self.db.collection.find_one({'a': {'$regex': 'tada', '$options': 'I'}})) self.assertTrue(self.db.collection.find_one({'a': {'$regex': 'TADA', '$options': 'z'}})) self.assertTrue(self.db.collection.find_one({'a': collections.OrderedDict([ ('$regex', re.compile('tada')), ('$options', 'i'), ])})) self.assertTrue(self.db.collection.find_one({'a': collections.OrderedDict([ ('$regex', re.compile('tada', re.IGNORECASE)), ('$options', 'm'), ])})) # Bad type for $options. 
with self.assertRaises(mongomock.OperationFailure): self.db.collection.find_one({'a': {'$regex': 'tada', '$options': re.I}}) # Bug https://jira.mongodb.org/browse/SERVER-38621 self.assertTrue(self.db.collection.find_one({'a': collections.OrderedDict([ ('$options', 'i'), ('$regex', re.compile('tada')), ])})) def test__iterate_on_find_and_update(self): documents = [ {'x': 1, 's': 0}, {'x': 1, 's': 1}, {'x': 1, 's': 2}, {'x': 1, 's': 3} ] self.db.collection.insert_many(documents) self.assert_documents(documents, ignore_ids=False) self.assertEqual(self.db.collection.count_documents({'x': 1}), 4) # Update the field used by the cursor's filter should not upset the iteration for doc in self.db.collection.find({'x': 1}): self.db.collection.update_one({'_id': doc['_id']}, {'$set': {'x': 2}}) self.assertEqual(self.db.collection.count_documents({'x': 1}), 0) self.assertEqual(self.db.collection.count_documents({'x': 2}), 4) def test__update_interns_lists_and_dicts(self): obj = {} obj_id = self.db.collection.insert_one(obj).inserted_id external_dict = {} external_list = [] self.db.collection.replace_one({'_id': obj_id}, {'d': external_dict, 'l': external_list}) external_dict['a'] = 'b' external_list.append(1) self.assertEqual( list(self.db.collection.find()), [{'_id': obj_id, 'd': {}, 'l': []}]) def test__update_cannot_change__id(self): self.db.collection.insert_one({'_id': 1, 'a': 1}) with self.assertRaises(mongomock.OperationFailure): self.db.collection.replace_one({'_id': 1}, {'_id': 2, 'b': 2}) def test__update_empty_id(self): self.db.collection.insert_one({'_id': '', 'a': 1}) self.db.collection.replace_one({'_id': ''}, {'b': 1}) doc = self.db.collection.find_one({'_id': ''}) self.assertEqual(1, doc['b']) def test__update_one(self): insert_result = self.db.collection.insert_one({'a': 1}) update_result = self.db.collection.update_one( filter={'a': 1}, update={'$set': {'a': 2}} ) self.assertEqual(update_result.matched_count, 1) self.assertEqual(update_result.modified_count, 
1) self.assertIsNone(update_result.upserted_id) doc = self.db.collection.find_one({'a': 2}) self.assertEqual(insert_result.inserted_id, doc['_id']) self.assertEqual(doc['a'], 2) def test__update_id(self): self.db.collection.insert_one({'a': 1}) with self.assertRaises(mongomock.WriteError): self.db.collection.update_one({'a': 1}, {'$set': {'a': 2, '_id': 42}}) self.assertEqual(1, self.db.collection.find_one({})['a']) def test__update_one_upsert(self): self.assert_document_count(0) update_result = self.db.collection.update_one( filter={'a': 1}, update={'$set': {'a': 1}}, upsert=True ) self.assertEqual(update_result.modified_count, 0) self.assertEqual(update_result.matched_count, 0) self.assertIsNotNone(update_result.upserted_id) self.assert_document_stored(update_result.upserted_id, {'a': 1}) def test__update_one_upsert_dots(self): self.assert_document_count(0) update_result = self.db.collection.update_one( filter={'a.b': 1}, update={'$set': {'c': 2}}, upsert=True ) self.assertEqual(update_result.modified_count, 0) self.assertEqual(update_result.matched_count, 0) self.assertIsNotNone(update_result.upserted_id) self.assert_document_stored(update_result.upserted_id, {'a': {'b': 1}, 'c': 2}) def test__update_one_upsert_match_subdocuments(self): update_result = self.db.collection.update_one( filter={'b.c.': 1, 'b.d': 3}, update={'$set': {'a': 1}}, upsert=True ) self.assertEqual(update_result.modified_count, 0) self.assertEqual(update_result.matched_count, 0) self.assertIsNotNone(update_result.upserted_id) self.assert_document_stored( update_result.upserted_id, {'a': 1, 'b': {'c': {'': 1}, 'd': 3}}) def test__update_one_upsert_operators(self): self.assert_document_count(0) update_result = self.db.collection.update_one( filter={'a.b': {'$eq': 1}, 'e.f': {'$gt': 3}, 'd': {}}, update={'$set': {'c': 2}}, upsert=True ) self.assertEqual(update_result.modified_count, 0) self.assertEqual(update_result.matched_count, 0) self.assertIsNotNone(update_result.upserted_id) 
self.assert_document_stored(update_result.upserted_id, {'c': 2, 'd': {}, 'a': {'b': 1}}) def test__update_one_unset_position(self): insert_result = self.db.collection.insert_one({'a': 1, 'b': [{'c': 2, 'd': 3}]}) update_result = self.db.collection.update_one( filter={'a': 1, 'b': {'$elemMatch': {'c': 2, 'd': 3}}}, update={'$unset': {'b.$.c': ''}} ) self.assertEqual(update_result.modified_count, 1) self.assertEqual(update_result.matched_count, 1) self.assert_document_stored(insert_result.inserted_id, {'a': 1, 'b': [{'d': 3}]}) def test__update_one_no_change(self): self.db.collection.insert_one({'a': 1}) update_result = self.db.collection.update_one( filter={'a': 1}, update={'$set': {'a': 1}} ) self.assertEqual(update_result.matched_count, 1) self.assertEqual(update_result.modified_count, 0) def test__rename_one_foo_to_bar(self): input_ = {'_id': 1, 'foo': 'bar'} expected = {'_id': 1, 'bar': 'bar'} insert_result = self.db.collection.insert_one(input_) query = {'_id': 1} update = {'$rename': {'foo': 'bar'}} update_result = self.db.collection.update_one(query, update=update) self.assertEqual(update_result.modified_count, 1) self.assertEqual(update_result.matched_count, 1) self.assert_document_stored(insert_result.inserted_id, expected) def test__rename_missing_field(self): input_ = {'_id': 1, 'foo': 'bar'} insert_result = self.db.collection.insert_one(input_) query = {'_id': 1} update = {'$rename': {'bar': 'foo'}} update_result = self.db.collection.update_one(query, update=update) self.assertEqual(update_result.modified_count, 0) self.assertEqual(update_result.matched_count, 1) self.assert_document_stored(insert_result.inserted_id, input_) def test__rename_unsupported(self): input_ = {'_id': 1, 'foo': 'bar'} insert_result = self.db.collection.insert_one(input_) self.assert_document_stored(insert_result.inserted_id, input_) query = {'_id': 1} update = {'$rename': {'foo': 'f.o.o.'}} self.assertRaises(NotImplementedError, self.db.collection.update_one, query, 
update=update) def test__update_one_upsert_invalid_filter(self): with self.assertRaises(mongomock.WriteError): self.db.collection.update_one( filter={'a.b': 1, 'a': 3}, update={'$set': {'c': 2}}, upsert=True ) def test__update_one_hint(self): self.db.collection.insert_one({'a': 1}) with self.assertRaises(NotImplementedError): self.db.collection.update_one( filter={'a': 1}, update={'$set': {'a': 1}}, hint='a', ) def test__update_many(self): self.db.collection.insert_many([ {'a': 1, 'c': 2}, {'a': 1, 'c': 3}, {'a': 2, 'c': 4} ]) update_result = self.db.collection.update_many( filter={'a': 1}, update={'$set': {'c': 0}} ) self.assertEqual(update_result.modified_count, 2) self.assertEqual(update_result.matched_count, 2) self.assertIsNone(update_result.upserted_id) self.assert_documents([{'a': 1, 'c': 0}, {'a': 1, 'c': 0}, {'a': 2, 'c': 4}]) def test__update_many_collation(self): self.db.collection.insert_many([ {'a': 1, 'c': 2}, {'a': 1, 'c': 3}, {'a': 2, 'c': 4} ]) self.db.collection.update_many( filter={'a': 1}, update={'$set': {'c': 0}}, collation=None, ) with self.assertRaises(NotImplementedError): self.db.collection.update_many( filter={'a': 1}, update={'$set': {'c': 0}}, collation='fr', ) def test__update_many_array_filters(self): self.db.collection.insert_many([ {'a': 1, 'c': [2, 5, 6]}, {'a': 1, 'c': [3, 4, 5]}, {'a': 2, 'c': [12, 15]} ]) self.db.collection.update_many( filter={'a': 1}, update={'$set': {'a': 0}}, array_filters=None, ) with self.assertRaises(NotImplementedError): self.db.collection.update_many( filter={'a': 1}, update={'$set': {'c.$[e]': 0}}, array_filters=[{'e': {'$lt': 5}}], ) def test__update_many_let(self): self.db.collection.insert_many([ {'a': 1, 'c': 2}, {'a': 1, 'c': 3}, {'a': 2, 'c': 4} ]) self.db.collection.update_many( filter={'a': 1}, update={'$set': {'c': '$$newValue'}}, let=None, ) with self.assertRaises(NotImplementedError): self.db.collection.update_many( filter={'a': 1}, update={'$set': {'c': '$$newValue'}}, let={'newValue': 0}, 
) def test__update_many_upsert(self): self.assert_document_count(0) update_result = self.db.collection.update_many( filter={'a': 1}, update={'$set': {'a': 1, 'c': 0}}, upsert=True ) self.assertEqual(update_result.modified_count, 0) self.assertEqual(update_result.matched_count, 0) self.assertIsNotNone(update_result.upserted_id) self.assert_document_stored(update_result.upserted_id, {'a': 1, 'c': 0}) def test__update_non_json_values(self): self.db.collection.insert_one({'a': collections.Counter({'b': 1})}) self.assertEqual({'b': 1}, self.db.collection.find_one()['a']) self.db.collection.update_one({}, {'$set': {'a': collections.Counter({'b': 2})}}) self.assertEqual({'b': 2}, self.db.collection.find_one()['a']) def test__update_push_slice_from_the_end(self): self.db.collection.insert_one({'scores': [40, 50, 60]}) self.db.collection.update_one({}, {'$push': {'scores': { '$each': [80, 78, 86], '$slice': -5, }}}) self.assertEqual([50, 60, 80, 78, 86], self.db.collection.find_one()['scores']) def test__update_push_slice_from_the_front(self): self.db.collection.insert_one({'scores': [89, 90]}) self.db.collection.update_one({}, {'$push': {'scores': { '$each': [100, 20], '$slice': 3, }}}) self.assertEqual([89, 90, 100], self.db.collection.find_one()['scores']) def test__update_push_slice_to_zero(self): self.db.collection.insert_one({'scores': [40, 50, 60]}) self.db.collection.update_one({}, {'$push': {'scores': { '$each': [80, 78, 86], '$slice': 0, }}}) self.assertEqual([], self.db.collection.find_one()['scores']) def test__update_push_slice_only(self): self.db.collection.insert_one({'scores': [89, 70, 100, 20]}) self.db.collection.update_one({}, {'$push': {'scores': { '$each': [], '$slice': -3, }}}) self.assertEqual([70, 100, 20], self.db.collection.find_one()['scores']) def test__update_push_slice_nested_field(self): self.db.collection.insert_one({'games': [{'scores': [89, 70, 100, 20]}]}) self.db.collection.update_one({}, {'$push': {'games.0.scores': { '$each': [15], 
'$slice': -3, }}}) self.assertEqual([100, 20, 15], self.db.collection.find_one()['games'][0]['scores']) def test__update_push_slice_positional_nested_field(self): self.db.collection.insert_one({'games': [{'scores': [0, 1]}, {'scores': [2, 3]}]}) self.db.collection.update_one( {'games': {'$elemMatch': {'scores.0': 2}}}, {'$push': {'games.$.scores': { '$each': [15], '$slice': -2, }}}) self.assertEqual([0, 1], self.db.collection.find_one()['games'][0]['scores']) self.assertEqual([3, 15], self.db.collection.find_one()['games'][1]['scores']) def test__update_push_sort(self): self.db.collection.insert_one( {'a': {'b': [{'value': 3}, {'value': 1}, {'value': 2}]}}) self.db.collection.update_one({}, {'$push': {'a.b': { '$each': [{'value': 4}], '$sort': {'value': 1}, }}}) self.assertEqual( {'b': [{'value': 1}, {'value': 2}, {'value': 3}, {'value': 4}]}, self.db.collection.find_one()['a']) def test__update_push_sort_document(self): self.db.collection.insert_one({'a': {'b': [3, 1, 2]}}) self.db.collection.update_one({}, {'$push': {'a.b': { '$each': [4, 5], '$sort': -1, }}}) self.assertEqual({'b': [5, 4, 3, 2, 1]}, self.db.collection.find_one()['a']) def test__update_push_position(self): self.db.collection.insert_one( {'a': {'b': [{'value': 3}, {'value': 1}, {'value': 2}]}}) self.db.collection.update_one({}, {'$push': {'a.b': { '$each': [{'value': 4}], '$position': 1, }}}) self.assertEqual( {'b': [{'value': 3}, {'value': 4}, {'value': 1}, {'value': 2}]}, self.db.collection.find_one()['a']) def test__update_push_negative_position(self): self.db.collection.insert_one( {'a': {'b': [{'value': 3}, {'value': 1}, {'value': 2}]}}) self.db.collection.update_one({}, {'$push': {'a.b': { '$each': [{'value': 4}], '$position': -2, }}}) self.assertEqual( {'b': [{'value': 3}, {'value': 4}, {'value': 1}, {'value': 2}]}, self.db.collection.find_one()['a']) def test__update_push_other_clauses(self): self.db.collection.insert_one({'games': [{'scores': [0, 1]}, {'scores': [2, 3]}]}) with 
self.assertRaises(mongomock.WriteError): self.db.collection.update_one( {'games': {'$elemMatch': {'scores.0': 2}}}, {'$push': {'games.$.scores': { '$each': [15, 13], '$a_clause_that_does_not_exit': 1, }}}) def test__update_push_positional_nested_field(self): self.db.collection.insert_one({'games': [{}]}) self.db.collection.update_one( {'games': {'$elemMatch': {'player.scores': {'$exists': False}}}}, {'$push': {'games.$.player.scores': 15}}) self.assertEqual([{'player': {'scores': [15]}}], self.db.collection.find_one()['games']) def test__update_push_array_of_arrays(self): self.db.collection.insert_one({'games': [[0], [1]]}) self.db.collection.update_one( {'games': {'$elemMatch': {'0': 1}}}, {'$push': {'games.$': 15}}) self.assertEqual([[0], [1, 15]], self.db.collection.find_one()['games']) def test__update_pull_filter_operator(self): self.db.collection.insert_one({'b': 0, 'arr': [0, 1, 2, 3, 4]}) self.db.collection.update_one({}, {'$pull': {'arr': {'$gt': 2}}}) self.assertEqual({'b': 0, 'arr': [0, 1, 2]}, self.db.collection.find_one({}, {'_id': 0})) def test__update_pull_filter_operator_on_subdocs(self): self.db.collection.insert_one({'arr': [{'size': 0}, {'size': 1}]}) self.db.collection.update_one({}, {'$pull': {'arr': {'size': {'$gte': 1}}}}) self.assertEqual({'arr': [{'size': 0}]}, self.db.collection.find_one({}, {'_id': 0})) def test__update_pull_in(self): self.db.collection.insert_one({'b': 0, 'arr': ['a1', 'a2']}) self.db.collection.update_one({}, {'$pull': {'arr': {'$in': ['a1']}}}) self.assertEqual({'b': 0, 'arr': ['a2']}, self.db.collection.find_one({}, {'_id': 0})) def test__update_pull_in_nested(self): self.db.collection.insert_one({'food': { 'fruits': ['apples', 'pears', 'oranges', 'grapes', 'bananas'], 'vegetables': ['carrots', 'celery', 'squash', 'carrots'], }}) self.db.collection.update_one({}, {'$pull': { 'food.fruits': {'$in': ['apples', 'oranges']}, 'food.vegetables': 'carrots', }}) self.assertEqual({'food': { 'fruits': ['pears', 'grapes', 
'bananas'], 'vegetables': ['celery', 'squash'], }}, self.db.collection.find_one({}, {'_id': 0})) def test__update_pop(self): self.db.collection.insert_one({'name': 'bob', 'hat': ['green', 'tall']}) self.db.collection.update_one({'name': 'bob'}, {'$pop': {'hat': 1}}) res = self.db.collection.find_one({'name': 'bob'}) self.assertEqual(['green'], res['hat']) def test__update_pop_negative_index(self): self.db.collection.insert_one({'name': 'bob', 'hat': ['green', 'tall']}) self.db.collection.update_one({'name': 'bob'}, {'$pop': {'hat': -1}}) res = self.db.collection.find_one({'name': 'bob'}) self.assertEqual(['tall'], res['hat']) def test__update_pop_large_index(self): self.db.collection.insert_one({'name': 'bob', 'hat': [['green', 'tall']]}) self.db.collection.update_one({'name': 'bob'}, {'$pop': {'hat.1': 1}}) res = self.db.collection.find_one({'name': 'bob'}) self.assertEqual([['green', 'tall']], res['hat']) def test__update_pop_empty(self): self.db.collection.insert_one({'name': 'bob', 'hat': []}) self.db.collection.update_one({'name': 'bob'}, {'$pop': {'hat': 1}}) res = self.db.collection.find_one({'name': 'bob'}) self.assertEqual([], res['hat']) def test__replace_one(self): self.db.collection.insert_one({'a': 1, 'b': 2}) self.assert_documents([{'a': 1, 'b': 2}]) result = self.db.collection.replace_one( filter={'a': 2}, replacement={'x': 1, 'y': 2} ) self.assert_documents([{'a': 1, 'b': 2}]) self.assertEqual(result.matched_count, 0) self.assertEqual(result.modified_count, 0) result = self.db.collection.replace_one( filter={'a': 1}, replacement={'x': 1, 'y': 2} ) self.assert_documents([{'x': 1, 'y': 2}]) self.assertEqual(result.matched_count, 1) self.assertEqual(result.modified_count, 1) def test__replace_one_upsert(self): self.assert_document_count(0) result = self.db.collection.replace_one( filter={'a': 2}, replacement={'x': 1, 'y': 2}, upsert=True ) self.assertEqual(result.matched_count, 0) self.assertEqual(result.modified_count, 0) 
self.assertIsNotNone(result.upserted_id) self.assert_document_stored(result.upserted_id, {'x': 1, 'y': 2}) def test__replace_one_invalid(self): with self.assertRaises(ValueError): self.db.collection.replace_one( filter={'a': 2}, replacement={'$set': {'x': 1, 'y': 2}}) def test__update_one_invalid(self): with self.assertRaises(ValueError): self.db.collection.update_one({'a': 2}, {}) def test__delete_one(self): self.assert_document_count(0) self.db.collection.insert_one({'a': 1}) self.assert_document_count(1) self.db.collection.delete_one({'a': 2}) self.assert_document_count(1) self.db.collection.delete_one({'a': 1}) self.assert_document_count(0) def test__delete_one_invalid_filter(self): with self.assertRaises(TypeError): self.db.collection.delete_one('a') with self.assertRaises(TypeError): self.db.collection.delete_one(['a']) def test__delete_many(self): self.db.collection.insert_many([ {'a': 1, 'c': 2}, {'a': 1, 'c': 3}, {'a': 2, 'c': 4} ]) self.assert_document_count(3) self.db.collection.delete_many({'a': 2}) self.assert_document_count(2) self.db.collection.delete_many({'a': 1}) self.assert_document_count(0) def test__delete_many_collation_option(self): """Ensure collation delete_many's option is not rejected.""" self.assertTrue(self.db.collection.delete_many({}, collation=None)) with self.assertRaises(NotImplementedError): self.db.collection.delete_many({}, collation='fr') def test__delete_many_hint_option(self): """Ensure hint delete_many's option is not rejected.""" self.assertTrue(self.db.collection.delete_many({}, hint=None)) with self.assertRaises(NotImplementedError): self.db.collection.delete_many({}, hint='_index') def test__string_matching(self): """Make sure strings are not treated as collections on find""" self.db['abc'].insert_one({'name': 'test1'}) self.db['abc'].insert_one({'name': 'test2'}) # now searching for 'name':'e' returns test1 self.assertIsNone(self.db['abc'].find_one({'name': 'e'})) def test__collection_is_indexable(self): 
self.db['def'].insert_one({'name': 'test1'}) self.assertEqual(self.db['def'].find({'name': 'test1'})[0]['name'], 'test1') def test__cursor_distinct(self): larry_bob = {'name': 'larry'} larry = {'name': 'larry'} gary = {'name': 'gary'} self.db['coll_name'].insert_many([larry_bob, larry, gary]) ret_val = self.db['coll_name'].find().distinct('name') self.assertIsInstance(ret_val, list) self.assertTrue(set(ret_val) == set(['larry', 'gary'])) def test__cursor_limit(self): self.db.collection.insert_many([{'a': i} for i in range(100)]) cursor = self.db.collection.find().limit(30) first_ones = list(cursor) self.assertEqual(30, len(first_ones)) def test__cursor_negative_limit(self): self.db.collection.insert_many([{'a': i} for i in range(100)]) cursor = self.db.collection.find().limit(-30) first_ones = list(cursor) self.assertEqual(30, len(first_ones)) @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'count was removed in pymongo v4') def test__cursor_count_with_limit(self): first = {'name': 'first'} second = {'name': 'second'} third = {'name': 'third'} self.db['coll_name'].insert_many([first, second, third]) count = self.db['coll_name'].find().limit( 2).count(with_limit_and_skip=True) self.assertEqual(count, 2) count = self.db['coll_name'].find().limit( 0).count(with_limit_and_skip=True) self.assertEqual(count, 3) @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'count was removed in pymongo v4') def test__cursor_count_with_skip(self): first = {'name': 'first'} second = {'name': 'second'} third = {'name': 'third'} self.db['coll_name'].insert_many([first, second, third]) count = self.db['coll_name'].find().skip( 1).count(with_limit_and_skip=True) self.assertEqual(count, 2) @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'count was removed in pymongo v4') def test__cursor_count_with_skip_init(self): first = {'name': 'first'} second = {'name': 'second'} third = {'name': 'third'} self.db['coll_name'].insert_many([first, second, third]) count = 
self.db['coll_name'].find(skip=1).count(with_limit_and_skip=True) self.assertEqual(count, 2) @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'count was removed in pymongo v4') def test__cursor_count_when_db_changes(self): self.db['coll_name'].insert_one({}) cursor = self.db['coll_name'].find() self.db['coll_name'].insert_one({}) self.assertEqual(cursor.count(), 2) def test__cursor_getitem_when_db_changes(self): self.db['coll_name'].insert_one({}) cursor = self.db['coll_name'].find() self.db['coll_name'].insert_one({}) cursor_items = [x for x in cursor] self.assertEqual(len(cursor_items), 2) def test__cursor_getitem(self): first = {'name': 'first'} second = {'name': 'second'} third = {'name': 'third'} self.db['coll_name'].insert_many([first, second, third]) cursor = self.db['coll_name'].find() item = cursor[0] self.assertEqual(item['name'], 'first') def test__cursor_getitem_slice(self): first = {'name': 'first'} second = {'name': 'second'} third = {'name': 'third'} self.db['coll_name'].insert_many([first, second, third]) cursor = self.db['coll_name'].find() ret = cursor[1:4] self.assertIs(ret, cursor) count = sum(1 for d in cursor) self.assertEqual(count, 2) def test__cursor_getitem_negative_index(self): first = {'name': 'first'} second = {'name': 'second'} third = {'name': 'third'} self.db['coll_name'].insert_many([first, second, third]) cursor = self.db['coll_name'].find() with self.assertRaises(IndexError): cursor[-1] # pylint: disable=pointless-statement def test__cursor_getitem_bad_index(self): first = {'name': 'first'} second = {'name': 'second'} third = {'name': 'third'} self.db['coll_name'].insert_many([first, second, third]) cursor = self.db['coll_name'].find() with self.assertRaises(TypeError): cursor['not_a_number'] # pylint: disable=pointless-statement def test__find_with_skip_param(self): """Make sure that find() will take in account skip parameter""" u1 = {'name': 'first'} u2 = {'name': 'second'} self.db['users'].insert_many([u1, u2]) count = 
sum(1 for d in self.db['users'].find(sort=[('name', 1)], skip=1)) self.assertEqual(1, count) self.assertEqual( self.db['users'].find( sort=[ ('name', 1)], skip=1)[0]['name'], 'second') def test__ordered_insert_find(self): """Tests ordered inserts If we insert values 1, 2, 3 and find them, we must see them in order as we inserted them. """ values = list(range(20)) random.shuffle(values) for val in values: self.db.collection.insert_one({'_id': val}) find_cursor = self.db.collection.find() for val in values: in_db_val = find_cursor.next() expected = {'_id': val} self.assertEqual(in_db_val, expected) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__create_uniq_idxs_with_ascending_ordering(self): self.db.collection.create_index([('value', pymongo.ASCENDING)], unique=True) self.db.collection.insert_one({'value': 1}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'value': 1}) self.assertEqual(self.db.collection.count_documents({}), 1) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__create_uniq_idxs_with_descending_ordering(self): self.db.collection.create_index([('value', pymongo.DESCENDING)], unique=True) self.db.collection.insert_one({'value': 1}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'value': 1}) self.assertEqual(self.db.collection.count_documents({}), 1) def test__create_uniq_idxs_without_ordering(self): self.db.collection.create_index([('value', 1)], unique=True) self.db.collection.insert_one({'value': 1}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'value': 1}) self.assertEqual(self.db.collection.count_documents({}), 1) def test__create_index_duplicate(self): self.db.collection.create_index([('value', 1)]) self.db.collection.create_index([('value', 1)]) with self.assertRaises(mongomock.OperationFailure): self.db.collection.create_index([('value', 1)], unique=True) def test__create_index_wrong_type(self): 
with self.assertRaises(TypeError): self.db.collection.create_index({'value': 1}) with self.assertRaises(TypeError): self.db.collection.create_index([('value', 1, 'foo', 'bar')]) def test__ttl_index_ignores_record_in_the_future(self): self.db.collection.create_index([('value', 1)], expireAfterSeconds=0) self.db.collection.insert_one({'value': datetime.utcnow() + timedelta(seconds=100)}) self.assertEqual(self.db.collection.count_documents({}), 1) def test__ttl_index_ignores_records_with_non_datetime_values(self): self.db.collection.create_index([('value', 1)], expireAfterSeconds=0) self.db.collection.insert_one({'value': 'not a dt'}) self.assertEqual(self.db.collection.count_documents({}), 1) def test__ttl_index_record_expiry(self): self.db.collection.create_index([('value', 1)], expireAfterSeconds=5) self.db.collection.insert_one({'value': datetime.utcnow() - timedelta(seconds=5)}) self.assertEqual(self.db.collection.count_documents({}), 0) def test__ttl_expiration_of_0(self): self.db.collection.create_index([('value', 1)], expireAfterSeconds=0) self.db.collection.insert_one({'value': datetime.utcnow()}) self.assertEqual(self.db.collection.count_documents({}), 0) def test__ttl_with_non_integer_value_is_ignored(self): self.db.collection.create_index([('value', 1)], expireAfterSeconds='a') self.db.collection.insert_one({'value': datetime.utcnow()}) self.assertEqual(self.db.collection.count_documents({}), 1) def test__ttl_applied_to_compound_key_is_ignored(self): self.db.collection.create_index([('field1', 1), ('field2', 1)], expireAfterSeconds=0) self.db.collection.insert_one({'field1': datetime.utcnow(), 'field2': 'val2'}) self.assertEqual(self.db.collection.count_documents({}), 1) def test__ttl_ignored_when_document_does_not_contain_indexed_field(self): self.db.collection.create_index([('value', 1)], expireAfterSeconds=0) self.db.collection.insert_one({'other_value': datetime.utcnow()}) self.assertEqual(self.db.collection.count_documents({}), 1) def 
test__ttl_of_array_field_expiration(self): self.db.collection.create_index([('value', 1)], expireAfterSeconds=5) self.db.collection.insert_one({ 'value': [ 'a', 'b', datetime.utcnow() + timedelta(seconds=100) ] }) self.assertEqual(self.db.collection.count_documents({}), 1) self.db.collection.drop() self.db.collection.create_index([('value', 1)], expireAfterSeconds=5) self.db.collection.insert_one({ 'value': [ 'a', 'b', datetime.utcnow() - timedelta(seconds=5), datetime.utcnow() + timedelta(seconds=100) ] }) self.assertEqual(self.db.collection.count_documents({}), 0) def test__ttl_of_array_field_without_datetime_does_not_expire(self): self.db.collection.create_index([('value', 1)], expireAfterSeconds=5) self.db.collection.insert_one({'value': ['a', 'b', 'c', 1, 2, 3]}) self.assertEqual(self.db.collection.count_documents({}), 1) @skipIf(not _HAVE_MOCK, 'mock not installed') def test__ttl_expiry_with_mock(self): now = datetime.utcnow() self.db.collection.create_index([('value', 1)], expireAfterSeconds=100) self.db.collection.insert_one({'value': now + timedelta(seconds=100)}) self.assertEqual(self.db.collection.count_documents({}), 1) with mock.patch('mongomock.utcnow') as mongomock_utcnow: mongomock_utcnow.return_value = now + timedelta(100) self.assertEqual(self.db.collection.count_documents({}), 0) def test__ttl_index_is_removed_if_collection_dropped(self): self.db.collection.create_index([('value', 1)], expireAfterSeconds=0) self.db.collection.insert_one({'value': datetime.utcnow()}) self.assertEqual(self.db.collection.count_documents({}), 0) self.db.collection.drop() self.db.collection.insert_one({'value': datetime.utcnow()}) self.assertEqual(self.db.collection.count_documents({}), 1) def test__ttl_index_is_removed_when_index_is_dropped(self): self.db.collection.create_index([('value', 1)], expireAfterSeconds=0) self.db.collection.insert_one({'value': datetime.utcnow()}) self.assertEqual(self.db.collection.count_documents({}), 0) 
self.db.collection.drop_index('value_1') self.db.collection.insert_one({'value': datetime.utcnow()}) self.assertEqual(self.db.collection.count_documents({}), 1) def test__ttl_index_removes_expired_documents_prior_to_removal(self): self.db.collection.create_index([('value', 1)], expireAfterSeconds=0) self.db.collection.insert_one({'value': datetime.utcnow()}) self.db.collection.drop_index('value_1') self.assertEqual(self.db.collection.count_documents({}), 0) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__create_indexes_with_expireAfterSeconds(self): indexes = [ pymongo.operations.IndexModel([('value', pymongo.ASCENDING)], expireAfterSeconds=5), ] index_names = self.db.collection.create_indexes(indexes) self.assertEqual(1, len(index_names)) self.db.collection.insert_one({'value': datetime.utcnow() - timedelta(seconds=5)}) self.assertEqual(self.db.collection.count_documents({}), 0) def test__create_indexes_wrong_type(self): indexes = [('value', 1), ('name', 1)] with self.assertRaises(TypeError): self.db.collection.create_indexes(indexes) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__create_indexes_uniq_idxs(self): indexes = [ pymongo.operations.IndexModel([('value', pymongo.ASCENDING)], unique=True), pymongo.operations.IndexModel([('name', pymongo.ASCENDING)], unique=True) ] index_names = self.db.collection.create_indexes(indexes) self.assertEqual(2, len(index_names)) self.db.collection.insert_one({'value': 1, 'name': 'bob'}) # Ensure both uniq indexes have been created with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'value': 1, 'name': 'different'}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'value': 0, 'name': 'bob'}) self.assertEqual(self.db.collection.count_documents({}), 1) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__create_indexes_names(self): indexes = [ pymongo.operations.IndexModel([('value', pymongo.ASCENDING)], 
name='index_name'), pymongo.operations.IndexModel([('name', pymongo.ASCENDING)], unique=True) ] index_names = self.db.collection.create_indexes(indexes) self.assertEqual(['index_name', 'name_1'], index_names) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__ensure_uniq_idxs_with_ascending_ordering(self): self.db.collection.create_index([('value', pymongo.ASCENDING)], unique=True) self.db.collection.insert_one({'value': 1}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'value': 1}) self.assertEqual(self.db.collection.count_documents({}), 1) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__ensure_uniq_idxs_with_descending_ordering(self): self.db.collection.create_index([('value', pymongo.DESCENDING)], unique=True) self.db.collection.insert_one({'value': 1}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'value': 1}) self.assertEqual(self.db.collection.count_documents({}), 1) def test__ensure_uniq_idxs_on_nested_field(self): self.db.collection.create_index([('a.b', 1)], unique=True) self.db.collection.insert_one({'a': 1}) self.db.collection.insert_one({'a': {'b': 1}}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'a': {'b': 1}}) self.assertEqual(self.db.collection.count_documents({}), 2) def test__ensure_sparse_uniq_idxs_on_nested_field(self): self.db.collection.create_index([('a.b', 1)], unique=True, sparse=True) self.db.collection.create_index([('c', 1)], unique=True, sparse=True) self.db.collection.insert_one({}) self.db.collection.insert_one({}) self.db.collection.insert_one({'c': 1}) self.db.collection.insert_one({'a': 1}) self.db.collection.insert_one({'a': {'b': 1}}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'a': {'b': 1}}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'c': 1}) 
self.assertEqual(self.db.collection.count_documents({}), 5) def test__ensure_partial_filter_expression_unique_index(self): self.db.collection.delete_many({}) self.db.collection.create_index( (('partialFilterExpression_value', 1), ('value', 1)), unique=True, partialFilterExpression={'partialFilterExpression_value': {'$eq': 1}}) # We should be able to add documents with duplicated `value` and # `partialFilterExpression_value` if `partialFilterExpression_value` isn't set to 1 self.db.collection.insert_one({'partialFilterExpression_value': 3, 'value': 4}) self.db.collection.insert_one({'partialFilterExpression_value': 3, 'value': 4}) # We should be able to add documents with distinct `value` values and duplicated # `partialFilterExpression_value` value set to 1. self.db.collection.insert_one({'partialFilterExpression_value': 1, 'value': 2}) self.db.collection.insert_one({'partialFilterExpression_value': 1, 'value': 3}) # We should not be able to add documents with duplicated `partialFilterExpression_value` and # `value` values if `partialFilterExpression_value` is 1. 
with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'partialFilterExpression_value': 1, 'value': 3}) self.assertEqual(self.db.collection.count_documents({}), 4) def test__ensure_uniq_idxs_without_ordering(self): self.db.collection.create_index([('value', 1)], unique=True) self.db.collection.insert_one({'value': 1}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'value': 1}) self.assertEqual(self.db.collection.count_documents({}), 1) def test__insert_empty_doc_uniq_idx(self): self.db.collection.create_index([('value', 1)], unique=True) self.db.collection.insert_one({'value': 1}) self.db.collection.insert_one({}) self.assertEqual(self.db.collection.count_documents({}), 2) def test__insert_empty_doc_twice_uniq_idx(self): self.db.collection.create_index([('value', 1)], unique=True) self.db.collection.insert_one({}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({}) self.assertEqual(self.db.collection.count_documents({}), 1) def test_sparse_unique_index(self): self.db.collection.create_index([('value', 1)], unique=True, sparse=True) self.db.collection.insert_one({}) self.db.collection.insert_one({}) self.db.collection.insert_one({'value': None}) self.db.collection.insert_one({'value': None}) self.assertEqual(self.db.collection.count_documents({}), 4) def test_unique_index_with_upsert_insertion(self): self.db.collection.create_index([('value', 1)], unique=True) self.db.collection.insert_one({'_id': 1, 'value': 1}) # Updating document should not trigger error self.db.collection.replace_one({'_id': 1}, {'value': 1}) self.db.collection.replace_one({'value': 1}, {'value': 1}, upsert=True) # Creating new documents with same value should with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'value': 1}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.replace_one({'bad': 'condition'}, {'value': 1}, upsert=True) with 
self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'_id': 2, 'value': 1}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.update_one({'_id': 2}, {'$set': {'value': 1}}, upsert=True) def test_unique_index_with_update(self): self.db.collection.create_index([('value', 1)], unique=True) self.db.collection.insert_one({'_id': 1, 'value': 1}) self.db.collection.insert_one({'_id': 2, 'value': 2}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.replace_one({'value': 1}, {'value': 2}) def test_unique_index_with_update_on_nested_field(self): self.db.collection.create_index([('a.b', 1)], unique=True) self.db.collection.insert_one({'_id': 1, 'a': {'b': 1}}) self.db.collection.insert_one({'_id': 2, 'a': {'b': 2}}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.update_one({'_id': 1}, {'$set': {'a.b': 2}}) def test_unique_index_on_dict(self): self.db.collection.insert_one({'_id': 1, 'a': {'b': 1}}) self.db.collection.insert_one({'_id': 2, 'a': {'b': 2}}) self.db.collection.create_index([('a', 1)], unique=True) self.db.collection.insert_one({'_id': 3, 'a': {'b': 3}}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'_id': 4, 'a': {'b': 2}}) def test_sparse_unique_index_dup(self): self.db.collection.create_index([('value', 1)], unique=True, sparse=True) self.db.collection.insert_one({'value': 'a'}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.insert_one({'value': 'a'}) self.assertEqual(self.db.collection.count_documents({}), 1) def test__create_uniq_idxs_with_dupes_already_there(self): self.db.collection.insert_one({'value': 1}) self.db.collection.insert_one({'value': 1}) with self.assertRaises(mongomock.DuplicateKeyError): self.db.collection.create_index([('value', 1)], unique=True) self.db.collection.insert_one({'value': 1}) self.assertEqual(self.db.collection.count_documents({}), 3) @skipIf(not helpers.HAVE_PYMONGO, 
'pymongo not installed') def test__create_index_with_name(self): name = self.db.collection.create_index([('value', 1)], name='index_name') self.assertEqual('index_name', name) self.db.collection.create_index([('value', 1)], name='index_name') self.assertEqual({'_id_', 'index_name'}, set(self.db.collection.index_information().keys())) def test__insert_empty_doc_idx_information(self): self.db.collection.insert_one({}) index_information = self.db.collection.index_information() self.assertEqual( {'_id_': {'v': 2, 'key': [('_id', 1)]}}, index_information, ) self.assertEqual( [{'name': '_id_', 'key': {'_id': 1}, 'v': 2}], list(self.db.collection.list_indexes()), ) del index_information['_id_'] self.assertEqual( {'_id_': {'v': 2, 'key': [('_id', 1)]}}, self.db.collection.index_information(), msg='index_information is immutable', ) def test__empty_table_idx_information(self): self.db.collection.drop() index_information = self.db.collection.index_information() self.assertEqual({}, index_information) def test__create_idx_information(self): index = self.db.collection.create_index([('value', 1)]) self.db.collection.insert_one({}) self.assertDictEqual( { 'key': [('value', 1)], 'v': 2, }, self.db.collection.index_information()[index]) self.assertEqual({'_id_', index}, set(self.db.collection.index_information().keys())) self.db.collection.drop_index(index) self.assertEqual({'_id_'}, set(self.db.collection.index_information().keys())) def test__drop_index_not_found(self): with self.assertRaises(mongomock.OperationFailure): self.db.collection.drop_index('unknownIndex') @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__create_unique_idx_information_with_ascending_ordering(self): index = self.db.collection.create_index([('value', pymongo.ASCENDING)], unique=True) self.db.collection.insert_one({'value': 1}) self.assertDictEqual( { 'key': [('value', pymongo.ASCENDING)], 'unique': True, 'v': 2, }, self.db.collection.index_information()[index]) @skipIf(not 
helpers.HAVE_PYMONGO, 'pymongo not installed') def test__create_unique_idx_information_with_descending_ordering(self): index = self.db.collection.create_index([('value', pymongo.DESCENDING)], unique=True) self.db.collection.insert_one({'value': 1}) self.assertDictEqual( self.db.collection.index_information()[index], { 'key': [('value', pymongo.DESCENDING)], 'unique': True, 'v': 2, }) def test__set_with_positional_operator(self): """Real mongodb support positional operator $ for $set operation""" base_document = {'int_field': 1, 'list_field': [{'str_field': 'a'}, {'str_field': 'b'}, {'str_field': 'c'}]} self.db.collection.insert_one(base_document) self.db.collection.update_one( {'int_field': 1, 'list_field.str_field': 'b'}, {'$set': {'list_field.$.marker': True}}) expected_document = copy.deepcopy(base_document) expected_document['list_field'][1]['marker'] = True self.assertEqual(list(self.db.collection.find()), [expected_document]) self.db.collection.update_one( {'int_field': 1, 'list_field.str_field': 'a'}, {'$set': {'list_field.$.marker': True}}) self.db.collection.update_one( {'int_field': 1, 'list_field.str_field': 'c'}, {'$set': {'list_field.$.marker': True}}) expected_document['list_field'][0]['marker'] = True expected_document['list_field'][2]['marker'] = True self.assertEqual(list(self.db.collection.find()), [expected_document]) def test__set_replace_subdocument(self): base_document = { 'int_field': 1, 'list_field': [ {'str_field': 'a'}, {'str_field': 'b', 'int_field': 1}, {'str_field': 'c'} ]} new_subdoc = {'str_field': 'x'} self.db.collection.insert_one(base_document) self.db.collection.update_one( {'int_field': 1}, {'$set': {'list_field.1': new_subdoc}}) self.db.collection.update_one( {'int_field': 1, 'list_field.2.str_field': 'c'}, {'$set': {'list_field.2': new_subdoc}}) expected_document = copy.deepcopy(base_document) expected_document['list_field'][1] = new_subdoc expected_document['list_field'][2] = new_subdoc 
self.assertEqual(list(self.db.collection.find()), [expected_document]) def test__set_replace_subdocument_positional_operator(self): base_document = { 'int_field': 1, 'list_field': [ {'str_field': 'a'}, {'str_field': 'b', 'int_field': 1}, {'str_field': 'c'} ]} new_subdoc = {'str_field': 'x'} self.db.collection.insert_one(base_document) self.db.collection.update_one( {'int_field': 1, 'list_field.str_field': 'b'}, {'$set': {'list_field.$': new_subdoc}}) expected_document = copy.deepcopy(base_document) expected_document['list_field'][1] = new_subdoc self.assertEqual(list(self.db.collection.find()), [expected_document]) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') @skipIf( helpers.PYMONGO_VERSION and helpers.PYMONGO_VERSION >= version.parse('4.0'), 'find_and_modify was removed in pymongo v4') def test__find_and_modify_with_sort(self): self.db.collection.insert_one({'time_check': float(time.time())}) self.db.collection.insert_one({'time_check': float(time.time())}) self.db.collection.insert_one({'time_check': float(time.time())}) start_check_time = float(time.time()) self.db.collection.find_and_modify( {'time_check': {'$lt': start_check_time}}, {'$set': {'time_check': float(time.time()), 'checked': True}}, sort=[('time_check', pymongo.ASCENDING)]) sorted_records = sorted(list(self.db.collection.find()), key=lambda x: x['time_check']) self.assertEqual(sorted_records[-1]['checked'], True) self.db.collection.find_and_modify( {'time_check': {'$lt': start_check_time}}, {'$set': {'time_check': float(time.time()), 'checked': True}}, sort=[('time_check', pymongo.ASCENDING)]) self.db.collection.find_and_modify( {'time_check': {'$lt': start_check_time}}, {'$set': {'time_check': float(time.time()), 'checked': True}}, sort=[('time_check', pymongo.ASCENDING)]) expected = list(filter(lambda x: 'checked' in x, list(self.db.collection.find()))) self.assertEqual(self.db.collection.count_documents({}), len(expected)) self.assertEqual( list(self.db.collection.find({'checked': 
True})), list(self.db.collection.find())) def test__cursor_sort_kept_after_clone(self): self.db.collection.insert_one({'time_check': float(time.time())}) self.db.collection.insert_one({'time_check': float(time.time())}) self.db.collection.insert_one({'time_check': float(time.time())}) cursor = self.db.collection.find({}, sort=[('time_check', -1)]) cursor2 = cursor.clone() cursor3 = self.db.collection.find({}) cursor3.sort([('time_check', -1)]) cursor4 = cursor3.clone() cursor_result = list(cursor) cursor2_result = list(cursor2) cursor3_result = list(cursor3) cursor4_result = list(cursor4) self.assertEqual(cursor2_result, cursor_result) self.assertEqual(cursor3_result, cursor_result) self.assertEqual(cursor4_result, cursor_result) @skipIf(sys.version_info < (3, 7), 'Older versions of Python cannot copy regex partterns') @skipIf( helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 or above do not specify uuid encoding') def test__sort_mixed_types(self): self.db.collection.insert_many([ {'type': 'bool', 'a': True}, {'type': 'datetime', 'a': datetime.now()}, {'type': 'dict', 'a': {'a': 1}}, {'type': 'emptyList', 'a': []}, {'type': 'int', 'a': 1}, {'type': 'listOfList', 'a': [[1, 2], [3, 4]]}, {'type': 'missing'}, {'type': 'None', 'a': None}, {'type': 'ObjectId', 'a': ObjectId()}, {'type': 'regex', 'a': re.compile('a')}, {'type': 'repeatedInt', 'a': [1, 2]}, {'type': 'string', 'a': 'a'}, {'type': 'tupleOfTuple', 'a': ((1, 2), (3, 4))}, {'type': 'uuid', 'a': uuid.UUID(int=3)}, ]) cursor = self.db.collection.find({}, sort=[('a', 1), ('type', 1)]) self.assertEqual([ 'emptyList', 'None', 'missing', 'int', 'repeatedInt', 'string', 'dict', 'listOfList', 'tupleOfTuple', 'uuid', 'ObjectId', 'bool', 'datetime', 'regex', ], [doc['type'] for doc in cursor]) @skipIf( helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 or above do not specify uuid encoding') def test__sort_by_uuid(self): self.db.collection.insert_many([ {'_id': uuid.UUID(int=3), 'timestamp': 99, 
'a': 1}, {'_id': uuid.UUID(int=1), 'timestamp': 100, 'a': 2}, {'_id': uuid.UUID(int=2), 'timestamp': 100, 'a': 3}, ]) cursor = self.db.collection.find({}, sort=[('timestamp', 1), ('_id', 1)]) self.assertEqual([1, 2, 3], [doc['a'] for doc in cursor]) def test__avoid_change_data_after_set(self): test_data = {'test': ['test_data']} self.db.collection.insert_one({'_id': 1}) self.db.collection.update_one({'_id': 1}, {'$set': test_data}) self.db.collection.update_one( {'_id': 1}, {'$addToSet': {'test': 'another_one'}}) data_in_db = self.db.collection.find_one({'_id': 1}) self.assertNotEqual(data_in_db['test'], test_data['test']) self.assertEqual(len(test_data['test']), 1) self.assertEqual(len(data_in_db['test']), 2) def test__filter_with_ne(self): self.db.collection.insert_one({'_id': 1, 'test_list': [{'data': 'val'}]}) data_in_db = self.db.collection.find( {'test_list.marker_field': {'$ne': True}}) self.assertEqual( list(data_in_db), [{'_id': 1, 'test_list': [{'data': 'val'}]}]) def test__filter_with_ne_none(self): self.db.collection.insert_many([ {'_id': 1, 'field1': 'baz', 'field2': 'bar'}, {'_id': 2, 'field1': 'baz'}, {'_id': 3, 'field1': 'baz', 'field2': None}, {'_id': 4, 'field1': 'baz', 'field2': False}, {'_id': 5, 'field1': 'baz', 'field2': 0}, ]) data_in_db = self.db.collection.find({'field1': 'baz', 'field2': {'$ne': None}}) self.assertEqual([1, 4, 5], [d['_id'] for d in data_in_db]) def test__filter_unknown_top_level(self): with self.assertRaises(mongomock.OperationFailure) as error: self.db.collection.find_one({'$and': [{'$ne': False}]}) self.assertEqual('unknown top level operator: $ne', str(error.exception)) def test__filter_unknown_op(self): with self.assertRaises(mongomock.OperationFailure) as error: self.db.collection.find_one({'a': {'$foo': 3}}) self.assertEqual('unknown operator: $foo', str(error.exception)) def test__filter_on_dict(self): self.db.collection.insert_one({'doc': {}}) self.assertTrue(self.db.collection.find_one({'doc': {}})) def 
test__find_or(self): self.db.collection.insert_many([ {'x': 4}, {'x': [2, 4, 6, 8]}, {'x': [2, 3, 5, 7]}, {'x': {}}, ]) self.assertEqual( [4, [2, 4, 6, 8], [2, 3, 5, 7]], [d['x'] for d in self.db.collection.find({'$or': [{'x': 4}, {'x': 2}]})]) def test__find_with_max_time_ms(self): self.db.collection.insert_many([{'x': 1}, {'x': 2}]) self.assertEqual( [1, 2], [d['x'] for d in self.db.collection.find({}, max_time_ms=1000)]) with self.assertRaises(TypeError): self.db.collection.find({}, max_time_ms='1000') def test__find_and_project_3_level_deep_nested_field(self): self.db.collection.insert_one({'_id': 1, 'a': {'b': {'c': 2}}}) data_in_db = self.db.collection.find(projection=['a.b.c']) self.assertEqual( list(data_in_db), [{'_id': 1, 'a': {'b': {'c': 2}}}]) def test__find_and_project_wrong_types(self): self.db.collection.insert_one({'_id': 1, 'a': {'b': {'c': 2}}}) with self.assertRaises(TypeError): self.db.collection.find_one({}, projection=[{'a': {'b': {'c': 1}}}]) def test__find_projection_with_subdoc_lists(self): doc = {'a': 1, 'b': [{'c': 2, 'd': 3, 'e': 4}, {'c': 5, 'd': 6, 'e': 7}]} self.db.collection.insert_one(doc) result = self.db.collection.find_one({'a': 1}, {'a': 1, 'b': 1}) self.assertEqual(result, doc) result = self.db.collection.find_one({'a': 1}, {'_id': 0, 'a': 1, 'b.c': 1, 'b.d': 1}) self.assertEqual(result, {'a': 1, 'b': [{'c': 2, 'd': 3}, {'c': 5, 'd': 6}]}) result = self.db.collection.find_one({'a': 1}, {'_id': 0, 'a': 0, 'b.c': 0, 'b.e': 0}) self.assertEqual(result, {'b': [{'d': 3}, {'d': 6}]}) # Test that a projection that does not fit the document does not result in an error result = self.db.collection.find_one({'a': 1}, {'_id': 0, 'a': 1, 'b.c.f': 1}) self.assertEqual(result, {'a': 1, 'b': [{}, {}]}) def test__find_projection_with_subdoc_lists_refinements(self): doc = {'a': 1, 'b': [{'c': 2, 'd': 3, 'e': 4}, {'c': 5, 'd': 6, 'e': 7}]} self.db.collection.insert_one(doc) with self.assertRaises(mongomock.OperationFailure): 
self.db.collection.find_one( {'a': 1}, collections.OrderedDict([('a', 1), ('b.c', 1), ('b', 1)])) with self.assertRaises(mongomock.OperationFailure): self.db.collection.find_one( {'a': 1}, collections.OrderedDict([('_id', 0), ('a', 1), ('b', 1), ('b.c', 1)])) with self.assertRaises(mongomock.OperationFailure): self.db.collection.find_one( {'a': 1}, collections.OrderedDict([('_id', 0), ('a', 0), ('b', 0), ('b.c', 0)])) # This one is tricky: the refinement 'b' overrides the previous 'b.c' # but it is not the equivalent of having only 'b'. with self.assertRaises(NotImplementedError): self.db.collection.find_one( {'a': 1}, collections.OrderedDict([('_id', 0), ('a', 0), ('b.c', 0), ('b', 0)])) def test__find_and_project(self): self.db.collection.insert_one({'_id': 1, 'a': 42, 'b': 'other', 'c': {'d': 'nested'}}) self.assertEqual( [{'_id': 1, 'a': 42}], list(self.db.collection.find({}, projection={'a': 1}))) self.assertEqual( [{'_id': 1, 'a': 42}], list(self.db.collection.find({}, projection={'a': '1'}))) self.assertEqual( [{'_id': 1, 'a': 42}], list(self.db.collection.find({}, projection={'a': '0'}))) self.assertEqual( [{'_id': 1, 'a': 42}], list(self.db.collection.find({}, projection={'a': 'other'}))) self.assertEqual( [{'_id': 1, 'b': 'other', 'c': {'d': 'nested'}}], list(self.db.collection.find({}, projection={'a': 0}))) self.assertEqual( [{'_id': 1, 'b': 'other', 'c': {'d': 'nested'}}], list(self.db.collection.find({}, projection={'a': False}))) def test__find_and_project_positional(self): self.db.collection.insert_one({'_id': 1, 'a': [{'b': 1}, {'b': 2}]}) with self.assertRaises(mongomock.OperationFailure): self.db.collection.find_one({'a.b': {'$exists': True}}, projection={'a.$.b': 0}) with self.assertRaises(NotImplementedError): self.db.collection.find_one({'a.b': {'$exists': True}}, projection={'a.$.b': 1}) def test__find_dict_in_nested_list(self): self.db.collection.insert_one({'a': {'b': [{'c': 1}]}}) self.assertTrue(self.db.collection.find_one({'a.b': {'c': 
1}})) def test__find_in_not_a_list(self): self.db.collection.insert_one({'a': 'a'}) with self.assertRaises(mongomock.OperationFailure): self.db.collection.find_one({'a': {'$in': 'not a list'}}) def test__with_options(self): self.db.collection.with_options(read_preference=None) self.db.collection.with_options(write_concern=self.db.collection.write_concern) self.db.collection.with_options(write_concern=WriteConcern(w=1)) self.db.collection.with_options(read_concern=self.db.collection.read_concern) self.db.collection.with_options(read_concern=ReadConcern(level='local')) def test__with_options_different_write_concern(self): self.db.collection.insert_one({'name': 'col1'}) col2 = self.db.collection.with_options(write_concern=WriteConcern(w=2)) col2.insert_one({'name': 'col2'}) # Check that the two objects have the same data. self.assertEqual({'col1', 'col2'}, {d['name'] for d in self.db.collection.find()}) self.assertEqual({'col1', 'col2'}, {d['name'] for d in col2.find()}) # Check that each object has its own write concern. self.assertEqual({}, self.db.collection.write_concern.document) self.assertNotEqual(self.db.collection.write_concern, col2.write_concern) self.assertEqual({'w': 2}, col2.write_concern.document) def test__with_options_different_read_concern(self): self.db.collection.insert_one({'name': 'col1'}) col2 = self.db.collection.with_options(read_concern=ReadConcern(level='majority')) col2.insert_one({'name': 'col2'}) # Check that the two objects have the same data. self.assertEqual({'col1', 'col2'}, {d['name'] for d in self.db.collection.find()}) self.assertEqual({'col1', 'col2'}, {d['name'] for d in col2.find()}) # Check that each object has its own read concern. 
self.assertEqual({}, self.db.collection.read_concern.document) self.assertNotEqual(self.db.collection.read_concern, col2.read_concern) self.assertEqual({'level': 'majority'}, col2.read_concern.document) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__with_options_different_read_preference(self): self.db.collection.insert_one({'name': 'col1'}) col2 = self.db.collection.with_options(read_preference=ReadPreference.NEAREST) col2.insert_one({'name': 'col2'}) # Check that the two objects have the same data. self.assertEqual({'col1', 'col2'}, {d['name'] for d in self.db.collection.find()}) self.assertEqual({'col1', 'col2'}, {d['name'] for d in col2.find()}) # Check that each object has its own read preference self.assertEqual('primary', self.db.collection.read_preference.mongos_mode) self.assertNotEqual(self.db.collection.read_preference, col2.read_preference) self.assertEqual('nearest', col2.read_preference.mongos_mode) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__codec_options(self): self.assertEqual(codec_options.CodecOptions(), self.db.collection.codec_options) self.db.collection.with_options(codec_options.CodecOptions()) def test__codec_options_without_pymongo(self): self.assertEqual(self.db.collection.codec_options, self.db.codec_options) def test__with_options_wrong_kwarg(self): self.assertRaises(TypeError, self.db.collection.with_options, red_preference=None) def test__with_options_not_implemented(self): _CodecOptions = collections.namedtuple( 'CodecOptions', ['document_class', 'tz_aware', 'uuid_representation']) with self.assertRaises(NotImplementedError): self.db.collection.with_options(codec_options=_CodecOptions(None, True, 3)) def test__with_options_wrong_type(self): with self.assertRaises(TypeError): self.db.collection.with_options(write_concern=1) def test__update_current_date(self): for type_specification in [True, {'$type': 'date'}]: self.db.collection.update_one( {}, {'$currentDate': {'updated_at': 
type_specification}}, upsert=True) self.assertIsInstance( self.db.collection.find_one({})['updated_at'], datetime) def test_datetime_precision(self): too_precise_dt = datetime(2000, 1, 1, 12, 30, 30, 123456) mongo_dt = datetime(2000, 1, 1, 12, 30, 30, 123000) objid = self.db.collection.insert_one( {'date_too_precise': too_precise_dt, 'date': mongo_dt}).inserted_id self.assert_document_count(1) # Given both date are equivalent, we can mix them self.db.collection.update_one( {'date_too_precise': mongo_dt, 'date': too_precise_dt}, {'$set': {'new_date_too_precise': too_precise_dt, 'new_date': mongo_dt}}, upsert=True ) self.assert_document_count(1) doc = self.db.collection.find_one({ 'new_date_too_precise': mongo_dt, 'new_date': too_precise_dt}) assert doc == { '_id': objid, 'date_too_precise': mongo_dt, 'date': mongo_dt, 'new_date_too_precise': mongo_dt, 'new_date': mongo_dt } self.db.collection.delete_one({ 'new_date_too_precise': mongo_dt, 'new_date': too_precise_dt}) self.assert_document_count(0) def test__mix_tz_naive_aware(self): utc2tz = UTCPlus2() naive = datetime(1999, 12, 31, 22) aware = datetime(2000, 1, 1, tzinfo=utc2tz) self.db.collection.insert_one({'date_aware': aware, 'date_naive': naive}) self.assert_document_count(1) # Given both date are equivalent, we can mix them self.db.collection.update_one( {'date_aware': naive, 'date_naive': aware}, {'$set': {'new_aware': aware, 'new_naive': naive}}, upsert=True ) self.assert_document_count(1) self.db.collection.find_one({'new_aware': naive, 'new_naive': aware}) self.db.collection.delete_one({'new_aware': naive, 'new_naive': aware}) self.assert_document_count(0) def test__configure_client_tz_aware(self): for tz_awarness in (True, False): client = mongomock.MongoClient(tz_aware=tz_awarness) db = client['somedb'] utc2tz = UTCPlus2() naive = datetime(2000, 1, 1, 2, 0, 0) aware = datetime(2000, 1, 1, 4, 0, 0, tzinfo=utc2tz) if tz_awarness: returned = datetime(2000, 1, 1, 2, 0, 0, tzinfo=helpers.utc) else: returned = 
datetime(2000, 1, 1, 2, 0, 0) objid = db.collection.insert_one({'date_aware': aware, 'date_naive': naive}).inserted_id objs = list(db.collection.find()) self.assertEqual(objs, [{'_id': objid, 'date_aware': returned, 'date_naive': returned}]) if tz_awarness: self.assertEqual('UTC', returned.tzinfo.tzname(returned)) self.assertEqual(timedelta(0), returned.tzinfo.utcoffset(returned)) self.assertEqual(timedelta(0), returned.tzinfo.dst(returned)) self.assertEqual((timedelta(0), 'UTC'), returned.tzinfo.__getinitargs__()) # Given both date are equivalent, we can mix them db.collection.update_one( {'date_aware': naive, 'date_naive': aware}, {'$set': {'new_aware': aware, 'new_naive': naive}}, upsert=True ) objs = list(db.collection.find()) self.assertEqual(objs, [ {'_id': objid, 'date_aware': returned, 'date_naive': returned, 'new_aware': returned, 'new_naive': returned} ], msg=tz_awarness) ret = db.collection.find_one({'new_aware': naive, 'new_naive': aware}) self.assertEqual(ret, objs[0], msg=tz_awarness) num = db.collection.count_documents({'date_naive': {'$gte': aware}}) self.assertEqual(1, num, msg=tz_awarness) objs = list(db.collection.aggregate([{'$match': {'date_naive': {'$gte': aware}}}])) self.assertEqual(1, len(objs), msg=tz_awarness) db.collection.delete_one({'new_aware': naive, 'new_naive': naive}) objs = list(db.collection.find()) self.assertFalse(objs, msg=tz_awarness) def test__list_of_dates(self): client = mongomock.MongoClient(tz_aware=True) client.db.collection.insert_one({'dates': [datetime.now(), datetime.now()]}) dates = client.db.collection.find_one()['dates'] self.assertTrue(dates[0].tzinfo) self.assertEqual(dates[0].tzinfo, dates[1].tzinfo) @skipIf(helpers.HAVE_PYMONGO, 'pymongo installed') def test__current_date_timestamp_requires_pymongo(self): with self.assertRaises(NotImplementedError): self.db.collection.update_one( {}, {'$currentDate': { 'updated_at': {'$type': 'timestamp'}, 'updated_again': {'$type': 'timestamp'}, }}, upsert=True) @skipIf(not 
helpers.HAVE_PYMONGO, 'pymongo not installed') def test__current_date_timestamp(self): before = datetime.now(tz_util.utc) - timedelta(seconds=1) self.db.collection.update_one( {}, {'$currentDate': { 'updated_at': {'$type': 'timestamp'}, 'updated_again': {'$type': 'timestamp'}, }}, upsert=True) after = datetime.now(tz_util.utc) doc = self.db.collection.find_one() self.assertTrue(doc.get('updated_at')) self.assertTrue(doc.get('updated_again')) self.assertNotEqual(doc['updated_at'], doc['updated_again']) self.assertLessEqual(before, doc['updated_at'].as_datetime()) self.assertLessEqual(doc['updated_at'].as_datetime(), after) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__insert_zero_timestamp(self): self.db.collection.drop() before = datetime.now(tz_util.utc) - timedelta(seconds=1) self.db.collection.insert_one({'zero': Timestamp(0, 0)}) after = datetime.now(tz_util.utc) doc = self.db.collection.find_one() self.assertLessEqual(before, doc['zero'].as_datetime()) self.assertLessEqual(doc['zero'].as_datetime(), after) def test__rename_collection(self): self.db.collection.insert_one({'_id': 1, 'test_list': [{'data': 'val'}]}) coll = self.db.collection coll.rename('other_name') self.assertEqual('collection', coll.name) self.assertEqual( set(['other_name']), set(self.db.list_collection_names())) self.assertNotEqual(coll, self.db.other_name) self.assertEqual([], list(coll.find())) data_in_db = self.db.other_name.find() self.assertEqual( [({'_id': 1, 'test_list': [{'data': 'val'}]})], list(data_in_db)) def test__rename_collection_to_bad_names(self): coll = self.db.create_collection('a') self.assertRaises(TypeError, coll.rename, ['a']) self.assertRaises(mongomock.InvalidName, coll.rename, '.a') self.assertRaises(mongomock.InvalidName, coll.rename, '$a') def test__rename_collection_already_exists(self): coll = self.db.create_collection('a') self.db.create_collection('c') self.assertRaises(mongomock.OperationFailure, coll.rename, 'c') def 
test__rename_collection_drop_target(self): coll = self.db.create_collection('a') self.db.create_collection('c') coll.rename('c', dropTarget=True) self.assertEqual(set(['c']), set(self.db.list_collection_names())) def test__cursor_rewind(self): coll = self.db.create_collection('a') coll.insert_one({'a': 1}) coll.insert_one({'a': 2}) coll.insert_one({'a': 3}) curs = coll.find().sort('a') self.assertEqual(next(curs)['a'], 1) self.assertEqual(next(curs)['a'], 2) curs.rewind() self.assertEqual(next(curs)['a'], 1) self.assertEqual(next(curs)['a'], 2) def test__cursor_sort(self): coll = self.db.create_collection('a') coll.insert_many([{'a': 1}, {'a': 3}, {'a': 2}]) self.assertEqual([1, 2, 3], [doc['a'] for doc in coll.find().sort('a')]) self.assertEqual([3, 2, 1], [doc['a'] for doc in coll.find().sort('a', -1)]) self.assertEqual([1, 3, 2], [doc['a'] for doc in coll.find().sort('$natural', 1)]) self.assertEqual([2, 3, 1], [doc['a'] for doc in coll.find().sort('$natural', -1)]) with self.assertRaises(NotImplementedError) as err: list(coll.find().sort('$text_score')) self.assertIn('$text_score', str(err.exception)) cursor = coll.find() with self.assertRaises(ValueError) as err: cursor.sort([]) self.assertIn('empty list', str(err.exception)) def test__cursor_sort_composed(self): coll = self.db.create_collection('a') coll.insert_many([ {'_id': 1, 'a': 1, 'b': 2}, {'_id': 2, 'a': 1, 'b': 0}, {'_id': 3, 'a': 2, 'b': 1}, ]) self.assertEqual( [2, 1, 3], [doc['_id'] for doc in coll.find().sort((('a', 1), ('b', 1)))]) self.assertEqual( [1, 2, 3], [doc['_id'] for doc in coll.find().sort((('a', 1), ('b', -1)))]) self.assertEqual( [2, 3, 1], [doc['_id'] for doc in coll.find().sort((('b', 1), ('a', 1)))]) def test__cursor_sort_projection(self): col = self.db.col col.insert_many([{'a': 1, 'b': 1}, {'a': 3, 'b': 3}, {'a': 2, 'b': 2}]) self.assertEqual([1, 2, 3], [doc['b'] for doc in col.find().sort('a')]) self.assertEqual([1, 2, 3], [doc['b'] for doc in 
col.find(projection=['b']).sort('a')]) def test__cursor_sort_dicts(self): col = self.db.col col.insert_many([ {'_id': 1, 'b': {'value': 1}}, {'_id': 2, 'b': {'value': 3}}, {'_id': 3, 'b': {'value': 2}}, ]) self.assertEqual([1, 3, 2], [doc['_id'] for doc in col.find().sort('b')]) def test__cursor_max_time_ms(self): col = self.db.col col.find().max_time_ms(15) col.find().max_time_ms(None) with self.assertRaises(TypeError): col.find().max_time_ms(3.4) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__bulk_write_insert_one(self): operations = [pymongo.InsertOne({'a': 1, 'b': 2})] result = self.db.collection.bulk_write(operations) self.assert_document_count(1) doc = next(self.db.collection.find({})) self.assert_document_stored(doc['_id'], {'a': 1, 'b': 2}) self.assertIsInstance(result, mongomock.results.BulkWriteResult) self.assertEqual(result.bulk_api_result, { 'nModified': 0, 'nUpserted': 0, 'nMatched': 0, 'writeErrors': [], 'upserted': [], 'writeConcernErrors': [], 'nRemoved': 0, 'nInserted': 1}) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__bulk_write_update_one(self): # Upsert == False self.db.collection.insert_one({'a': 1}) operations = [pymongo.UpdateOne({'a': 1}, {'$set': {'a': 2}})] result = self.db.collection.bulk_write(operations) docs = list(self.db.collection.find({'a': 2})) self.assertEqual(len(docs), 1) self.assertIsInstance(result, mongomock.results.BulkWriteResult) self.assertEqual(result.bulk_api_result, { 'nModified': 1, 'nUpserted': 0, 'nMatched': 1, 'writeErrors': [], 'upserted': [], 'writeConcernErrors': [], 'nRemoved': 0, 'nInserted': 0}) # Upsert == True operations = [pymongo.UpdateOne({'a': 1}, {'$set': {'a': 3}}, upsert=True)] result = self.db.collection.bulk_write(operations) docs = list(self.db.collection.find({'a': 3})) self.assertEqual(len(docs), 1) self.assertIsInstance(result, mongomock.results.BulkWriteResult) self.assertEqual(result.bulk_api_result, { 'nModified': 0, 'nUpserted': 1, 'nMatched': 
0, 'writeErrors': [], 'writeConcernErrors': [], 'upserted': [{'_id': docs[0]['_id'], 'index': 0}], 'nRemoved': 0, 'nInserted': 0}) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__bulk_write_update_many(self): # Upsert == False self.db.collection.insert_one({'a': 1, 'b': 1}) self.db.collection.insert_one({'a': 1, 'b': 0}) operations = [pymongo.UpdateMany({'a': 1}, {'$set': {'b': 2}})] result = self.db.collection.bulk_write(operations) docs = list(self.db.collection.find({'b': 2})) self.assertEqual(len(docs), 2) self.assertIsInstance(result, mongomock.results.BulkWriteResult) self.assertEqual(result.bulk_api_result, { 'nModified': 2, 'nUpserted': 0, 'nMatched': 2, 'writeErrors': [], 'upserted': [], 'writeConcernErrors': [], 'nRemoved': 0, 'nInserted': 0}) # Upsert == True operations = [pymongo.UpdateMany({'a': 2}, {'$set': {'a': 3}}, upsert=True)] result = self.db.collection.bulk_write(operations) docs = list(self.db.collection.find({'a': 3})) self.assertEqual(len(docs), 1) self.assertIsInstance(result, mongomock.results.BulkWriteResult) self.assertEqual(result.bulk_api_result, { 'nModified': 0, 'nUpserted': 1, 'nMatched': 0, 'writeErrors': [], 'writeConcernErrors': [], 'upserted': [{'_id': docs[0]['_id'], 'index': 0}], 'nRemoved': 0, 'nInserted': 0}) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__bulk_write_replace_one(self): # Upsert == False self.db.collection.insert_one({'a': 1, 'b': 0}) operations = [pymongo.ReplaceOne({'a': 1}, {'a': 2})] result = self.db.collection.bulk_write(operations) docs = list(self.db.collection.find({'a': 2})) self.assertEqual(len(docs), 1) doc = docs[0] doc_id = doc['_id'] self.assertEqual(doc, {'_id': doc_id, 'a': 2}) self.assertEqual(result.bulk_api_result, { 'nModified': 1, 'nUpserted': 0, 'nMatched': 1, 'writeErrors': [], 'upserted': [], 'writeConcernErrors': [], 'nRemoved': 0, 'nInserted': 0}) # Upsert == True operations = [pymongo.ReplaceOne({'a': 1}, {'a': 3}, upsert=True)] result = 
self.db.collection.bulk_write(operations) docs = list(self.db.collection.find({'a': 3})) self.assertEqual(len(docs), 1) self.assertIsInstance(result, mongomock.results.BulkWriteResult) self.assertEqual(result.bulk_api_result, { 'nModified': 0, 'nUpserted': 1, 'nMatched': 0, 'writeErrors': [], 'writeConcernErrors': [], 'upserted': [{'_id': docs[0]['_id'], 'index': 0}], 'nRemoved': 0, 'nInserted': 0}) @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 or above') def test__bulk_write_update_id(self): self.db.collection.insert_one({'_id': 1, 'a': 1}) bulk = self.db.collection.initialize_unordered_bulk_op() bulk.add_update({'a': 1}, {'$set': {'a': 2, '_id': 42}}) with self.assertRaises(mongomock.BulkWriteError) as err_context: bulk.execute() self.assertEqual({'_id': 1, 'a': 1}, self.db.collection.find_one()) self.assertEqual( ["After applying the update, the (immutable) field '_id' was found to have been " 'altered to _id: 42'], [e['errmsg'] for e in err_context.exception.details['writeErrors']]) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__bulk_write_delete_one(self): self.db.collection.insert_one({'a': 1}) operations = [pymongo.DeleteOne({'a': 1})] result = self.db.collection.bulk_write(operations) docs = list(self.db.collection.find({})) self.assertEqual(len(docs), 0) self.assertIsInstance(result, mongomock.results.BulkWriteResult) self.assertEqual(result.bulk_api_result, { 'nModified': 0, 'nUpserted': 0, 'nMatched': 0, 'writeErrors': [], 'upserted': [], 'writeConcernErrors': [], 'nRemoved': 1, 'nInserted': 0}) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__bulk_write_delete_many(self): self.db.collection.insert_one({'a': 1}) self.db.collection.insert_one({'a': 1}) operations = [pymongo.DeleteMany({'a': 1})] result = self.db.collection.bulk_write(operations) docs = list(self.db.collection.find({})) self.assertEqual(len(docs), 0) self.assertIsInstance(result, mongomock.results.BulkWriteResult) 
self.assertEqual(result.bulk_api_result, { 'nModified': 0, 'nUpserted': 0, 'nMatched': 0, 'writeErrors': [], 'upserted': [], 'writeConcernErrors': [], 'nRemoved': 2, 'nInserted': 0}) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__bulk_write_matched_count_no_changes(self): self.db.collection.insert_one({'name': 'luke'}) result = self.db.collection.bulk_write([ pymongo.ReplaceOne({'name': 'luke'}, {'name': 'luke'}), ]) self.assertEqual(1, result.matched_count) self.assertEqual(0, result.modified_count) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__bulk_write_matched_count_replace_multiple_objects(self): self.db.collection.insert_one({'name': 'luke'}) self.db.collection.insert_one({'name': 'anna'}) result = self.db.collection.bulk_write([ pymongo.ReplaceOne({'name': 'luke'}, {'name': 'Luke'}), pymongo.ReplaceOne({'name': 'anna'}, {'name': 'anna'}), ]) self.assertEqual(2, result.matched_count) self.assertEqual(1, result.modified_count) def test_find_with_comment(self): self.db.collection.insert_one({'_id': 1}) actual = list(self.db.collection.find({'_id': 1, '$comment': 'test'})) self.assertEqual([{'_id': 1}], actual) def test__find_with_expr(self): self.db.collection.insert_many([ {'_id': 1, 'a': [5]}, {'_id': 2, 'a': [1, 2, 3]}, {'_id': 3, 'a': []}, ]) actual = list(self.db.collection.find({'$expr': {'$eq': [{'$size': ['$a']}, 1]}})) self.assertEqual([{'_id': 1, 'a': [5]}], actual) with self.assertRaises(mongomock.OperationFailure): self.db.collection.insert_one({'_id': 4}) list(self.db.collection.find({'$expr': {'$eq': [{'$size': ['$a']}, 1]}})) def test__find_or_and(self): self.db.collection.insert_many([ {'x': 1, 'y': 1}, {'x': 2, 'y': 2}, ]) search_filter = collections.OrderedDict([ ('$or', [{'x': 1}, {'x': 2}]), ('y', 2), ]) self.assertEqual([2], [d['x'] for d in self.db.collection.find(search_filter)]) def test__aggregate_replace_root(self): self.db.a.insert_many([ {'_id': 1, 'pets': {'dogs': 2, 'cats': 3}}, {'_id': 
2, 'pets': {'hamsters': 3, 'cats': 4}}, ]) actual = self.db.a.aggregate([ {'$replaceRoot': {'newRoot': '$pets'}} ]) self.assertListEqual([ {'dogs': 2, 'cats': 3}, {'hamsters': 3, 'cats': 4} ], list(actual)) def test__aggregate_replace_root_use_dots(self): self.db.a.insert_many([ {'_id': 1, 'pets': {'dogs': 2, 'cats': {'male': 1}}}, {'_id': 2, 'pets': {'hamsters': 3, 'cats': {'female': 5}}}, ]) actual = self.db.a.aggregate([ {'$replaceRoot': {'newRoot': '$pets.cats'}} ]) self.assertListEqual([ {'male': 1}, {'female': 5} ], list(actual)) def test__aggregate_replace_root_non_existing(self): self.db.a.insert_many([ {'_id': 1, 'pets': {'dogs': 2, 'cats': 3}}, {'_id': 2, 'pets': {'hamsters': 3, 'cats': 4}} ]) with self.assertRaises(mongomock.OperationFailure) as err: self.db.a.aggregate([ {'$replaceRoot': { 'newRoot': '$not_here' }} ]) self.assertIn('expression', str(err.exception)) def test__aggregate_replace_root_missing_in_expr(self): self.db.a.insert_many([ {'_id': 1, 'pets': {'dogs': 2, 'cats': 3}}, {'_id': 2, 'pets': {'hamsters': 3, 'cats': 4}}, {'_id': 3, 'pets': {'cats': 5}}, ]) actual = self.db.a.aggregate([ {'$replaceRoot': { 'newRoot': {'dogs': '$pets.dogs', 'hamsters': '$pets.hamsters'}, }} ]) self.assertEqual([ {'dogs': 2}, {'hamsters': 3}, {}, ], list(actual)) def test__aggregate_replace_root_static(self): self.db.a.insert_many([ {'_id': 1, 'pets': {'dogs': 2, 'cats': 3}}, {'_id': 2, 'pets': {'hamsters': 3, 'cats': 4}} ]) actual = self.db.a.aggregate([ {'$replaceRoot': { 'newRoot': {'document': 'new'} }} ]) self.assertListEqual([ {'document': 'new'}, {'document': 'new'} ], list(actual)) def test__aggregate_replace_root_expression(self): self.db.a.insert_many([ {'_id': 1, 'first_name': 'Gary', 'last_name': 'Sheffield', 'city': 'New York'}, {'_id': 2, 'first_name': 'Nancy', 'last_name': 'Walker', 'city': 'Anaheim'}, {'_id': 3, 'first_name': 'Peter', 'last_name': 'Sumner', 'city': 'Toledo'} ]) actual = self.db.a.aggregate([{'$replaceRoot': { 'newRoot': 
{'full_name': {'$concat': ['$first_name', ' ', '$last_name']}}, }}]) self.assertListEqual([ {'full_name': 'Gary Sheffield'}, {'full_name': 'Nancy Walker'}, {'full_name': 'Peter Sumner'} ], list(actual)) def test__aggregate_replace_root_with_array(self): self.db.a.insert_many([ { '_id': 1, 'name': 'Susan', 'phones': [{'cell': '555-653-6527'}, {'home': '555-965-2454'}], }, { '_id': 2, 'name': 'Mark', 'phones': [{'cell': '555-445-8767'}, {'home': '555-322-2774'}], }, ]) actual = self.db.a.aggregate([ {'$unwind': '$phones'}, {'$match': {'phones.cell': {'$exists': True}}}, {'$replaceRoot': {'newRoot': '$phones'}} ]) self.assertListEqual([ {'cell': '555-653-6527'}, {'cell': '555-445-8767'} ], list(actual)) def test__aggregate_replace_root_wrong_options(self): self.db.a.insert_many([ {'_id': 1, 'pets': {'dogs': 2, 'cats': 3}}, {'_id': 2, 'pets': {'hamsters': 3, 'cats': 4}}, ]) with self.assertRaises(mongomock.OperationFailure): self.db.a.aggregate([ {'$replaceRoot': {'new_root': '$pets'}} ]) def test__aggregate_lookup(self): self.db.a.insert_one({'_id': 1, 'arr': [2, 4]}) self.db.b.insert_many([ {'_id': 2, 'should': 'include'}, {'_id': 3, 'should': 'skip'}, {'_id': 4, 'should': 'include'} ]) actual = self.db.a.aggregate([ {'$lookup': { 'from': 'b', 'localField': 'arr', 'foreignField': '_id', 'as': 'b' }} ]) self.assertEqual([{ '_id': 1, 'arr': [2, 4], 'b': [ {'_id': 2, 'should': 'include'}, {'_id': 4, 'should': 'include'} ] }], list(actual)) def test__aggregate_lookup_reverse(self): self.db.a.insert_many([ {'_id': 1}, {'_id': 2}, {'_id': 3} ]) self.db.b.insert_one({'_id': 4, 'arr': [1, 3]}) actual = self.db.a.aggregate([ {'$lookup': { 'from': 'b', 'localField': '_id', 'foreignField': 'arr', 'as': 'b' }} ]) self.assertEqual([ {'_id': 1, 'b': [{'_id': 4, 'arr': [1, 3]}]}, {'_id': 2, 'b': []}, {'_id': 3, 'b': [{'_id': 4, 'arr': [1, 3]}]} ], list(actual)) def test__aggregate_lookup_not_implemented_operators(self): with self.assertRaises(NotImplementedError) as err: 
self.db.a.aggregate([ {'$lookup': { 'let': '_id' }} ]) self.assertIn( "Although 'let' is a valid lookup operator for the", str(err.exception)) def test__aggregate_lookup_missing_operator(self): with self.assertRaises(mongomock.OperationFailure) as err: self.db.a.aggregate([ {'$lookup': { 'localField': '_id', 'foreignField': 'arr', 'as': 'b' }} ]) self.assertEqual( "Must specify 'from' field for a $lookup", str(err.exception)) def test__aggregate_lookup_operator_not_string(self): with self.assertRaises(mongomock.OperationFailure) as err: self.db.a.aggregate([ {'$lookup': { 'from': 'b', 'localField': 1, 'foreignField': 'arr', 'as': 'b' }} ]) self.assertEqual( 'Arguments to $lookup must be strings', str(err.exception)) def test__aggregate_lookup_dot_in_local_field(self): self.db.a.insert_many([ {'_id': 2, 'should': {'do': 'join'}}, {'_id': 3, 'should': {'do': 'not_join'}}, {'_id': 4, 'should': 'skip'}, ]) self.db.b.insert_many([ {'_id': 2, 'should': 'join'}, {'_id': 3, 'should': 'join'}, {'_id': 4, 'should': 'skip'}, ]) actual = self.db.a.aggregate([ {'$lookup': { 'from': 'b', 'localField': 'should.do', 'foreignField': 'should', 'as': 'b' }} ]) self.assertEqual([ { '_id': 2, 'b': [{'_id': 2, 'should': 'join'}, {'_id': 3, 'should': 'join'}], 'should': {'do': 'join'} }, {'_id': 3, 'b': [], 'should': {'do': 'not_join'}}, {'_id': 4, 'b': [], 'should': 'skip'} ], list(actual)) def test__aggregate_lookup_dot_in_as(self): with self.assertRaises(NotImplementedError) as err: self.db.a.aggregate([ {'$lookup': { 'from': 'b', 'localField': '_id', 'foreignField': 'arr', 'as': 'should.fail' }} ]) self.assertIn( "Although '.' 
is valid in the 'as' parameters ", str(err.exception)) def test__aggregate_graph_lookup_behaves_as_lookup(self): self.db.a.insert_one({'_id': 1, 'arr': [2, 4]}) self.db.b.insert_many([ {'_id': 2, 'should': 'include'}, {'_id': 3, 'should': 'skip'}, {'_id': 4, 'should': 'include'} ]) actual = self.db.a.aggregate([ {'$graphLookup': { 'from': 'b', 'startWith': '$arr', 'connectFromField': 'should', 'connectToField': '_id', 'as': 'b' }} ]) self.assertEqual([{ '_id': 1, 'arr': [2, 4], 'b': [ {'_id': 2, 'should': 'include'}, {'_id': 4, 'should': 'include'} ] }], list(actual)) def test__aggregate_graph_lookup_basic(self): self.db.a.insert_one({'_id': 1, 'item': 2}) self.db.b.insert_many([ {'_id': 2, 'parent': 3, 'should': 'include'}, {'_id': 3, 'parent': 4, 'should': 'include'}, {'_id': 4, 'should': 'include'}, {'_id': 5, 'should': 'skip'} ]) actual = self.db.a.aggregate([ {'$graphLookup': { 'from': 'b', 'startWith': '$item', 'connectFromField': 'parent', 'connectToField': '_id', 'as': 'b' }} ]) self.assertEqual([{ '_id': 1, 'item': 2, 'b': [ {'_id': 2, 'parent': 3, 'should': 'include'}, {'_id': 3, 'parent': 4, 'should': 'include'}, {'_id': 4, 'should': 'include'} ] }], list(actual)) def test__aggregate_graph_lookup_expression_start_with(self): self.db.a.insert_one({'_id': 1, 'item': 2}) self.db.b.insert_many([ {'_id': 2, 'parent': 3, 'should': 'include'}, {'_id': 3, 'parent': 4, 'should': 'include'}, {'_id': 4, 'should': 'include'}, {'_id': 5, 'should': 'skip'} ]) actual = self.db.a.aggregate([ {'$graphLookup': { 'from': 'b', 'startWith': {'$add': [1, 1]}, 'connectFromField': 'parent', 'connectToField': '_id', 'as': 'b' }} ]) self.assertEqual([{ '_id': 1, 'item': 2, 'b': [ {'_id': 2, 'parent': 3, 'should': 'include'}, {'_id': 3, 'parent': 4, 'should': 'include'}, {'_id': 4, 'should': 'include'} ] }], list(actual)) def test__aggregate_graph_lookup_depth_field(self): self.db.a.insert_one({'_id': 1, 'item': 2}) self.db.b.insert_many([ {'_id': 2, 'parent': 3, 'should': 
'include'}, {'_id': 3, 'parent': 4, 'should': 'include'}, {'_id': 4, 'should': 'include'}, {'_id': 5, 'should': 'skip'} ]) actual = self.db.a.aggregate([ {'$graphLookup': { 'from': 'b', 'startWith': '$item', 'connectFromField': 'parent', 'connectToField': '_id', 'depthField': 'dpth', 'as': 'b' }} ]) self.assertEqual([{ '_id': 1, 'item': 2, 'b': [ {'_id': 2, 'parent': 3, 'should': 'include', 'dpth': 0}, {'_id': 3, 'parent': 4, 'should': 'include', 'dpth': 1}, {'_id': 4, 'should': 'include', 'dpth': 2} ] }], list(actual)) def test__aggregate_graph_lookup_multiple_connections(self): self.db.a.insert_one({'_id': 1, 'parent_name': 'b'}) self.db.b.insert_many([ {'_id': 2, 'name': 'a', 'parent': 'b', 'should': 'include'}, {'_id': 3, 'name': 'b', 'should': 'skip'}, {'_id': 4, 'name': 'c', 'parent': 'b', 'should': 'include'}, {'_id': 5, 'name': 'd', 'parent': 'c', 'should': 'include'}, {'_id': 6, 'name': 'e', 'should': 'skip'} ]) actual = self.db.a.aggregate([ {'$graphLookup': { 'from': 'b', 'startWith': '$parent_name', 'connectFromField': 'name', 'connectToField': 'parent', 'depthField': 'dpth', 'as': 'b' }} ]) self.assertEqual([{ '_id': 1, 'parent_name': 'b', 'b': [ {'_id': 2, 'name': 'a', 'parent': 'b', 'should': 'include', 'dpth': 0}, {'_id': 4, 'name': 'c', 'parent': 'b', 'should': 'include', 'dpth': 0}, {'_id': 5, 'name': 'd', 'parent': 'c', 'should': 'include', 'dpth': 1}, ] }], list(actual)) def test__aggregate_graph_lookup_cyclic_pointers(self): self.db.a.insert_one({'_id': 1, 'parent_name': 'b'}) self.db.b.insert_many([ {'_id': 2, 'name': 'a', 'parent': 'b', 'should': 'include'}, {'_id': 3, 'name': 'b', 'parent': 'a', 'should': 'include'}, {'_id': 4, 'name': 'c', 'parent': 'b', 'should': 'include'}, {'_id': 5, 'name': 'd', 'should': 'skip'} ]) actual = self.db.a.aggregate([ {'$graphLookup': { 'from': 'b', 'startWith': '$parent_name', 'connectFromField': 'name', 'connectToField': 'parent', 'depthField': 'dpth', 'as': 'b' }} ]) self.assertEqual([{ '_id': 1, 
'parent_name': 'b', 'b': [ {'_id': 2, 'name': 'a', 'parent': 'b', 'should': 'include', 'dpth': 0}, {'_id': 4, 'name': 'c', 'parent': 'b', 'should': 'include', 'dpth': 0}, {'_id': 3, 'name': 'b', 'parent': 'a', 'should': 'include', 'dpth': 1} ] }], list(actual)) def test__aggregate_graph_lookup_restrict_search(self): self.db.a.insert_one({'_id': 1, 'item': 2}) self.db.b.insert_many([ {'_id': 2, 'parent': 3, 'should': 'include'}, {'_id': 3, 'parent': 4, 'should': 'include'}, {'_id': 4, 'should': 'skip'}, {'_id': 5, 'should': 'skip'} ]) actual = self.db.a.aggregate([ {'$graphLookup': { 'from': 'b', 'startWith': '$item', 'connectFromField': 'parent', 'connectToField': '_id', 'restrictSearchWithMatch': {'should': 'include'}, 'as': 'b' }} ]) self.assertEqual([{ '_id': 1, 'item': 2, 'b': [ {'_id': 2, 'parent': 3, 'should': 'include'}, {'_id': 3, 'parent': 4, 'should': 'include'} ] }], list(actual)) def test__aggregate_graph_lookup_max_depth(self): self.db.a.insert_one({'_id': 1, 'item': 2}) self.db.b.insert_many([ {'_id': 2, 'parent': 3, 'should': 'include'}, {'_id': 3, 'parent': 4, 'should': 'include'}, {'_id': 4, 'should': 'skip'}, {'_id': 5, 'should': 'skip'} ]) actual = self.db.a.aggregate([ {'$graphLookup': { 'from': 'b', 'startWith': '$item', 'connectFromField': 'parent', 'connectToField': '_id', 'maxDepth': 1, 'as': 'b' }} ]) self.assertEqual([{ '_id': 1, 'item': 2, 'b': [ {'_id': 2, 'parent': 3, 'should': 'include'}, {'_id': 3, 'parent': 4, 'should': 'include'} ] }], list(actual)) def test__aggregate_graph_lookup_max_depth_0(self): self.db.a.insert_one({'_id': 1, 'item': 2}) self.db.b.insert_many([ {'_id': 2, 'parent': 3, 'should': 'include'}, {'_id': 3, 'parent': 4, 'should': 'include'}, {'_id': 4, 'should': 'skip'}, {'_id': 5, 'should': 'skip'} ]) actual = self.db.a.aggregate([ {'$graphLookup': { 'from': 'b', 'startWith': '$item', 'connectFromField': 'parent', 'connectToField': '_id', 'maxDepth': 0, 'as': 'b' }} ]) lookup_res = self.db.a.aggregate([ {'$lookup': 
{ 'from': 'b', 'localField': 'item', 'foreignField': '_id', 'as': 'b' }} ]) self.assertEqual(list(lookup_res), list(actual)) def test__aggregate_graph_lookup_from_array(self): self.db.a.insert_one({'_id': 1, 'items': [2, 8]}) self.db.b.insert_many([ {'_id': 2, 'parent': 3, 'should': 'include'}, {'_id': 3, 'parent': 4, 'should': 'include'}, {'_id': 4, 'should': 'include'}, {'_id': 5, 'should': 'skip'}, {'_id': 6, 'should': 'include'}, {'_id': 7, 'should': 'skip'}, {'_id': 8, 'parent': 6, 'should': 'include'}, ]) actual = self.db.a.aggregate([ {'$graphLookup': { 'from': 'b', 'startWith': '$items', 'connectFromField': 'parent', 'connectToField': '_id', 'as': 'b' }} ]) expected_list = [ {'_id': 2, 'parent': 3, 'should': 'include'}, {'_id': 3, 'parent': 4, 'should': 'include'}, {'_id': 4, 'should': 'include'}, {'_id': 6, 'should': 'include'}, {'_id': 8, 'parent': 6, 'should': 'include'} ] result_list = list(actual)[0]['b'] def sorter(doc): return doc['_id'] self.assertEqual(len(expected_list), len(result_list)) self.assertEqual(sorted(expected_list, key=sorter), sorted(result_list, key=sorter)) def test_aggregate_graph_lookup_basic_connect_from(self): """TESTCASE FOR GRAPHLOOKUP WITH CONNECT FROM FIELD * This testcase has a simple connect from field without the dot operator. * The test case is taken from https://docs.mongodb.com/manual/reference/operator/aggregation/graphLookup/ * The inputs and the query are copy/pasted directly from the link above. * The expected output is formatted to match the pprint'ed output produced by mongomock. 
* The elements are: - data_a: documents for database a - data_b: documents for database b - query: query for database b - expected: result expected from query execution """ data_a = [ {'_id': 0, 'airport': 'JFK', 'connects': ['BOS', 'ORD']}, {'_id': 1, 'airport': 'BOS', 'connects': ['JFK', 'PWM']}, {'_id': 2, 'airport': 'ORD', 'connects': ['JFK']}, {'_id': 3, 'airport': 'PWM', 'connects': ['BOS', 'LHR']}, {'_id': 4, 'airport': 'LHR', 'connects': ['PWM']}, ] data_b = [ {'_id': 1, 'name': 'Dev', 'nearestAirport': 'JFK'}, {'_id': 2, 'name': 'Eliot', 'nearestAirport': 'JFK'}, {'_id': 3, 'name': 'Jeff', 'nearestAirport': 'BOS'}, ] query = [ { '$graphLookup': { 'from': 'a', 'startWith': '$nearestAirport', 'connectFromField': 'connects', 'connectToField': 'airport', 'maxDepth': 2, 'depthField': 'numConnections', 'as': 'destinations' } } ] ordered_dict = collections.OrderedDict expected = [{'_id': 1, 'destinations': [ordered_dict([('_id', 0), ('airport', 'JFK'), ('connects', ['BOS', 'ORD']), ('numConnections', 0)]), ordered_dict([('_id', 1), ('airport', 'BOS'), ('connects', ['JFK', 'PWM']), ('numConnections', 1)]), ordered_dict([('_id', 2), ('airport', 'ORD'), ('connects', ['JFK']), ('numConnections', 1)]), ordered_dict([('_id', 3), ('airport', 'PWM'), ('connects', ['BOS', 'LHR']), ('numConnections', 2)])], 'name': 'Dev', 'nearestAirport': 'JFK'}, {'_id': 2, 'destinations': [ordered_dict([('_id', 0), ('airport', 'JFK'), ('connects', ['BOS', 'ORD']), ('numConnections', 0)]), ordered_dict([('_id', 1), ('airport', 'BOS'), ('connects', ['JFK', 'PWM']), ('numConnections', 1)]), ordered_dict([('_id', 2), ('airport', 'ORD'), ('connects', ['JFK']), ('numConnections', 1)]), ordered_dict([('_id', 3), ('airport', 'PWM'), ('connects', ['BOS', 'LHR']), ('numConnections', 2)])], 'name': 'Eliot', 'nearestAirport': 'JFK'}, {'_id': 3, 'destinations': [ordered_dict([('_id', 1), ('airport', 'BOS'), ('connects', ['JFK', 'PWM']), ('numConnections', 0)]), ordered_dict([('_id', 0), ('airport', 
'JFK'), ('connects', ['BOS', 'ORD']), ('numConnections', 1)]), ordered_dict([('_id', 3), ('airport', 'PWM'), ('connects', ['BOS', 'LHR']), ('numConnections', 1)]), ordered_dict([('_id', 2), ('airport', 'ORD'), ('connects', ['JFK']), ('numConnections', 2)]), ordered_dict([('_id', 4), ('airport', 'LHR'), ('connects', ['PWM']), ('numConnections', 2)])], 'name': 'Jeff', 'nearestAirport': 'BOS'}] self.db.a.insert_many(data_a) self.db.b.insert_many(data_b) actual = self.db.b.aggregate(query) actual = list(actual) # the diff between expected and actual should be empty res = diff(expected, actual) self.assertEqual(res, []) def test_aggregate_graph_lookup_nested_array(self): """TESTCASE FOR GRAPHLOOKUP WITH CONNECT FROM FIELD * This test cases connectfrom x.y where x is an array. * The test case is adaptaed from https://docs.mongodb.com/manual/reference/operator/aggregation/graphLookup/ * The input is modified wrap a dictionary around the list of cities in * And query is modified accordingly. * The expected output is formatted to match the pprint'ed output produced by mongomock. 
* The elements are: - data_a: documents for database a - data_b: documents for database b - query: query for database b - expected: result expected from query execution """ data_a = [ {'_id': 0, 'airport': 'JFK', 'connects': [ {'to': 'BOS', 'distance': 200}, {'to': 'ORD', 'distance': 800}]}, {'_id': 1, 'airport': 'BOS', 'connects': [ {'to': 'JFK', 'distance': 200}, {'to': 'PWM', 'distance': 2000}]}, {'_id': 2, 'airport': 'ORD', 'connects': [{'to': 'JFK', 'distance': 800}]}, {'_id': 3, 'airport': 'PWM', 'connects': [ {'to': 'BOS', 'distance': 2000}, {'to': 'LHR', 'distance': 6000}]}, {'_id': 4, 'airport': 'LHR', 'connects': [{'to': 'PWM', 'distance': 6000}]}, ] data_b = [ {'_id': 1, 'name': 'Dev', 'nearestAirport': 'JFK'}, {'_id': 2, 'name': 'Eliot', 'nearestAirport': 'JFK'}, {'_id': 3, 'name': 'Jeff', 'nearestAirport': 'BOS'}, ] query = [ { '$graphLookup': { 'from': 'a', 'startWith': '$nearestAirport', 'connectFromField': 'connects.to', 'connectToField': 'airport', 'maxDepth': 2, 'depthField': 'numConnections', 'as': 'destinations' } } ] ordered_dict = collections.OrderedDict expected = [{'_id': 1, 'destinations': [ordered_dict([('_id', 0), ('airport', 'JFK'), ('connects', [{'distance': 200, 'to': 'BOS'}, {'distance': 800, 'to': 'ORD'}]), ('numConnections', 0)]), ordered_dict([('_id', 1), ('airport', 'BOS'), ('connects', [{'distance': 200, 'to': 'JFK'}, {'distance': 2000, 'to': 'PWM'}]), ('numConnections', 1)]), ordered_dict([('_id', 2), ('airport', 'ORD'), ('connects', [{'distance': 800, 'to': 'JFK'}]), ('numConnections', 1)]), ordered_dict([('_id', 3), ('airport', 'PWM'), ('connects', [{'distance': 2000, 'to': 'BOS'}, {'distance': 6000, 'to': 'LHR'}]), ('numConnections', 2)])], 'name': 'Dev', 'nearestAirport': 'JFK'}, {'_id': 2, 'destinations': [ordered_dict([('_id', 0), ('airport', 'JFK'), ('connects', [{'distance': 200, 'to': 'BOS'}, {'distance': 800, 'to': 'ORD'}]), ('numConnections', 0)]), ordered_dict([('_id', 1), ('airport', 'BOS'), ('connects', 
[{'distance': 200, 'to': 'JFK'}, {'distance': 2000, 'to': 'PWM'}]), ('numConnections', 1)]), ordered_dict([('_id', 2), ('airport', 'ORD'), ('connects', [{'distance': 800, 'to': 'JFK'}]), ('numConnections', 1)]), ordered_dict([('_id', 3), ('airport', 'PWM'), ('connects', [{'distance': 2000, 'to': 'BOS'}, {'distance': 6000, 'to': 'LHR'}]), ('numConnections', 2)])], 'name': 'Eliot', 'nearestAirport': 'JFK'}, {'_id': 3, 'destinations': [ordered_dict([('_id', 1), ('airport', 'BOS'), ('connects', [{'distance': 200, 'to': 'JFK'}, {'distance': 2000, 'to': 'PWM'}]), ('numConnections', 0)]), ordered_dict([('_id', 0), ('airport', 'JFK'), ('connects', [{'distance': 200, 'to': 'BOS'}, {'distance': 800, 'to': 'ORD'}]), ('numConnections', 1)]), ordered_dict([('_id', 3), ('airport', 'PWM'), ('connects', [{'distance': 2000, 'to': 'BOS'}, {'distance': 6000, 'to': 'LHR'}]), ('numConnections', 1)]), ordered_dict([('_id', 2), ('airport', 'ORD'), ('connects', [{'distance': 800, 'to': 'JFK'}]), ('numConnections', 2)]), ordered_dict([('_id', 4), ('airport', 'LHR'), ('connects', [{'distance': 6000, 'to': 'PWM'}]), ('numConnections', 2)])], 'name': 'Jeff', 'nearestAirport': 'BOS'}] self.db.a.insert_many(data_a) self.db.b.insert_many(data_b) actual = self.db.b.aggregate(query) actual = list(actual) # the diff between expected and actual should be empty res = diff(expected, actual) self.assertEqual(res, []) def test_aggregate_graph_lookup_connect_from_nested_dict(self): """TESTCASE FOR GRAPHLOOKUP WITH CONNECT FROM FIELD * This test cases connectfrom x.y where x is a dictionary. * The testcase is taken from https://stackoverflow.com/questions/40989763/mongodb-graphlookup * The inputs and the query are copy/pasted directly from the link above (with some cleanup) * The expected output is formatted to match the pprint'ed output produced by mongomock. 
* The elements are: - data_a: documents for database a - data_b: documents for database b - query: query for database b - expected: result expected from query execution """ data_b = [ {'_id': 1, 'name': 'Dev'}, {'_id': 2, 'name': 'Eliot', 'reportsTo': { 'name': 'Dev', 'from': '2016-01-01T00:00:00.000Z'}}, {'_id': 3, 'name': 'Ron', 'reportsTo': {'name': 'Eliot', 'from': '2016-01-01T00:00:00.000Z'}}, {'_id': 4, 'name': 'Andrew', 'reportsTo': { 'name': 'Eliot', 'from': '2016-01-01T00:00:00.000Z'}}, {'_id': 5, 'name': 'Asya', 'reportsTo': { 'name': 'Ron', 'from': '2016-01-01T00:00:00.000Z'}}, {'_id': 6, 'name': 'Dan', 'reportsTo': {'name': 'Andrew', 'from': '2016-01-01T00:00:00.000Z'}}, ] data_a = [{'_id': 1, 'name': 'x'}] query = [ { '$graphLookup': { 'from': 'b', 'startWith': '$name', 'connectFromField': 'reportsTo.name', 'connectToField': 'name', 'as': 'reportingHierarchy' } } ] expected = [{'_id': 1, 'name': 'Dev', 'reportingHierarchy': [{'_id': 1, 'name': 'Dev'}]}, {'_id': 2, 'name': 'Eliot', 'reportingHierarchy': [{'_id': 2, 'name': 'Eliot', 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Dev'}}, {'_id': 1, 'name': 'Dev'}], 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Dev'}}, {'_id': 3, 'name': 'Ron', 'reportingHierarchy': [{'_id': 3, 'name': 'Ron', 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Eliot'}}, {'_id': 2, 'name': 'Eliot', 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Dev'}}, {'_id': 1, 'name': 'Dev'}], 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Eliot'}}, {'_id': 4, 'name': 'Andrew', 'reportingHierarchy': [{'_id': 4, 'name': 'Andrew', 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Eliot'}}, {'_id': 2, 'name': 'Eliot', 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Dev'}}, {'_id': 1, 'name': 'Dev'}], 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Eliot'}}, {'_id': 5, 'name': 'Asya', 'reportingHierarchy': [{'_id': 5, 'name': 'Asya', 'reportsTo': {'from': 
'2016-01-01T00:00:00.000Z', 'name': 'Ron'}}, {'_id': 3, 'name': 'Ron', 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Eliot'}}, {'_id': 2, 'name': 'Eliot', 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Dev'}}, {'_id': 1, 'name': 'Dev'}], 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Ron'}}, {'_id': 6, 'name': 'Dan', 'reportingHierarchy': [{'_id': 6, 'name': 'Dan', 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Andrew'}}, {'_id': 4, 'name': 'Andrew', 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Eliot'}}, {'_id': 2, 'name': 'Eliot', 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Dev'}}, {'_id': 1, 'name': 'Dev'}], 'reportsTo': {'from': '2016-01-01T00:00:00.000Z', 'name': 'Andrew'}}] self.db.a.insert_many(data_a) self.db.b.insert_many(data_b) actual = self.db.b.aggregate(query) actual = list(actual) # the diff between expected and actual should be empty res = diff(expected, actual) self.assertEqual(res, []) def test__aggregate_graph_lookup_missing_operator(self): with self.assertRaises(mongomock.OperationFailure) as err: self.db.a.aggregate([ {'$graphLookup': { 'from': 'arr', 'startWith': '$_id', 'connectFromField': 'arr', 'as': 'b' }} ]) self.assertEqual( "Must specify 'connectToField' field for a $graphLookup", str(err.exception)) def test__aggregate_graphlookup_operator_not_string(self): with self.assertRaises(mongomock.OperationFailure) as err: self.db.a.aggregate([ {'$graphLookup': { 'from': 'arr', 'startWith': '$_id', 'connectFromField': 1, 'connectToField': '_id', 'as': 'b' }} ]) self.assertEqual( "Argument 'connectFromField' to $graphLookup must be string", str(err.exception)) def test__aggregate_graph_lookup_restrict_not_dict(self): with self.assertRaises(mongomock.OperationFailure) as err: self.db.a.aggregate([ {'$graphLookup': { 'from': 'arr', 'startWith': '$_id', 'connectFromField': 'parent', 'connectToField': '_id', 'restrictSearchWithMatch': 3, 'as': 'b' }} ]) self.assertEqual( 
"Argument 'restrictSearchWithMatch' to $graphLookup must be a Dictionary", str(err.exception)) def test__aggregate_graph_lookup_max_depth_not_number(self): with self.assertRaises(mongomock.OperationFailure) as err: self.db.a.aggregate([ {'$graphLookup': { 'from': 'arr', 'startWith': '$_id', 'connectFromField': 'parent', 'connectToField': '_id', 'maxDepth': 's', 'as': 'b' }} ]) self.assertEqual( "Argument 'maxDepth' to $graphLookup must be a number", str(err.exception)) def test__aggregate_graph_lookup_depth_filed_not_string(self): with self.assertRaises(mongomock.OperationFailure) as err: self.db.a.aggregate([ {'$graphLookup': { 'from': 'arr', 'startWith': '$_id', 'connectFromField': 'parent', 'connectToField': '_id', 'depthField': 4, 'as': 'b' }} ]) self.assertEqual( "Argument 'depthField' to $graphlookup must be a string", str(err.exception)) def test__aggregate_graph_lookup_dot_in_as_field(self): with self.assertRaises(NotImplementedError) as err: self.db.a.aggregate([ {'$graphLookup': { 'from': 'arr', 'startWith': '$_id', 'connectFromField': 'parent', 'connectToField': '_id', 'as': 'b.id' }} ]) self.assertIn( "Although '.' 
is valid in the 'as' parameter", str(err.exception)) def test__aggregate_sample(self): self.db.a.insert_many([ {'_id': i} for i in range(5) ]) actual = list(self.db.a.aggregate([{'$sample': {'size': 2}}])) self.assertEqual(2, len(actual)) results = {doc.get('_id') for doc in actual} self.assertLessEqual(results, {0, 1, 2, 3, 4}) self.assertLessEqual(2, len(results)) actual = list(self.db.a.aggregate([{'$sample': {'size': 10}}])) self.assertEqual(5, len(actual)) self.assertEqual({doc.get('_id') for doc in actual}, {0, 1, 2, 3, 4}) def test__aggregate_empty(self): self.db.a.drop() actual = list(self.db.a.aggregate([{'$sample': {'size': 1}}])) self.assertEqual([], list(actual)) def test__aggregate_sample_errors(self): self.db.a.insert_many([ {'_id': i} for i in range(5) ]) # Many cases for '$sample' options that should raise an operation failure. cases = (None, 3, {}, {'size': 2, 'otherUnknownOption': 3}) for case in cases: with self.assertRaises(mongomock.OperationFailure): self.db.a.aggregate([{'$sample': case}]) def test__aggregate_count(self): self.db.a.insert_many([ {'_id': 1, 'a': 1}, {'_id': 2, 'a': 2}, {'_id': 3, 'a': 1} ]) actual = list(self.db.a.aggregate([ {'$match': {'a': 1}}, {'$count': 'one_count'} ])) self.assertEqual([{'one_count': 2}], actual) def test__aggregate_count_errors(self): self.db.a.insert_many([ {'_id': i} for i in range(5) ]) # Many cases for '$count' options that should raise an operation failure. 
cases = (None, 3, {}, [], '', '$one_count', 'one.count') for case in cases: with self.assertRaises(mongomock.OperationFailure): self.db.a.aggregate([{'$count': case}]) def test__aggregate_facet(self): collection = self.db.collection collection.drop() collection.insert_many([ { '_id': 1, 'title': 'The Pillars of Society', 'artist': 'Grosz', 'year': 1926, 'price': 199.99, }, { '_id': 2, 'title': 'Melancholy III', 'artist': 'Munch', 'year': 1902, 'price': 200.00, }, { '_id': 3, 'title': 'Melancholy III', 'artist': 'Munch', 'year': 1902, 'price': 200.00, } ]) actual = collection.aggregate([ {'$group': {'_id': '$year'}}, {'$facet': { 'grouped_and_limited': [{'$limit': 1}], 'groups_count': [{'$count': 'total_count'}], 'grouped_and_unlimited': []}} ]) expect = [{ 'grouped_and_limited': [{'_id': 1902}], 'grouped_and_unlimited': [{'_id': 1902}, {'_id': 1926}], 'groups_count': [{'total_count': 2}] }] self.assertEqual(expect, list(actual)) def test__aggregate_project_array_size(self): self.db.collection.insert_one({'_id': 1, 'arr': [2, 3]}) actual = self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('_id', False), ('a', {'$size': '$arr'}) ])} ]) self.assertEqual([{'a': 2}], list(actual)) def test__aggregate_project_array_size_missing(self): self.db.collection.insert_one({'_id': 1}) with self.assertRaises(mongomock.OperationFailure): list(self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('_id', False), ('a', {'$size': '$arr'}) ])} ])) def test__aggregate_project_cond_mongodb_to_bool(self): self.db.collection.insert_one({'_id': 1}) actual = self.db.collection.aggregate([ {'$project': { '_id': False, # undefined aka KeyError 'undefined_value': {'$cond': ['$not_existing_field', 't', 'f']}, 'false_value': {'$cond': [False, 't', 'f']}, 'null_value': {'$cond': [None, 't', 'f']}, 'zero_value': {'$cond': [0, 't', 'f']}, 'true_value': {'$cond': [True, 't', 'f']}, 'one_value': {'$cond': [1, 't', 
'f']}, 'empty_string': {'$cond': ['', 't', 'f']}, 'empty_list': {'$cond': [[], 't', 'f']}, 'empty_dict': {'$cond': [{}, 't', 'f']}, }}, ]) expected = { 'undefined_value': 'f', 'false_value': 'f', 'null_value': 'f', 'zero_value': 'f', 'true_value': 't', 'one_value': 't', 'empty_string': 't', 'empty_list': 't', 'empty_dict': 't', } self.assertEqual([expected], list(actual)) def test__aggregate_project_array_size_if_null(self): self.db.collection.insert_one({'_id': 1, 'arr': [2, 3]}) self.db.collection.insert_one({'_id': 2}) self.db.collection.insert_one({'_id': 3, 'arr': None}) actual = self.db.collection.aggregate([ {'$project': collections.OrderedDict([ ('_id', False), ('a', {'$size': {'$ifNull': ['$arr', []]}}) ])} ]) self.assertEqual([{'a': 2}, {'a': 0}, {'a': 0}], list(actual)) def test__aggregate_project_if_null(self): self.db.collection.insert_one({'_id': 1, 'elem_a': ''}) actual = self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('_id', False), ('a', {'$ifNull': ['$elem_a', '']}), ('b', {'$ifNull': ['$elem_b', '']}) ])} ]) self.assertEqual([{'a': '', 'b': ''}], list(actual)) @skipIf( SERVER_VERSION > version.parse('4.4'), 'multiple input expressions in $ifNull are not supported in MongoDB v4.4 and earlier') def test__aggregate_project_if_null_multi_field_not_supported(self): self.db.collection.insert_one({'_id': 1, 'elem_a': ''}) with self.assertRaises(mongomock.OperationFailure): self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('_id', False), ('a_and_b', {'$ifNull': ['$elem_a', '$elem_b', '']}), ('b_and_a', {'$ifNull': ['$elem_b', '$elem_a', '']}), ('b_and_c', {'$ifNull': ['$elem_b', '$elem_c', '']}), ])} ]) @skipIf( SERVER_VERSION <= version.parse('4.4'), 'multiple input expressions in $ifNull are not supported in MongoDB v4.4 and earlier') def test__aggregate_project_if_null_multi_field(self): self.db.collection.insert_one({'_id': 1, 'elem_a': ''}) actual = 
list(self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('_id', False), ('a_and_b', {'$ifNull': ['$elem_a', '$elem_b', '']}), ('b_and_a', {'$ifNull': ['$elem_b', '$elem_a', '']}), ('b_and_c', {'$ifNull': ['$elem_b', '$elem_c', '']}), ])} ])) expected = [{'a_and_b': '', 'b_and_a': '', 'b_and_c': ''}] self.assertEqual(expected, list(actual)) def test__aggregate_project_if_null_expression(self): self.db.collection.insert_many([ {'_id': 1, 'description': 'Description 1', 'title': 'Title 1'}, {'_id': 2, 'title': 'Title 2'}, {'_id': 3, 'description': None, 'title': 'Title 3'}, ]) actual = self.db.collection.aggregate([{ '$project': { 'full_description': {'$ifNull': ['$description', '$title']}, } }]) self.assertEqual([ {'_id': 1, 'full_description': 'Description 1'}, {'_id': 2, 'full_description': 'Title 2'}, {'_id': 3, 'full_description': 'Title 3'}, ], list(actual)) def test__aggregate_switch(self): self.db.collection.insert_one({'_id': 1, 'a': 0}) # Expressions taken directly from official documentation: # https://docs.mongodb.com/manual/reference/operator/aggregation/switch/ actual = self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': { 'doc_example_1': { '$switch': { 'branches': [ {'case': {'$eq': ['$a', 5]}, 'then': 'equals'}, {'case': {'$gt': ['$a', 5]}, 'then': 'greater than'}, {'case': {'$lt': ['$a', 5]}, 'then': 'less than'}, ], } }, 'doc_example_2': { '$switch': { 'branches': [ {'case': {'$eq': ['$a', 5]}, 'then': 'equals'}, {'case': {'$gt': ['$a', 5]}, 'then': 'greater than'}, ], 'default': 'did not match', } }, 'doc_example_3': { '$switch': { 'branches': [ {'case': 'this is true', 'then': 'first case'}, {'case': False, 'then': 'second case'}, ], 'default': 'did not match', } }, 'branches_is_tuple': { '$switch': { 'branches': ( {'case': False, 'then': 'value_f'}, {'case': True, 'then': 'value_t'}, ), } }, 'missing_field': { '$switch': { 'branches': [ {'case': '$missing_field', 'then': 'first case'}, 
{'case': True, 'then': '$missing_field'}, ], 'default': 'did not match', } }, }}, ]) expected = { '_id': 1, 'doc_example_1': 'less than', 'doc_example_2': 'did not match', 'doc_example_3': 'first case', 'branches_is_tuple': 'value_t', } self.assertEqual([expected], list(actual)) def test__aggregate_switch_operation_failures(self): self.db.collection.insert_one({'_id': 1, 'a': 0}) tests_cases = [ ( {'$switch': []}, '$switch requires an object as an argument, found: %s' % type([]), ), ( {'$switch': {}}, '$switch requires at least one branch.', ), ( {'$switch': {'branches': {}}}, "$switch expected an array for 'branches', found: %s" % type({}), ), ( {'$switch': {'branches': []}}, '$switch requires at least one branch.', ), ( {'$switch': {'branches': [{}, 7]}}, "$switch requires each branch have a 'case' expression" ), ( {'$switch': {'branches': [{'case': True}, 7]}}, "$switch requires each branch have a 'then' expression." ), ( {'$switch': {'branches': [{'case': True, 'then': 3}, 7]}}, '$switch expected each branch to be an object, found: %s' % type(0), ), ( {'$switch': {'branches': [7, {}]}}, '$switch expected each branch to be an object, found: %s' % type(0), ), ( {'$switch': {'branches': [{'case': False, 'then': 3}]}}, '$switch could not find a matching branch for an input, ' 'and no default was specified.', ), ] for switch_operator, expected_exception in tests_cases: pipeline = [ {'$match': {'_id': 1}}, {'$project': {'result_field': switch_operator}}, ] with self.assertRaises(mongomock.OperationFailure) as err: self.db.collection.aggregate(pipeline) self.assertEqual(expected_exception, str(err.exception)) def test__aggregate_switch_mongodb_to_bool(self): def build_switch(case): return { '$switch': { 'branches': [ {'case': case, 'then': 't'}, ], 'default': 'f', } } self.db.collection.insert_one({'_id': 1}) actual = self.db.collection.aggregate([ {'$project': { '_id': False, 'undefined_value': build_switch('$not_existing_field'), 'false_value': build_switch(False), 
'null_value': build_switch(None), 'zero_value': build_switch(0), 'true_value': build_switch(True), 'one_value': build_switch(1), 'empty_string': build_switch(''), 'empty_list': build_switch([]), 'empty_dict': build_switch({}), }}, ]) expected = { 'undefined_value': 'f', 'false_value': 'f', 'null_value': 'f', 'zero_value': 'f', 'true_value': 't', 'one_value': 't', 'empty_string': 't', 'empty_list': 't', 'empty_dict': 't', } self.assertEqual([expected], list(actual)) def test__aggregate_project_array_element_at(self): self.db.collection.insert_one({'_id': 1, 'arr': [2, 3]}) actual = self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('_id', False), ('a', {'$arrayElemAt': ['$arr', 1]}) ])} ]) self.assertEqual([{'a': 3}], list(actual)) def test__aggregate_project_first(self): self.db.collection.insert_one({'_id': 1, 'arr': [2, 3]}) actual = self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('_id', False), ('a', {'$first': '$arr'}) ])} ]) self.assertEqual([{'a': 2}], list(actual)) def test__aggregate_project_last(self): self.db.collection.insert_one({'_id': 1, 'arr': [2, 3]}) actual = self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('_id', False), ('a', {'$last': '$arr'}) ])} ]) self.assertEqual([{'a': 3}], list(actual)) def test__aggregate_project_rename__id(self): self.db.collection.insert_one({'_id': 1, 'arr': [2, 3]}) actual = self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('_id', False), ('rename_id', '$_id') ])} ]) self.assertEqual([{'rename_id': 1}], list(actual)) def test__aggregate_project_rename_dot_fields(self): self.db.collection.insert_one({'_id': 1, 'arr': {'a': 2, 'b': 3}}) actual = self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('_id', False), ('rename_dot', '$arr.a') ])} ]) self.assertEqual([{'rename_dot': 2}], list(actual)) def 
test__aggregate_project_id(self): self.db.collection.insert_many([ {'_id': 1, 'a': 11}, {'_id': 2, 'a': 12}, ]) actual = self.db.collection.aggregate([ {'$project': {'_id': '$a'}}, ]) self.assertEqual([{'_id': 11}, {'_id': 12}], list(actual)) def test__aggregate_project_missing_fields(self): self.db.collection.insert_one({'_id': 1, 'arr': {'a': 2, 'b': 3}}) actual = self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('_id', False), ('rename_dot', '$arr.c'), ('a', '$arr.a') ])} ]) self.assertEqual([{'a': 2}], list(actual)) def test__aggregate_project_missing_nested_fields(self): self.db.collection.insert_one({'_id': 1, 'a': 2, 'b': {'c': 1}}) actual = self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('_id', False), ('nested_dictionary', {'c': '$b.c', 'd': '$b.d'}) ])} ]) self.assertEqual([{'nested_dictionary': {'c': 1}}], list(actual)) def test__aggregate_project_out(self): self.db.collection.insert_one({'_id': 1, 'arr': {'a': 2, 'b': 3}}) self.db.collection.insert_one({'_id': 2, 'arr': {'a': 4, 'b': 5}}) old_actual = self.db.collection.aggregate([ {'$match': {'_id': 1}}, {'$project': collections.OrderedDict([ ('rename_dot', '$arr.a') ])}, {'$out': 'new_collection'} ]) new_collection = self.db.get_collection('new_collection') new_actual = list(new_collection.find()) expect = [{'_id': 1, 'rename_dot': 2}] self.assertEqual(expect, new_actual) self.assertEqual(expect, list(old_actual)) def test__aggregate_project_out_no_entries(self): self.db.collection.insert_one({'_id': 1, 'arr': {'a': 2, 'b': 3}}) self.db.collection.insert_one({'_id': 2, 'arr': {'a': 4, 'b': 5}}) old_actual = self.db.collection.aggregate([ {'$match': {'_id': 3}}, {'$out': 'new_collection'} ]) new_collection = self.db.get_collection('new_collection') new_actual = list(new_collection.find()) expect = [] self.assertEqual(expect, new_actual) self.assertEqual(expect, list(old_actual)) def 
test__aggregate_project_include_in_exclusion(self): self.db.collection.insert_one({'_id': 1, 'a': 2, 'b': 3}) with self.assertRaises(mongomock.OperationFailure) as err: self.db.collection.aggregate([ {'$project': collections.OrderedDict([ ('a', False), ('b', True) ])} ]) self.assertIn('Bad projection specification', str(err.exception)) def test__aggregate_project_exclude_in_inclusion(self): self.db.collection.insert_one({'_id': 1, 'a': 2, 'b': 3}) with self.assertRaises(mongomock.OperationFailure) as err: self.db.collection.aggregate([ {'$project': collections.OrderedDict([ ('a', True), ('b', False) ])} ]) self.assertIn('Bad projection specification', str(err.exception)) def test__aggregate_project_computed_field_in_exclusion(self): self.db.collection.insert_one({'_id': 1, 'a': 2, 'b': 3}) with self.assertRaises(mongomock.OperationFailure) as err: self.db.collection.aggregate([ {'$project': {'a': 0, 'b': '$a'}}, ]) self.assertIn('Bad projection specification', str(err.exception)) def test__aggregate_project_id_can_always_be_excluded(self): self.db.collection.insert_one({'_id': 1, 'a': 2, 'b': 3}) actual = self.db.collection.aggregate([ {'$project': collections.OrderedDict([ ('a', True), ('b', True), ('_id', False) ])} ]) self.assertEqual([{'a': 2, 'b': 3}], list(actual)) def test__aggregate_project_inclusion_with_only_id(self): self.db.collection.insert_one({'_id': 1, 'a': 2, 'b': 3}) actual = self.db.collection.aggregate([ {'$project': {'_id': True}} ]) self.assertEqual([{'_id': 1}], list(actual)) def test__aggregate_project_exclusion_with_only_id(self): self.db.collection.insert_one({'_id': 1, 'a': 2, 'b': 3}) actual = self.db.collection.aggregate([ {'$project': {'_id': False}} ]) self.assertEqual([{'a': 2, 'b': 3}], list(actual)) actual = self.db.collection.aggregate([ {'$project': {'_id': 0}} ]) self.assertEqual([{'a': 2, 'b': 3}], list(actual)) def test__aggregate_project_subfield(self): self.db.collection.insert_many([ {'_id': 1, 'a': {'b': 3}, 'other': 1}, 
{'_id': 2, 'a': {'c': 3}}, {'_id': 3, 'b': {'c': 3}}, {'_id': 4, 'a': 5}, ]) self.assertEqual( [ {'_id': 1, 'a': {'b': 3}}, {'_id': 2, 'a': {}}, {'_id': 3}, {'_id': 4}, ], list(self.db.collection.aggregate([ {'$project': {'a.b': 1}}, ])), ) def test__aggregate_project_subfield_exclude(self): self.db.collection.insert_many([ {'_id': 1, 'a': {'b': 3}, 'other': 1}, {'_id': 2, 'a': {'c': 3}}, {'_id': 3, 'b': {'c': 3}}, {'_id': 4, 'a': 5}, ]) self.assertEqual( [ {'_id': 1, 'a': {}, 'other': 1}, {'_id': 2, 'a': {'c': 3}}, {'_id': 3, 'b': {'c': 3}}, {'_id': 4, 'a': 5}, ], list(self.db.collection.aggregate([ {'$project': {'a.b': 0}}, ])), ) def test__aggregate_project_subfield_conflict(self): self.db.collection.insert_many([ {'_id': 1, 'a': {'b': 3}, 'other': 1}, {'_id': 2, 'a': {'c': 3}}, {'_id': 3, 'b': {'c': 3}}, ]) with self.assertRaises(mongomock.OperationFailure): list(self.db.collection.aggregate([ {'$project': collections.OrderedDict([('a.b', 1), ('a', 1)])}, ])) with self.assertRaises(mongomock.OperationFailure): list(self.db.collection.aggregate([ {'$project': collections.OrderedDict([('a', 1), ('a.b', 1)])}, ])) with self.assertRaises(mongomock.OperationFailure): list(self.db.collection.aggregate([ {'$project': collections.OrderedDict([('d.e.f', 1), ('d.e.f.g', 1)])}, ])) def test__aggregate_project_group_operations(self): self.db.collection.insert_one({'_id': 1, 'a': 2, 'b': 3, 'c': '$d'}) actual = self.db.collection.aggregate([{'$project': { '_id': 1, 'max': {'$max': [5, 9, '$a', None]}, 'min': {'$min': [8, '$a', None, '$b']}, 'avg': {'$avg': [4, '$a', '$b', 'a', 'b']}, 'sum': {'$sum': [4, '$a', None, '$b', 'a', 'b', {'$sum': [0, 1, '$b']}]}, 'maxString': {'$max': [{'$literal': '$b'}, '$c']}, }}]) self.assertEqual( [{'_id': 1, 'max': 9, 'min': 2, 'avg': 3, 'sum': 13, 'maxString': '$d'}], list(actual)) def test__aggregate_project_array_subfield(self): self.db.collection.insert_many([ {'a': [{'b': 1, 'c': 2, 'd': 3}], 'e': 4}, {'a': [{'c': 12, 'd': 13}], 'e': 
14}, {'a': [{'b': 21, 'd': 23}], 'e': 24}, {'a': [{'b': 31, 'c': 32}], 'e': 34}, {'a': [{'b': 41}], 'e': 44}, {'a': [{'c': 51}], 'e': 54}, {'a': [{'d': 51}], 'e': 54}, {'a': [{'b': 61, 'c': 62, 'd': 63}, 65, 'foobar', {'b': 66, 'c': 67, 'd': 68}], 'e': 64}, {'a': []}, {'a': [1, 2, 3, 4]}, {'a': 'foobar'}, {'a': 5}, ]) actual = self.db.collection.aggregate([ {'$project': {'a.b': 1, 'a.c': 1, '_id': 0}} ]) self.assertEqual(list(actual), [ {'a': [{'b': 1, 'c': 2}]}, {'a': [{'c': 12}]}, {'a': [{'b': 21}]}, {'a': [{'b': 31, 'c': 32}]}, {'a': [{'b': 41}]}, {'a': [{'c': 51}]}, {'a': [{}]}, {'a': [{'b': 61, 'c': 62}, {'b': 66, 'c': 67}]}, {'a': []}, {'a': []}, {}, {}, ]) def test__aggregate_arithmetic(self): self.db.collection.insert_one({ 'a': 1.5, 'b': 2, 'c': 2, }) actual = self.db.collection.aggregate([{'$project': { 'sum': {'$add': [15, '$a', '$b', '$c']}, 'prod': {'$multiply': [5, '$a', '$b', '$c']}, 'trunc': {'$trunc': '$a'}, }}]) self.assertEqual( [{'sum': 20.5, 'prod': 30, 'trunc': 1}], [{k: v for k, v in doc.items() if k != '_id'} for doc in actual]) def test__aggregate_string_operation_split_exceptions(self): self.db.collection.insert_one({ 'a': 'Hello', 'b': 'World', 'c': 3 }) with self.assertRaises(mongomock.OperationFailure): self.db.collection.aggregate([{'$project': { 'split': {'$split': []} }}]) with self.assertRaises(mongomock.OperationFailure): self.db.collection.aggregate([{'$project': { 'split': {'$split': ['$a']} }}]) with self.assertRaises(mongomock.OperationFailure): self.db.collection.aggregate([{'$project': { 'split': {'$split': ['$a', '$b', '$c']} }}]) with self.assertRaises(TypeError): self.db.collection.aggregate([{'$project': { 'split': {'$split': ['$a', 1]} }}]) with self.assertRaises(TypeError): self.db.collection.aggregate([{'$project': { 'split': {'$split': [1, '$a']} }}]) def test__aggregate_string_operations(self): self.db.collection.insert_one({ 'a': 'Hello', 'b': 'World', 'c': 3 }) actual = self.db.collection.aggregate([{'$project': { 
'concat': {'$concat': ['$a', ' Dear ', '$b']}, 'concat_none': {'$concat': ['$a', None, '$b']}, 'sub1': {'$substr': ['$a', 0, 4]}, 'sub2': {'$substr': ['$a', -1, 3]}, 'sub3': {'$substr': ['$a', 2, -1]}, 'lower': {'$toLower': '$a'}, 'lower_err': {'$toLower': None}, 'split_string_none': {'$split': [None, 'l']}, 'split_string_missing': {'$split': ['$missingField', 'l']}, 'split_delimiter_none': {'$split': ['$a', None]}, 'split_delimiter_missing': {'$split': ['$a', '$missingField']}, 'split': {'$split': ['$a', 'l']}, 'strcasecmp': {'$strcasecmp': ['$a', '$b']}, 'upper': {'$toUpper': '$a'}, 'upper_err': {'$toUpper': None}, }}]) self.assertEqual( [{'concat': 'Hello Dear World', 'concat_none': None, 'sub1': 'Hell', 'sub2': '', 'sub3': 'llo', 'lower': 'hello', 'lower_err': '', 'split_string_none': None, 'split_string_missing': None, 'split_delimiter_none': None, 'split_delimiter_missing': None, 'split': ['He', '', 'o'], 'strcasecmp': -1, 'upper': 'HELLO', 'upper_err': ''}], [{k: v for k, v in doc.items() if k != '_id'} for doc in actual]) def test__aggregate_match_expr(self): self.db.collection.insert_many([ {'_id': 0, 'a': 2, 'b': 3}, {'_id': 1, 'a': 2, 'b': 2}, {'_id': 2, 'a': 5, 'b': 2}, ]) actual = self.db.collection.aggregate([{'$match': { '$or': [{'$expr': {'$gt': ['$a', 3]}}, {'b': 3}], }}]) self.assertEqual({0, 2}, {d['_id'] for d in actual}) def test__aggregate_regexpmatch(self): self.db.collection.insert_one({ 'a': 'Hello', 'b': 'World', 'c': 3 }) actual = self.db.collection.aggregate([{'$project': { 'Hello': {'$regexMatch': {'input': '$a', 'regex': 'Hel*o'}}, 'Word': {'$regexMatch': {'input': '$b', 'regex': 'Word'}}, 'missing-field': {'$regexMatch': {'input': '$d', 'regex': 'orl'}}, }}]) self.assertEqual( [{'Hello': True, 'Word': False, 'missing-field': False}], [{k: v for k, v in doc.items() if k != '_id'} for doc in actual]) def test__aggregate_add_fields(self): self.db.collection.insert_one({ 'a': 1.5, 'b': 2, 'c': 2, }) actual = 
self.db.collection.aggregate([{'$addFields': { 'sum': {'$add': [15, '$a', '$b', '$c']}, }}]) self.assertEqual( [{'sum': 20.5, 'a': 1.5, 'b': 2, 'c': 2}], [{k: v for k, v in doc.items() if k != '_id'} for doc in actual]) def test__aggregate_set(self): self.db.collection.insert_one({ 'a': 1.5, 'b': 2, 'c': 2, }) actual = self.db.collection.aggregate([{'$set': { 'sum': {'$add': [15, '$a', '$b', '$c']}, 'prod': {'$multiply': [5, '$a', '$b', '$c']}, 'trunc': {'$trunc': '$a'}, }}]) self.assertEqual( [{'sum': 20.5, 'prod': 30, 'trunc': 1, 'a': 1.5, 'b': 2, 'c': 2}], [{k: v for k, v in doc.items() if k != '_id'} for doc in actual]) def test__aggregate_set_empty(self): self.db.collection.insert_one({ 'a': 1.5, 'b': 2, 'c': 2, }) with self.assertRaises(mongomock.OperationFailure): self.db.collection.aggregate([{'$set': {}}]) def test__aggregate_set_override(self): self.db.collection.insert_one({ 'a': 1.5, 'b': 2, 'c': 2, }) actual = self.db.collection.aggregate([{'$set': { 'a': {'$add': [15, '$a', '$b', '$c']}, }}]) self.assertEqual( [{'a': 20.5, 'b': 2, 'c': 2}], [{k: v for k, v in doc.items() if k != '_id'} for doc in actual]) def test__aggregate_set_error(self): self.db.collection.insert_one({ 'a': 1.5, }) actual = self.db.collection.aggregate([{'$set': { 'sumA': {'$sum': [15, '$a']}, 'sum': {'$sum': [15, '$a', '$b', '$c']}, 'bCopy': '$b', }}]) self.assertEqual( [{'a': 1.5, 'sumA': 16.5, 'sum': 16.5}], [{k: v for k, v in doc.items() if k != '_id'} for doc in actual]) def test__aggregate_set_subfield(self): self.db.collection.insert_many([ {'a': {'b': 1}}, {'b': 2}, {'a': {'b': 3, 'c': 4}}, {'a': 1}, ]) actual = self.db.collection.aggregate([{'$set': { 'a.c': 3, }}]) self.assertEqual( [ {'a': {'b': 1, 'c': 3}}, {'a': {'c': 3}, 'b': 2}, {'a': {'b': 3, 'c': 3}}, {'a': {'c': 3}}, ], [{k: v for k, v in doc.items() if k != '_id'} for doc in actual]) def test__strcmp_not_enough_params(self): self.db.collection.insert_one({ 'a': 'Hello', }) with 
self.assertRaises(mongomock.OperationFailure) as err: self.db.collection.aggregate([ {'$project': {'cmp': {'$strcasecmp': ['s']}}} ]) self.assertEqual( 'strcasecmp must have 2 items', str(err.exception)) def test__substr_not_enough_params(self): self.db.collection.insert_one({ 'a': 'Hello', }) with self.assertRaises(mongomock.OperationFailure) as err: self.db.collection.aggregate([ {'$project': {'sub': {'$substr': ['$a', 1]}}} ]) self.assertEqual( 'substr must have 3 items', str(err.exception)) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__aggregate_tostr_operation_objectid(self): self.db.collection.insert_one({ 'a': ObjectId('5abcfad1fbc93d00080cfe66') }) actual = self.db.collection.aggregate([{'$project': { 'toString': {'$toString': '$a'}, }}]) self.assertEqual( [{'toString': '5abcfad1fbc93d00080cfe66'}], [{k: v for k, v in doc.items() if k != '_id'} for doc in actual]) def test__aggregate_unrecognized(self): self.db.collection.insert_one({}) with self.assertRaises(mongomock.OperationFailure): self.db.collection.aggregate([ {'$project': {'a': {'$notAValidOperation': True}}} ]) def test__aggregate_not_implemented(self): self.db.collection.insert_one({}) with self.assertRaises(NotImplementedError): self.db.collection.aggregate([ {'$project': {'a': {'$stdDevPop': 'scores'}}}, ]) with self.assertRaises(NotImplementedError): self.db.collection.aggregate([ {'$project': {'a': {'$cmp': [1, 2]}}}, ]) with self.assertRaises(NotImplementedError): self.db.collection.aggregate([ {'$project': {'a': {'$setIntersection': [[2], [1, 2, 3]]}}}, ]) def test__aggregate_project_let(self): self.db.collection.insert_one({'_id': 1, 'a': 5, 'b': 2, 'c': 3}) actual = self.db.collection.aggregate([{'$project': { 'a': {'$let': { 'vars': {'a': 1}, 'in': {'$multiply': ['$$a', 3]}, }}, }}]) self.assertEqual([{'_id': 1, 'a': 3}], list(actual)) def test__aggregate_project_rotate(self): self.db.collection.insert_one({'_id': 1, 'a': 1, 'b': 2, 'c': 3}) actual = 
self.db.collection.aggregate([ {'$project': {'a': '$b', 'b': '$a', 'c': 1}}, ]) self.assertEqual([{'_id': 1, 'a': 2, 'b': 1, 'c': 3}], list(actual)) def test__aggregate_mixed_expression(self): self.db.collection.insert_one({'_id': 1, 'arr': [2, 3]}) with self.assertRaises(mongomock.OperationFailure): self.db.collection.aggregate([ {'$project': {'a': {'$literal': False, 'hint': False}}}, ]) def test__find_type_array(self): self.db.collection.insert_one({'_id': 1, 'arr': [1, 2]}) self.db.collection.insert_one({'_id': 2, 'arr': {'a': 4, 'b': 5}}) actual = self.db.collection.find( {'arr': {'$type': 'array'}}) expect = [{'_id': 1, 'arr': [1, 2]}] self.assertEqual(expect, list(actual)) def test__find_type_object(self): self.db.collection.insert_one({'_id': 1, 'arr': [1, 2]}) self.db.collection.insert_one({'_id': 2, 'arr': {'a': 4, 'b': 5}}) actual = self.db.collection.find({'arr': {'$type': 'object'}}) expect = [{'_id': 2, 'arr': {'a': 4, 'b': 5}}] self.assertEqual(expect, list(actual)) def test__find_type_number(self): self.db.collection.insert_many([ {'_id': 1, 'a': 'str'}, {'_id': 2, 'a': 1}, {'_id': 3, 'a': {'b': 1}}, {'_id': 4, 'a': 1.2}, {'_id': 5, 'a': None}, ]) actual = self.db.collection.find({'a': {'$type': 'number'}}) expect = [ {'_id': 2, 'a': 1}, {'_id': 4, 'a': 1.2}, ] self.assertEqual(expect, list(actual)) def test__find_unknown_type(self): with self.assertRaises(mongomock.OperationFailure): self.db.collection.find_one({'arr': {'$type': 'unknown-type'}}) def test__find_unimplemented_type(self): with self.assertRaises(NotImplementedError): self.db.collection.find_one({'arr': {'$type': 'javascript'}}) def test__find_eq_none(self): self.db.collection.insert_one({'_id': 1, 'arr': None}) self.db.collection.insert_one({'_id': 2}) actual = self.db.collection.find( {'arr': {'$eq': None}}, projection=['_id'] ) expect = [{'_id': 1}, {'_id': 2}] self.assertEqual(expect, list(actual)) def test__find_too_much_nested(self): self.db.collection.insert_one({'_id': 1, 
'arr': {'a': {'b': 1}}}) self.db.collection.insert_one({'_id': 2, 'arr': None}) actual = self.db.collection.find({'arr.a.b': 1}, projection=['_id']) self.assertEqual([{'_id': 1}], list(actual)) def test__find_too_far(self): self.db.collection.insert_one({'_id': 1, 'arr': [0, 1]}) self.db.collection.insert_one({'_id': 2, 'arr': [0]}) actual = self.db.collection.find({'arr.1': 1}, projection=['_id']) self.assertEqual([{'_id': 1}], list(actual)) actual = self.db.collection.find({'arr.1': {'$exists': False}}, projection=['_id']) self.assertEqual([{'_id': 2}], list(actual)) def test__find_elemmatch_none(self): self.db.collection.insert_one({'_id': 1, 'arr': [0, 1]}) with self.assertRaises(mongomock.OperationFailure): self.db.collection.find_one({'arr': {'$elemMatch': None}}) def test__find_where(self): self.db.collection.insert_many([ {'name': 'Anya'}, {'name': 'Bob'}, ]) with self.assertRaises(NotImplementedError): self.db.collection.find_one({ '$where': 'function() {return (hex_md5(this.name) == "9b53e667f30cd329dca1ec9e6a83e994")}', }) def test__unwind_no_prefix(self): self.db.collection.insert_one({'_id': 1, 'arr': [1, 2]}) with self.assertRaises(ValueError) as err: self.db.collection.aggregate([ {'$unwind': 'arr'} ]) self.assertEqual( "$unwind failed: exception: field path references must be prefixed with a '$' 'arr'", str(err.exception)) def test__unwind_dict_options(self): self.db.collection.insert_one({'_id': 1, 'arr': [1, 2]}) actual = self.db.collection.aggregate([ {'$unwind': {'path': '$arr'}} ]) self.assertEqual( [ {'_id': 1, 'arr': 1}, {'_id': 1, 'arr': 2}, ], list(actual)) def test__unwind_not_array(self): self.db.collection.insert_one({'_id': 1, 'arr': 1}) actual = self.db.collection.aggregate([{'$unwind': '$arr'}]) self.assertEqual([{'_id': 1, 'arr': 1}], list(actual)) def test__unwind_include_array_index(self): self.db.collection.insert_many([ {'_id': 1, 'item': 'ABC', 'sizes': ['S', 'M', 'L']}, {'_id': 2, 'item': 'EFG', 'sizes': []}, {'_id': 3, 'item': 
'IJK', 'sizes': 'M'}, {'_id': 4, 'item': 'LMN'}, {'_id': 5, 'item': 'XYZ', 'sizes': None}, ]) actual = self.db.collection.aggregate([ {'$unwind': {'path': '$sizes', 'includeArrayIndex': 'arrayIndex'}} ]) self.assertEqual( [ {'_id': 1, 'item': 'ABC', 'sizes': 'S', 'arrayIndex': 0}, {'_id': 1, 'item': 'ABC', 'sizes': 'M', 'arrayIndex': 1}, {'_id': 1, 'item': 'ABC', 'sizes': 'L', 'arrayIndex': 2}, {'_id': 3, 'item': 'IJK', 'sizes': 'M', 'arrayIndex': None}, ], list(actual)) def test__unwind_preserve_null_and_empty_arrays(self): self.db.collection.insert_many([ {'_id': 1, 'item': 'ABC', 'sizes': ['S', 'M', 'L']}, {'_id': 2, 'item': 'EFG', 'sizes': []}, {'_id': 3, 'item': 'IJK', 'sizes': 'M'}, {'_id': 4, 'item': 'LMN'}, {'_id': 5, 'item': 'XYZ', 'sizes': None}, {'_id': 6, 'item': 'abc', 'sizes': False}, ]) actual = self.db.collection.aggregate([ {'$unwind': {'path': '$sizes', 'preserveNullAndEmptyArrays': True}}, ]) self.assertEqual( [ {'_id': 1, 'item': 'ABC', 'sizes': 'S'}, {'_id': 1, 'item': 'ABC', 'sizes': 'M'}, {'_id': 1, 'item': 'ABC', 'sizes': 'L'}, {'_id': 2, 'item': 'EFG'}, {'_id': 3, 'item': 'IJK', 'sizes': 'M'}, {'_id': 4, 'item': 'LMN'}, {'_id': 5, 'item': 'XYZ', 'sizes': None}, {'_id': 6, 'item': 'abc', 'sizes': False}, ], list(actual)) def test__unwind_preserve_null_and_empty_arrays_on_nested(self): self.db.collection.insert_many([ {'_id': 1, 'item': 'ABC', 'nest': {'sizes': ['S', 'M', 'L']}}, {'_id': 2, 'item': 'EFG', 'nest': {'sizes': []}}, {'_id': 3, 'item': 'IJK', 'nest': {'sizes': 'M'}}, {'_id': 4, 'item': 'LMN', 'nest': {}}, {'_id': 5, 'item': 'XYZ', 'nest': {'sizes': None}}, {'_id': 6, 'item': 'abc', 'nest': {'sizes': False}}, {'_id': 7, 'item': 'abc', 'nest': ['A', 'B', 'C']}, {'_id': 8, 'item': 'abc', 'nest': [{'sizes': 'A'}, {'sizes': ['B', 'C']}]}, {'_id': 9, 'item': 'def'}, ]) actual = self.db.collection.aggregate([ {'$unwind': {'path': '$nest.sizes', 'preserveNullAndEmptyArrays': True}}, ]) self.assertEqual( [ {'_id': 1, 'item': 'ABC', 'nest': 
{'sizes': 'S'}}, {'_id': 1, 'item': 'ABC', 'nest': {'sizes': 'M'}}, {'_id': 1, 'item': 'ABC', 'nest': {'sizes': 'L'}}, {'_id': 2, 'item': 'EFG', 'nest': {}}, {'_id': 3, 'item': 'IJK', 'nest': {'sizes': 'M'}}, {'_id': 4, 'item': 'LMN', 'nest': {}}, {'_id': 5, 'item': 'XYZ', 'nest': {'sizes': None}}, {'_id': 6, 'item': 'abc', 'nest': {'sizes': False}}, {'_id': 7, 'item': 'abc', 'nest': ['A', 'B', 'C']}, {'_id': 8, 'item': 'abc', 'nest': [{'sizes': 'A'}, {'sizes': ['B', 'C']}]}, {'_id': 9, 'item': 'def'}, ], list(actual)) def test__array_size_non_array(self): self.db.collection.insert_one({'_id': 1, 'arr0': [], 'arr3': [1, 2, 3]}) with self.assertRaises(mongomock.OperationFailure) as err: self.db.collection.aggregate([ {'$project': {'size': {'$size': 'arr'}}} ]) self.assertEqual( 'The argument to $size must be an array, but was of type: %s' % type('arr'), str(err.exception)) def test__array_size_argument_array(self): self.db.collection.insert_one({'_id': 1, 'arr': [1, 2, 3]}) with self.assertRaises(mongomock.OperationFailure) as err: self.db.collection.aggregate([ {'$project': {'size': {'$size': [1, 2, 3]}}} ]) self.assertEqual( 'Expression $size takes exactly 1 arguments. 
3 were passed in.', str(err.exception)) def test__array_size_valid_array(self): self.db.collection.insert_one({'_id': 1, 'arr0': [], 'arr3': [1, 2, 3]}) result1 = self.db.collection.aggregate([ {'$project': {'size': {'$size': '$arr0'}}} ]).next() self.assertEqual(result1['size'], 0) result2 = self.db.collection.aggregate([ {'$project': {'size': {'$size': '$arr3'}}} ]).next() self.assertEqual(result2['size'], 3) def test__array_size_valid_argument_array(self): self.db.collection.insert_one({'_id': 1, 'arr': [1, 2, 3]}) result1 = self.db.collection.aggregate([ {'$project': {'size': {'$size': [[1, 2]]}}} ]).next() self.assertEqual(result1['size'], 2) result2 = self.db.collection.aggregate([ {'$project': {'size': {'$size': ['$arr']}}} ]).next() self.assertEqual(result2['size'], 3) result3 = self.db.collection.aggregate([ {'$project': {'size': {'$size': [{'$literal': [1, 2, 3, 4, 5]}]}}} ]).next() self.assertEqual(result3['size'], 5) def test__array_size_valid_expression(self): self.db.collection.insert_one({'_id': 1, 'arr': [1, 2, 3]}) result = self.db.collection.aggregate([ {'$project': {'size': {'$size': {'$literal': [1, 2, 3, 4]}}}} ]).next() self.assertEqual(result['size'], 4) def test__aggregate_project_out_replace(self): self.db.collection.insert_one({'_id': 1, 'arr': {'a': 2, 'b': 3}}) self.db.collection.insert_one({'_id': 2, 'arr': {'a': 4, 'b': 5}}) new_collection = self.db.get_collection('new_collection') new_collection.insert_one({'_id': 3}) self.db.collection.aggregate([ {'$match': {'_id': 1}}, { '$project': { 'rename_dot': '$arr.a' } }, {'$out': 'new_collection'} ]) actual = list(new_collection.find()) expect = [{'_id': 1, 'rename_dot': 2}] self.assertEqual(expect, actual) def test__all_elemmatch(self): self.db.collection.insert_many([ { '_id': 5, 'code': 'xyz', 'tags': ['school', 'book', 'bag', 'headphone', 'appliance'], 'qty': [ {'size': 'S', 'num': 10, 'color': 'blue'}, {'size': 'M', 'num': 45, 'color': 'blue'}, {'size': 'L', 'num': 100, 'color': 
'green'}, ], }, { '_id': 6, 'code': 'abc', 'tags': ['appliance', 'school', 'book'], 'qty': [ {'size': '6', 'num': 100, 'color': 'green'}, {'size': '6', 'num': 50, 'color': 'blue'}, {'size': '8', 'num': 100, 'color': 'brown'}, ], }, { '_id': 7, 'code': 'efg', 'tags': ['school', 'book'], 'qty': [ {'size': 'S', 'num': 10, 'color': 'blue'}, {'size': 'M', 'num': 100, 'color': 'blue'}, {'size': 'L', 'num': 100, 'color': 'green'}, ], }, { '_id': 8, 'code': 'ijk', 'tags': ['electronics', 'school'], 'qty': [ {'size': 'M', 'num': 100, 'color': 'green'}, ], }, ]) filters = { 'qty': { '$all': [ {'$elemMatch': {'size': 'M', 'num': {'$gt': 50}}}, {'$elemMatch': {'num': 100, 'color': 'green'}}, ], }, } results = self.db.collection.find(filters) self.assertEqual([doc['_id'] for doc in results], [7, 8]) def test__all_size(self): self.db.collection.insert_many([ { 'code': 'ijk', 'tags': ['electronics', 'school'], 'qty': [{'size': 'M', 'num': 100, 'color': 'green'}], }, { 'code': 'efg', 'tags': ['school', 'book'], 'qty': [ {'size': 'S', 'num': 10, 'color': 'blue'}, {'size': 'M', 'num': 100, 'color': 'blue'}, {'size': 'L', 'num': 100, 'color': 'green'}, ], }, ]) self.assertEqual(1, self.db.collection.count_documents({'qty.size': {'$all': ['M', 'L']}})) def test__filter_eq_on_array(self): """$eq on array matches if one element of the array matches.""" collection = self.db.collection collection.insert_many([ {'_id': 1, 'shape': [{'color': 'red'}]}, {'_id': 2, 'shape': [{'color': 'yellow'}]}, {'_id': 3, 'shape': [{'color': 'red'}, {'color': 'yellow'}]}, {'_id': 4, 'shape': [{'size': 3}]}, {'_id': 5}, {'_id': 6, 'shape': {'color': ['red', 'yellow']}}, ]) results = self.db.collection.find({'shape.color': {'$eq': 'red'}}) self.assertEqual([1, 3, 6], [doc['_id'] for doc in results]) # testing eq operation with null as value results = self.db.collection.find({'shape.color': {'$eq': None}}) self.assertEqual([4, 5], [doc['_id'] for doc in results]) results = 
self.db.collection.find({'shape.color': None}) self.assertEqual([4, 5], [doc['_id'] for doc in results]) def test__filter_ne_on_array(self): """$ne and $nin on array only matches if no element of the array matches.""" collection = self.db.collection collection.insert_many([ {'_id': 1, 'shape': [{'color': 'red'}]}, {'_id': 2, 'shape': [{'color': 'yellow'}]}, {'_id': 3, 'shape': [{'color': 'red'}, {'color': 'yellow'}]}, {'_id': 4, 'shape': [{'size': 3}]}, {'_id': 5}, {'_id': 6, 'shape': {'color': ['red', 'yellow']}}, ]) # $ne results = self.db.collection.find({'shape.color': {'$ne': 'red'}}) self.assertEqual([2, 4, 5], [doc['_id'] for doc in results]) # $ne results = self.db.collection.find({'shape.color': {'$ne': ['red', 'yellow']}}) self.assertEqual([1, 2, 3, 4, 5], [doc['_id'] for doc in results]) # $nin results = self.db.collection.find({'shape.color': {'$nin': ['blue', 'red']}}) self.assertEqual([2, 4, 5], [doc['_id'] for doc in results]) def test__filter_ne_multiple_keys(self): """Using $ne and another operator.""" collection = self.db.collection collection.insert_many([ {'_id': 1, 'cases': [{'total': 1}]}, {'_id': 2, 'cases': [{'total': 2}]}, {'_id': 3, 'cases': [{'total': 3}]}, {'_id': 4, 'cases': []}, {'_id': 5}, ]) # $ne results = self.db.collection.find({'cases.total': {'$gt': 1, '$ne': 3}}) self.assertEqual([2], [doc['_id'] for doc in results]) # $nin results = self.db.collection.find({'cases.total': {'$gt': 1, '$nin': [1, 3]}}) self.assertEqual([2], [doc['_id'] for doc in results]) def test__filter_objects_comparison(self): collection = self.db.collection query = {'counts': {'$gt': {'circles': 1}}} collection.insert_many([ # Document kept: circles' value 3 is greater than 1. {'_id': 1, 'counts': {'circles': 3}}, # Document kept: the first key, squares, is greater than circles. {'_id': 2, 'counts': {'squares': 0}}, # Document dropped: the first key, arrows, is smaller than circles. 
{'_id': 3, 'counts': {'arrows': 15}}, # Document dropped: the dicts are equal. {'_id': 4, 'counts': {'circles': 1}}, # Document kept: the first item is equal, and there is an additional item. {'_id': 5, 'counts': collections.OrderedDict([ ('circles', 1), ('arrows', 15), ])}, # Document dropped: same as above, but order matters. {'_id': 6, 'counts': collections.OrderedDict([ ('arrows', 15), ('circles', 1), ])}, # Document dropped: the value is missing. {'_id': 7}, # Document dropped: there is less items. {'_id': 8, 'counts': {}}, # Document kept: strings are greater than numbers. {'_id': 9, 'counts': {'circles': 'three'}}, # Document dropped: None is less than numbers. {'_id': 10, 'counts': {'circles': None}}, # Document kept: ObjectIds are more than numbers. {'_id': 11, 'counts': {'circles': mongomock.ObjectId()}}, # Document kept: datetimes are more than numbers. {'_id': 12, 'counts': {'circles': datetime.now()}}, # Document kept: BinData are more than numbers. {'_id': 13, 'counts': {'circles': b'binary'}}, ]) results = collection.find(query) self.assertEqual( {1, 2, 5, 9, 11, 12, 13}, {doc['_id'] for doc in results}) query = {'counts': {'$gt': {'circles': re.compile('3')}}} self.assertFalse(list(collection.find(query))) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__filter_bson_regex(self): self.db.collection.insert_many([ {'_id': 'a'}, {'_id': 'A'}, {'_id': 'abc'}, {'_id': 'b'}, {'_id': 'ba'}, ]) results = self.db.collection.find({'_id': Regex('^a', 'i')}) self.assertEqual({'a', 'A', 'abc'}, {doc['_id'] for doc in results}) self.db.tada.drop() self.db.tada.insert_one({'a': 'TADA'}) self.db.tada.insert_one({'a': 'TA\nDA'}) self.assertTrue(self.db.tada.find_one({'a': { '$regex': Regex('tada', re.IGNORECASE), }})) self.assertTrue(self.db.tada.find_one({'a': collections.OrderedDict([ ('$regex', Regex('tada')), ('$options', 'i'), ])})) self.assertTrue(self.db.tada.find_one({'a': collections.OrderedDict([ ('$regex', Regex('tada', re.IGNORECASE)), 
('$options', 'm'), ])})) def test__filter_objects_comparison_unknown_type(self): self.db.collection.insert_one({'counts': 3}) with self.assertRaises(NotImplementedError): self.db.collection.find_one({'counts': {'$gt': str}}) def test__filter_objects_nested_comparison(self): collection = self.db.collection query = {'counts': {'$gt': {'circles': {'blue': 1}}}} collection.insert_many([ # Document kept: circles' value {'blue': 3} is greater than {'blue': 1}. {'_id': 1, 'counts': {'circles': {'blue': 3}}}, # Document kept: the first key, squares, is greater than circles. {'_id': 2, 'counts': {'squares': {}}}, # Document dropped: the first key, arrows, is smaller than circles. {'_id': 3, 'counts': {'arrows': {'blue': 2}}}, # Document dropped: circles' value {} is less than {'blue': 1}. {'_id': 4, 'counts': {'circles': {}}}, # Document kept: the first value type is greater than the type of {'blue' : 1}. {'_id': 5, 'counts': {'arrows': True}}, ]) results = collection.find(query) self.assertEqual({1, 2, 5}, {doc['_id'] for doc in results}) def test_filter_not_bad_value(self): with self.assertRaises(mongomock.OperationFailure): self.db.collection.find_one({'a': {'$not': 3}}) with self.assertRaises(mongomock.OperationFailure): self.db.collection.find_one({'a': {'$not': {'b': 3}}}) def test_filter_not_regex(self): self.db.collection.insert_many([ {'_id': 1, 'a': 'b'}, # Starts with a: should be excluded. 
            {'_id': 2, 'a': 'a'},
            {'_id': 3, 'a': 'ba'},
            {'_id': 4}
        ])
        # $not inverts the regex: keeps non-matching docs and docs missing the field.
        results = self.db.collection.find({'a': {'$not': {'$regex': '^a'}}})
        self.assertEqual({1, 3, 4}, {doc['_id'] for doc in results})

    def test_insert_many_bulk_write_error(self):
        """Duplicate _id values in insert_many raise BulkWriteError with the server's message."""
        collection = self.db.collection
        with self.assertRaises(mongomock.BulkWriteError) as cm:
            collection.insert_many([
                {'_id': 1},
                {'_id': 1}
            ])
        self.assertIn('batch op errors occurred', str(cm.exception))

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test_insert_many_bulk_write_error_details(self):
        """BulkWriteError carries code 65 and duplicate-key (11000) writeErrors details."""
        collection = self.db.collection
        with self.assertRaises(mongomock.BulkWriteError) as cm:
            collection.insert_many([
                {'_id': 1},
                {'_id': 1}
            ])
        self.assertEqual(65, cm.exception.code)
        write_errors = cm.exception.details['writeErrors']
        self.assertEqual([11000], [error.get('code') for error in write_errors])

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test_insert_bson_validation(self):
        """Inserting a non-BSON-encodable value (a set) raises InvalidDocument."""
        collection = self.db.collection
        with self.assertRaises(InvalidDocument) as cm:
            collection.insert_one({'a': {'b'}})
        # The exact message is only checked on pymongo >= 3.8.
        if version.parse(pymongo.version) < version.parse('3.8'):
            return
        self.assertEqual(str(cm.exception), "cannot encode object: {'b'}, of type: ")

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test_insert_bson_invalid_encode_type(self):
        """Top-level $-prefixed keys and NUL bytes in nested keys are rejected on insert."""
        collection = self.db.collection
        with self.assertRaises(InvalidDocument) as cm:
            collection.insert_one({'$foo': 'bar'})
        self.assertEqual(
            str(cm.exception),
            'Top-level field names cannot start with the "$"' ' sign (found: $foo)')
        with self.assertRaises(InvalidDocument):
            collection.insert_one({'foo': {'foo\0bar': 'bar'}})

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test_update_bson_invalid_encode_type(self):
        """$set of a $-prefixed field name raises InvalidDocument."""
        self.db.collection.insert_one({'a': 1})
        with self.assertRaises(InvalidDocument):
            self.db.collection.update_one(filter={'a': 1}, update={'$set': {'$a': 2}})

    @skipIf(helpers.PYMONGO_VERSION >= version.parse('3.6'),
            'pymongo has less strict naming requirements after v3.6')
    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test_insert_bson_special_characters(self):
        """Dotted and non-top-level $-keys are stored verbatim (pre-3.6 naming rules)."""
        collection = self.db.collection
        collection.insert_one({'foo.bar.zoo': {'foo.bar': '$zoo'}, 'foo.$bar': 'zoo'})
        actual = self.db.collection.find_one()
        assert actual['foo.bar.zoo'] == {'foo.bar': '$zoo'}
        assert actual['foo.$bar'] == 'zoo'

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__update_invalid_encode_type(self):
        """update_one with a set value (not BSON encodable) raises InvalidDocument."""
        self.db.collection.insert_one({'_id': 1, 'foo': 'bar'})
        with self.assertRaises(InvalidDocument):
            self.db.collection.update_one({}, {'$set': {'foo': {'bar'}}})

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__replace_invalid_encode_type(self):
        """replace_one with a set value (not BSON encodable) raises InvalidDocument."""
        self.db.collection.insert_one({'_id': 1, 'foo': 'bar'})
        with self.assertRaises(InvalidDocument):
            self.db.collection.replace_one({}, {'foo': {'bar'}})

    def test_aggregate_unwind_push_first(self):
        """Pipeline combining $sort/$match/$group($first)/$unwind/$push/$out keeps newest doc."""
        collection = self.db.collection
        collection.insert_many(
            [
                {
                    '_id': 1111,
                    'a': [
                        {
                            'class': '03',
                            'a': [
                                {'b': '030502', 'weight': 100.0},
                                {'b': '030207', 'weight': 100.0}
                            ]
                        }
                    ],
                    'id': 'ooo',
                    'update_time': 1111
                },
                {
                    '_id': 22222,
                    'a': [
                        {
                            'class': '03',
                            'a': [
                                {'b': '030502', 'weight': 99.0},
                                {'b': '0302071', 'weight': 100.0}
                            ]
                        }
                    ],
                    'id': 'ooo',
                    'update_time': 1222
                }
            ]
        )
        actual = collection.aggregate(
            [
                # Sort newest first so $first picks the doc with update_time 1222.
                {'$sort': {'update_time': -1}},
                {'$match': {'a': {'$ne': None}}},
                {
                    '$group': {
                        '_id': '$id',
                        'update_time': {'$first': '$update_time'},
                        'a': {'$first': '$a'}
                    }
                },
                {'$unwind': '$a'},
                {'$unwind': '$a.a'},
                {
                    '$group': {
                        '_id': '$_id',
                        'update_time': {'$first': '$update_time'},
                        'a': {
                            '$push': {
                                'b': '$a.a.b',
                                'weight': '$a.a.weight'
                            }
                        }
                    }
                },
                # $out writes the result into the 'ooo' collection as well.
                {'$out': 'ooo'}
            ],
            allowDiskUse=True)
        expect = [
            {
                'update_time': 1222,
                'a': [
                    {'weight': 99.0, 'b': '030502'},
                    {'weight': 100.0, 'b': '0302071'}],
                '_id': 'ooo'
            }]
        self.assertEqual(expect, list(actual))

    # NOTE(review): 'agregate' is a typo for 'aggregate' in this test name; left as-is
    # because renaming would change the test id reported by the runner.
    def test__agregate_first_on_empty(self):
        """$first/$last return None for groups where the referenced field is absent."""
        collection = self.db.collection
        collection.insert_many([
            {'a': 1, 'b': 1},
            {'a':
                1, 'b': 2},
            {'a': 2},
            {'a': 2},
        ])
        actual = collection.aggregate([{'$group': {
            '_id': '$a',
            'firstB': {'$first': '$b'},
            'lastB': {'$last': '$b'},
        }}])
        expect = [
            {'_id': 1, 'firstB': 1, 'lastB': 2},
            # Group 2 has no 'b' at all, so $first/$last yield None.
            {'_id': 2, 'firstB': None, 'lastB': None},
        ]
        self.assertEqual(expect, list(actual))

    def test__aggregate_group_scalar_key(self):
        """$group by a scalar field deduplicates on that field's value."""
        collection = self.db.collection
        collection.insert_many(
            [
                {'a': 2, 'b': 3, 'c': 4},
                {'a': 2, 'b': 3, 'c': 5},
                {'a': 1, 'b': 1, 'c': 1},
            ]
        )
        actual = collection.aggregate([
            {'$group': {'_id': '$a'}},
        ])
        self.assertCountEqual([{'_id': 1}, {'_id': 2}], list(actual))

    @skipIf(
        helpers.PYMONGO_VERSION >= version.parse('4.0'),
        'pymongo v4 or above do not specify uuid encoding')
    def test__aggregate_group_uuid_key(self):
        """Distinct UUID values form distinct groups."""
        collection = self.db.collection
        collection.insert_many(
            [
                {'uuid_field': uuid.uuid4()},
                {'uuid_field': uuid.uuid4()},
            ]
        )
        actual = collection.aggregate([
            {'$group': {'_id': '$uuid_field'}},
        ])
        self.assertEqual(2, len(list(actual)))

    def test__aggregate_group_missing_key(self):
        """A missing field and an explicit None fall into the same None group."""
        collection = self.db.collection
        collection.insert_many(
            [
                {'a': 1},
                {},
                {'a': None},
            ]
        )
        actual = collection.aggregate([
            {'$group': {'_id': '$a'}},
        ])
        self.assertCountEqual([{'_id': 1}, {'_id': None}], list(actual))

    def test__aggregate_group_dict_key(self):
        """$group supports a compound (document) _id built from several fields."""
        collection = self.db.collection
        collection.insert_many(
            [
                {'a': 2, 'b': 3, 'c': 4},
                {'a': 2, 'b': 3, 'c': 5},
                {'a': 1, 'b': 1, 'c': 1},
            ]
        )
        actual = collection.aggregate([
            {'$group': {'_id': {'a': '$a', 'b': '$b'}}},
        ])
        self.assertCountEqual(
            [{'_id': {'a': 1, 'b': 1}}, {'_id': {'a': 2, 'b': 3}}],
            list(actual)
        )

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__aggregate_group_dbref_key(self):
        """DBRef values group by full (collection, id) identity."""
        collection = self.db.collection
        collection.insert_many(
            [
                {'myref': DBRef('a', '1')},
                {'myref': DBRef('a', '1')},
                {'myref': DBRef('a', '2')},
                {'myref': DBRef('b', '1')},
            ]
        )
        actual = collection.aggregate([
            {'$group': {'_id': '$myref'}}
        ])
        expect = [
            {'_id': DBRef('b', '1')},
            {'_id': DBRef('a', '2')},
            {'_id': DBRef('a', '1')},
        ]
        self.assertCountEqual(expect, list(actual))

    def test__aggregate_group_sum(self):
        """$sum with constants and $cond counts truthy vs. existing 'data' values."""
        collection = self.db.collection
        collection.insert_many([
            {'group': 'one'},
            {'group': 'one'},
            {'group': 'one', 'data': None},
            {'group': 'one', 'data': 0},
            {'group': 'one', 'data': 2},
            {'group': 'one', 'data': {'a': 1}},
            {'group': 'one', 'data': [1, 2]},
            {'group': 'one', 'data': [3, 4]},
        ])
        actual = collection.aggregate([{'$group': {
            '_id': '$group',
            'count': {'$sum': 1},
            # Truthiness: 2, {'a': 1}, [1, 2] and [3, 4] count; None/0/missing don't.
            'countData': {'$sum': {'$cond': ['$data', 1, 0]}},
            # Existence: everything greater than None, i.e. any present non-None value.
            'countDataExists': {'$sum': {'$cond': {
                'if': {'$gt': ['$data', None]},
                'then': 1,
                'else': 0,
            }}},
        }}])
        expect = [{
            '_id': 'one',
            'count': 8,
            'countData': 4,
            'countDataExists': 5,
        }]
        self.assertEqual(expect, list(actual))

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__aggregate_group_sum_for_decimal(self):
        """$sum promotes to Decimal128 when any operand is Decimal128, ignores None/strings."""
        collection = self.db.collection
        collection.drop()
        decimal_value = decimal128.Decimal128('4')
        collection.insert_one({'_id': 1, 'a': 2, 'b': 3, 'c': '$d', 'd': decimal_value})
        actual = collection.aggregate([{'$project': {
            '_id': 0,
            'sum': {'$sum': [4, 2, None, 3, '$a', '$b', '$d', {'$sum': [0, 1, '$b']}]},
            'sum_no_decimal': {'$sum': [4, 2, None, 3, '$a', '$b', {'$sum': [0, 1, '$b']}]},
            # Non-numeric operands are ignored; an all-ignored sum is 0.
            'sumNone': {'$sum': ['a', None]},
        }}])
        expect = [{
            'sum': decimal128.Decimal128('22'),
            'sum_no_decimal': 18,
            'sumNone': 0,
        }]
        self.assertEqual(expect, list(actual))

    def test__aggregate_bucket(self):
        """$bucket groups by boundary ranges with a 'default' bucket and custom output."""
        collection = self.db.collection
        collection.drop()
        collection.insert_many([
            {
                '_id': 1,
                'title': 'The Pillars of Society',
                'artist': 'Grosz',
                'year': 1926,
                'price': 199.99,
            },
            {
                '_id': 2,
                'title': 'Melancholy III',
                'artist': 'Munch',
                'year': 1902,
                'price': 200.00,
            },
            {
                '_id': 3,
                'title': 'Dancer',
                'artist': 'Miro',
                'year': 1925,
                'price': 76.04,
            },
            {
                '_id': 4,
                'title': 'The Great Wave off Kanagawa',
                'artist': 'Hokusai',
                'price': 167.30,
            },
            {
                '_id': 5,
                'title': 'The Persistence of Memory',
                'artist': 'Dali',
                'year': 1931,
                'price': 483.00,
            },
            {
                '_id': 6,
                'title': 'Composition VII',
                'artist': 'Kandinsky',
                'year': 1913,
                'price': 385.00,
            },
            {
                '_id': 7,
                'title': 'The Scream',
                'artist': 'Munch',
                'year': 1893,
                # No price
            },
            {
                '_id': 8,
                'title': 'Blue Flower',
                'artist': "O'Keefe",
                'year': 1918,
                'price': 118.42,
            },
        ])
        actual = collection.aggregate([{'$bucket': {
            'groupBy': '$price',
            'boundaries': [0, 200, 400],
            # Docs without a price or out of range fall into the 'Other' bucket.
            'default': 'Other',
            'output': {
                'count': {'$sum': 1},
                'titles': {'$push': '$title'},
            },
        }}])
        expect = [
            {
                '_id': 0,
                'count': 4,
                'titles': [
                    'The Pillars of Society',
                    'Dancer',
                    'The Great Wave off Kanagawa',
                    'Blue Flower'
                ],
            },
            {
                '_id': 200,
                'count': 2,
                'titles': [
                    'Melancholy III',
                    'Composition VII'
                ],
            },
            {
                '_id': 'Other',
                'count': 2,
                'titles': [
                    'The Persistence of Memory',
                    'The Scream',
                ],
            },
        ]
        self.assertEqual(expect, list(actual))

    def test__aggregate_bucket_no_default(self):
        """Without 'default', empty buckets are omitted and out-of-range docs error."""
        collection = self.db.collection
        collection.drop()
        collection.insert_many([
            {
                '_id': 1,
                'title': 'The Pillars of Society',
                'artist': 'Grosz',
                'year': 1926,
                'price': 199.99,
            },
            {
                '_id': 2,
                'title': 'Melancholy III',
                'artist': 'Munch',
                'year': 1902,
                'price': 280.00,
            },
            {
                '_id': 3,
                'title': 'Dancer',
                'artist': 'Miro',
                'year': 1925,
                'price': 76.04,
            },
        ])
        actual = collection.aggregate([{'$bucket': {
            'groupBy': '$price',
            'boundaries': [0, 200, 400, 600],
        }}])
        expect = [
            {
                '_id': 0,
                'count': 2,
            },
            {
                '_id': 200,
                'count': 1,
            },
        ]
        self.assertEqual(expect, list(actual))

        # A price outside the boundaries with no default bucket is an error.
        with self.assertRaises(mongomock.OperationFailure):
            collection.aggregate([{'$bucket': {
                'groupBy': '$price',
                'boundaries': [0, 150],
            }}])

    def test__aggregate_bucket_wrong_options(self):
        """Each malformed $bucket spec raises OperationFailure."""
        options = [
            {},
            {'groupBy': '$price', 'boundaries': [0, 1], 'extraOption': 2},
            {'groupBy': '$price'},
            {'boundaries': [0, 1]},
            {'groupBy': '$price', 'boundaries': 3},
            {'groupBy': '$price', 'boundaries': [0]},
            {'groupBy': '$price', 'boundaries': [1, 0]},
        ]
        for option in options:
            with self.assertRaises(mongomock.OperationFailure, msg=option):
                self.db.collection.aggregate([{'$bucket': option}])

    def test__aggregate_subtract_dates(self):
        """Subtracting two dates yields the difference in milliseconds."""
        self.db.collection.insert_one({
            'date': datetime(2014, 7, 4, 13, 0, 4, 20000),
        })
        actual = self.db.collection.aggregate([{'$project': {
            'since': {'$subtract': ['$date', datetime(2014, 7, 4, 13, 0, 0, 20)]},
        }}])
        self.assertEqual([4020], [d['since'] for d in actual])

    def test__aggregate_subtract_milliseconds_from_date(self):
        """Subtracting a number from a date shifts it back by that many milliseconds."""
        self.db.collection.insert_one({
            'date': datetime(2014, 7, 4, 13, 0, 4, 20000),
        })
        actual = self.db.collection.aggregate([{'$project': {
            'since': {'$subtract': ['$date', 1000]},
        }}])
        self.assertEqual([datetime(2014, 7, 4, 13, 0, 3, 20000)], [d['since'] for d in actual])

    def test__aggregate_system_variables(self):
        """$$ROOT pushes whole documents into a group accumulator."""
        self.db.collection.insert_many([
            {'_id': 1},
            {'_id': 2, 'parent_id': 1},
            {'_id': 3, 'parent_id': 1},
        ])
        actual = self.db.collection.aggregate([
            {'$match': {'parent_id': {'$in': [1]}}},
            {'$group': {'_id': 1, 'docs': {'$push': '$$ROOT'}}},
        ])
        self.assertEqual(
            [{'_id': 1, 'docs': [
                {'_id': 2, 'parent_id': 1},
                {'_id': 3, 'parent_id': 1},
            ]}],
            list(actual))

    def test__aggregate_select_nested(self):
        """Dotted field paths in $project select array indices, nested keys and fan out arrays."""
        self.db.collection.insert_one({
            'base_value': 100,
            'values_list': [
                {'updated_value': 5},
                {'updated_value': 15},
            ],
            'nested_value': {
                'updated_value': 7,
            },
        })
        actual = list(self.db.collection.aggregate([
            {'$project': {
                'select_1': '$values_list.1.updated_value',
                'select_nested': '$nested_value.updated_value',
                'select_array': '$values_list.updated_value',
            }},
        ]))
        self.assertEqual(1, len(actual), msg=actual)
        actual[0].pop('_id')
        self.assertEqual({
            'select_1': 15,
            'select_nested': 7,
            'select_array': [5, 15],
        }, actual[0])

    def test__aggregate_concatArrays(self):
        """$concatArrays joins lists/tuples; None or a missing operand yields None."""
        self.db.collection.insert_one({
            'a': [1, 2],
            'b': ['foo', 'bar', 'baz'],
            'c': {
                'arr1': [123],
            }
        })
        actual = self.db.collection.aggregate([{
            '$project': {
                'concat': {'$concatArrays': ['$a', ['#', '*'], '$c.arr1', '$b']},
                'concat_array_expression': {'$concatArrays': '$b'},
                'concat_tuples': {'$concatArrays': ((1, 2, 3), (1,))},
                'concat_none': {'$concatArrays': None},
                'concat_missing_field': {'$concatArrays': '$foo'},
                'concat_none_item': {'$concatArrays': ['$a', None, '$b']},
                'concat_missing_field_item': {'$concatArrays': [[1, 2, 3], '$c.arr2']}
            }
        }])
        self.assertEqual(
            [{
                'concat': [1, 2, '#', '*', 123, 'foo', 'bar', 'baz'],
                'concat_array_expression': ['foo', 'bar', 'baz'],
                'concat_tuples': [1, 2, 3, 1],
                'concat_none': None,
                'concat_missing_field': None,
                'concat_none_item': None,
                'concat_missing_field_item': None
            }],
            [{k: v for k, v in doc.items() if k != '_id'} for doc in actual]
        )

    def test__aggregate_concatArrays_exceptions(self):
        """Non-array operands to $concatArrays raise OperationFailure."""
        self.db.collection.insert_one({
            'a': {
                'arr1': [123]
            }
        })
        pipeline_parameter_not_array = [
            {
                '$project': {
                    'concat_parameter_not_array': {'$concatArrays': 42}
                }
            }
        ]
        with self.assertRaises(mongomock.OperationFailure):
            self.db.collection.aggregate(pipeline_parameter_not_array)

        pipeline_item_not_array = [
            {
                '$project': {
                    'concat_item_not_array': {'$concatArrays': [[1, 2], '$a']}
                }
            }
        ]
        with self.assertRaises(mongomock.OperationFailure):
            self.db.collection.aggregate(pipeline_item_not_array)

    def test__aggregate_filter(self):
        """$filter selects array items matching 'cond' using the named 'as' variable."""
        collection = self.db.collection
        collection.drop()
        collection.insert_many([
            {
                '_id': 0,
                'items': [
                    {'item_id': 43, 'quantity': 2, 'price': 10},
                    {'item_id': 2, 'quantity': 1, 'price': 240},
                ],
            },
            {
                '_id': 1,
                'items': [
                    {'item_id': 23, 'quantity': 3, 'price': 110},
                    {'item_id': 103, 'quantity': 4, 'price': 5},
                    {'item_id': 38, 'quantity': 1, 'price': 300},
                ],
            },
            {
                '_id': 2,
                'items': [
                    {'item_id': 4, 'quantity': 1, 'price': 23},
                ],
            },
        ])
        actual = collection.aggregate([{'$project': {'filtered_items': {'$filter': {
            'input': '$items',
            'as': 'item',
            'cond': {'$gte': ['$$item.price', 100]},
        }}}}])
        expect = [
            {
                '_id': 0,
                'filtered_items': [
                    {'item_id': 2, 'quantity': 1, 'price': 240},
                ],
            },
            {
                '_id': 1,
                'filtered_items': [
                    {'item_id': 23, 'quantity': 3, 'price': 110},
                    {'item_id': 38, 'quantity': 1, 'price': 300},
                ],
            },
            # No item reaches the 100 threshold: result is an empty list.
            {'_id': 2, 'filtered_items': []},
        ]
        self.assertEqual(expect, list(actual))

    def test__aggregate_filter_wrong_options(self):
        """Each malformed $filter spec raises OperationFailure."""
        options = [
            3,
            ['$items', {'$gte': ['$$item.price', 100]}],
            {},
            {'input': '$items'},
            {'cond': {'$gte': ['$$item.price', 100]}},
            {'input': '$items', 'cond': {'$$this.filter'}, 'extraOption': 2},
        ]
        self.db.collection.insert_one({})
        for option in options:
            with self.assertRaises(mongomock.OperationFailure, msg=option):
                self.db.collection.aggregate(
                    [{'$project': {'filtered_items': {'$filter': option}}}])

    def test__aggregate_map(self):
        """$map transforms arrays via $$this or a custom 'as' variable; None/missing pass through."""
        collection = self.db.collection
        collection.insert_one({
            'array': [1, 2, 3, 4],
        })
        actual = collection.aggregate([{'$project': {
            '_id': 0,
            'array': {'$map': {
                'input': '$array',
                'in': {'$multiply': ['$$this', '$$this']},
            }},
            'custom_variable': {'$map': {
                'input': '$array',
                'as': 'self',
                'in': {'$multiply': ['$$self', '$$self']},
            }},
            'empty': {'$map': {
                'input': [],
                'in': {'$multiply': ['$$this', '$$this']},
            }},
            'null': {'$map': {
                'input': None,
                'in': '$$this',
            }},
            'missing': {'$map': {
                'input': '$missing.key',
                'in': '$$this',
            }},
        }}])
        expect = [{
            'array': [1, 4, 9, 16],
            'custom_variable': [1, 4, 9, 16],
            'empty': [],
            'null': None,
            'missing': None,
        }]
        self.assertEqual(expect, list(actual))

    def test__aggregate_map_errors(self):
        """Each malformed $map spec raises OperationFailure with the expected message."""
        collection = self.db.collection
        collection.insert_one({})
        data = (
            (
                [],
                '$map only supports an object as its argument',
            ),
            (
                {},
                "Missing 'input' parameter to $map",
            ),
            (
                # Check that the following message is raised before the error
                # on the type of input
                {'input': 'foo'},
                "Missing 'in' parameter to $map",
            ),
            (
                # NOTE: actual type is omitted in the expected message because
                # of difference in string representations for types between
                # Python 2 and Python 3.
                # TODO(guludo): We should output the type name that is output
                # by the real mongodb.
                {'input': 'foo', 'in': '$$this'},
                'input to $map must be an array not',
            ),
            (
                {'input': [], 'in': '$$this', 'foo': 1},
                'Unrecognized parameter to $map: foo',
            ),
        )
        for op, msg in data:
            with self.assertRaises(mongomock.OperationFailure) as cm:
                collection.aggregate([{'$project': {'x': {'$map': op}}}])
            self.assertIn(msg, str(cm.exception))

    def test__aggregate_slice(self):
        """$slice supports positive/negative offsets and counts, with clamping at both ends."""
        self.db.collection.drop()
        collection = self.db.collection
        self.db.collection.insert_many([
            {
                '_id': 0,
                'items': list(range(10)),
            },
            {
                '_id': 1,
                'items': list(range(10, 20)),
            },
            {
                '_id': 2,
                'items': list(range(20, 30)),
            },
        ])

        empty = [
            {'_id': 0, 'slice': []},
            {'_id': 1, 'slice': []},
            {'_id': 2, 'slice': []}
        ]
        self.assertEqual(empty, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', 0]}}}
        ])))

        first_five = [
            {'_id': 0, 'slice': list(range(5))},
            {'_id': 1, 'slice': list(range(10, 15))},
            {'_id': 2, 'slice': list(range(20, 25))}
        ]
        self.assertEqual(first_five, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', 5]}}}
        ])))
        self.assertEqual(first_five, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', 0, 5]}}}
        ])))
        self.assertEqual(first_five, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', -10, 5]}}}
        ])))

        full = [
            {'_id': 0, 'slice': list(range(10))},
            {'_id': 1, 'slice': list(range(10, 20))},
            {'_id': 2, 'slice': list(range(20, 30))}
        ]
        self.assertEqual(full, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', 10]}}}
        ])))
        self.assertEqual(full, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', 10000]}}}
        ])))
        self.assertEqual(full, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', 0, 10000]}}}
        ])))
        self.assertEqual(full, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', -10]}}}
        ])))
        self.assertEqual(full, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', -10000]}}}
        ])))
        self.assertEqual(full, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', -10, 10]}}}
        ])))

        last_five = [
            {'_id': 0, 'slice': list(range(5, 10))},
            {'_id': 1, 'slice': list(range(15, 20))},
            {'_id': 2, 'slice': list(range(25, 30))}
        ]
        self.assertEqual(last_five, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', 5, 5]}}}
        ])))
        self.assertEqual(last_five, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', -5]}}}
        ])))
        self.assertEqual(last_five, list(collection.aggregate([
            {'$project': {'slice': {'$slice': ['$items', -5, 5]}}}
        ])))

    def test__aggregate_slice_wrong(self):
        """Each malformed $slice spec raises OperationFailure."""
        # inserts an item otherwise the slice is not even evaluated
        self.db.collection.insert_one(
            {
                '_id': 0,
                'items': list(range(10)),
            }
        )
        options = [
            {},
            [],
            [0],
            [0, 0],
            ['$items'],
            ['$items', 0, 0],
            ['$items', 1, 0],
            ['$items', 0, -1],
            ['$items', -1, -1],
            ['items', 0],
            ['items', 'foo'],
            ['items', 0, 'bar'],
            '$items',
        ]
        for option in options:
            with self.assertRaises(mongomock.OperationFailure, msg=option):
                self.db.collection.aggregate(
                    [{'$project': {'slice': {'$slice': option}}}])

    def test__write_concern(self):
        """Collections expose a default and a custom, immutable write concern document."""
        self.assertEqual({}, self.db.collection.write_concern.document)
        self.assertTrue(self.db.collection.write_concern.is_server_default)
        self.assertTrue(self.db.collection.write_concern.acknowledged)

        collection = self.db.get_collection('a', write_concern=WriteConcern(
            w=2, wtimeout=100, j=True, fsync=False))
        self.assertEqual({
            'fsync': False,
            'j': True,
            'w': 2,
            'wtimeout': 100,
        }, collection.write_concern.document)

        # http://api.mongodb.com/python/current/api/pymongo/write_concern.html#pymongo.write_concern.WriteConcern.document
        collection.write_concern.document.pop('wtimeout')
        self.assertEqual({
            'fsync': False,
            'j': True,
            'w': 2,
            'wtimeout': 100,
        }, collection.write_concern.document, msg='Write concern is immutable')

    def test__read_preference_default(self):
        # Test various properties of the default read preference.
        self.assertEqual(0, self.db.collection.read_preference.mode)
        self.assertEqual('primary', self.db.collection.read_preference.mongos_mode)
        self.assertEqual({'mode': 'primary'}, self.db.collection.read_preference.document)
        self.assertEqual('Primary', self.db.collection.read_preference.name)
        self.assertEqual([{}], self.db.collection.read_preference.tag_sets)
        self.assertEqual(-1, self.db.collection.read_preference.max_staleness)
        self.assertEqual(0, self.db.collection.read_preference.min_wire_version)

        # The default read preference can be passed back to get_collection.
        collection = self.db.get_collection(
            'a', read_preference=self.db.collection.read_preference)
        self.assertEqual('primary', collection.read_preference.mongos_mode)

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__read_preference(self):
        """A custom pymongo ReadPreference is accepted and reflected by the collection."""
        collection = self.db.get_collection('a', read_preference=ReadPreference.NEAREST)
        self.assertEqual('nearest', collection.read_preference.mongos_mode)

    @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 or above')
    def test__bulk_write_unordered(self):
        """Unordered bulk ops keep going past duplicate-key errors (3 inserted, 2 failed)."""
        bulk = self.db.collection.initialize_unordered_bulk_op()
        bulk.insert({'_id': 1})
        bulk.insert({'_id': 2})
        bulk.insert({'_id': 1})
        bulk.insert({'_id': 3})
        bulk.insert({'_id': 1})
        with self.assertRaises(mongomock.BulkWriteError) as err_context:
            bulk.execute()
        self.assertCountEqual([1, 2, 3], [d['_id'] for d in self.db.collection.find()])
        self.assertEqual(3, err_context.exception.details['nInserted'])
        self.assertEqual([2, 4], [e['index'] for e in err_context.exception.details['writeErrors']])

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__bulk_write_unordered_with_bulk_write(self):
        """Same unordered semantics via the modern bulk_write(ordered=False) API."""
        with self.assertRaises(mongomock.BulkWriteError) as err_context:
            self.db.collection.bulk_write([
                pymongo.InsertOne({'_id': 1}),
                pymongo.InsertOne({'_id': 2}),
                pymongo.InsertOne({'_id': 1}),
                pymongo.InsertOne({'_id': 3}),
                pymongo.InsertOne({'_id': 1}),
            ], ordered=False)
        self.assertCountEqual([1, 2, 3], [d['_id'] for d in self.db.collection.find()])
        self.assertEqual(3, err_context.exception.details['nInserted'])
        self.assertEqual([2, 4], [e['index'] for e in err_context.exception.details['writeErrors']])

    @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 or above')
    def test__bulk_write_ordered(self):
        """Ordered bulk ops stop at the first duplicate-key error (2 inserted)."""
        bulk = self.db.collection.initialize_ordered_bulk_op()
        bulk.insert({'_id': 1})
        bulk.insert({'_id': 2})
        bulk.insert({'_id': 1})
        bulk.insert({'_id': 3})
        bulk.insert({'_id': 1})
        with self.assertRaises(mongomock.BulkWriteError) as err_context:
            bulk.execute()
        self.assertCountEqual([1, 2], [d['_id'] for d in self.db.collection.find()])
        self.assertEqual(2, err_context.exception.details['nInserted'])
        self.assertEqual([2], [e['index'] for e in err_context.exception.details['writeErrors']])

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__bulk_write_ordered_with_bulk_write(self):
        """Same ordered semantics via the modern bulk_write API (ordered is the default)."""
        with self.assertRaises(mongomock.BulkWriteError) as err_context:
            self.db.collection.bulk_write([
                pymongo.InsertOne({'_id': 1}),
                pymongo.InsertOne({'_id': 2}),
                pymongo.InsertOne({'_id': 1}),
                pymongo.InsertOne({'_id': 3}),
                pymongo.InsertOne({'_id': 1}),
            ])
        self.assertCountEqual([1, 2], [d['_id'] for d in self.db.collection.find()])
        self.assertEqual(2, err_context.exception.details['nInserted'])
        self.assertEqual([2], [e['index'] for e in err_context.exception.details['writeErrors']])

    def test__set_union(self):
        """$setUnion deduplicates while preserving first-seen order, comparing deeply."""
        collection = self.db.collection
        collection.insert_many([
            {'array': ['one', 'three']},
        ])
        actual = collection.aggregate([{'$project': {
            '_id': 0,
            'array': {'$setUnion': [['one', 'two'], '$array']},
            'distinct': {'$setUnion': [['one', 'two'], ['three'], ['four']]},
            'nested': {'$setUnion': [['one', 'two'], [['one', 'two']]]},
            'objects': {'$setUnion': [[{'a': 1}, {'b': 2}], [{'a': 1}, {'c': 3}]]},
        }}])
        expect = [{
            'array': ['one', 'two', 'three'],
            'distinct': ['one', 'two', 'three', 'four'],
            'nested': ['one', 'two', ['one', 'two']],
            'objects': [{'a': 1}, {'b': 2}, {'c': 3}],
        }]
        self.assertEqual(expect, list(actual))

    def test__set_equals(self):
        """$setEquals compares arrays as sets, ignoring order, across 2 or more operands."""
        collection = self.db.collection
        collection.insert_many([
            {'array': ['one', 'three']},
        ])
        actual = collection.aggregate([{'$project': {
            '_id': 0,
            'same_array': {'$setEquals': ['$array', '$array']},
            'eq_array': {'$setEquals': [['one', 'three'], '$array']},
            'ne_array': {'$setEquals': [['one', 'two'], '$array']},
            'eq_in_another_order': {'$setEquals': [['one', 'two'], ['two', 'one']]},
            'ne_in_another_order': {'$setEquals': [['one', 'two'], ['three', 'one', 'two']]},
            'three_equal': {'$setEquals': [['one', 'two'], ['two', 'one'], ['one', 'two']]},
            'three_not_equal': {'$setEquals': [['one', 'three'], ['two', 'one'], ['two', 'one']]},
        }}])
        expect = [{
            'same_array': True,
            'eq_array': True,
            'ne_array': False,
            'eq_in_another_order': True,
            'ne_in_another_order': False,
            'three_equal': True,
            'three_not_equal': False,
        }]
        self.assertEqual(expect, list(actual))

    def test__add_to_set_missing_value(self):
        """$addToSet skips documents where the referenced field is missing."""
        collection = self.db.collection
        collection.insert_many([
            {'key1': 'a', 'my_key': 1},
            {'key1': 'a'},
        ])
        actual = collection.aggregate([{'$group': {
            '_id': {'key1': '$key1'},
            'my_keys': {'$addToSet': '$my_key'},
        }}])
        expect = [{
            '_id': {'key1': 'a'},
            'my_keys': [1],
        }]
        self.assertEqual(expect, list(actual))

    def test__not_implemented_operator(self):
        """$geoIntersects is not supported and raises NotImplementedError."""
        collection = self.db.collection
        with self.assertRaises(NotImplementedError):
            collection.find_one({'geometry': {'$geoIntersects': {'$geometry': {
                'coordinates': [-122.4194, 37.7749],
                'type': "",
            }}}})

    def test__not_implemented_methods(self):
        """Raw-batch cursor methods are not supported and raise NotImplementedError."""
        collection = self.db.collection
        with self.assertRaises(NotImplementedError):
            collection.find_raw_batches()
        with self.assertRaises(NotImplementedError):
            collection.aggregate_raw_batches([{'$unwind': '$phones'}])

    def test__insert_do_not_modify_input(self):
        """insert_one only adds _id to its input; stored data is a detached, normalized copy."""
        collection = self.db.collection
        document = {
            'number': 3,
            'object': {'a': 1},
            'date': datetime(2000, 1, 1, 12, 30, 30, 12745, tzinfo=UTCPlus2()),
        }
        original_document = copy.deepcopy(document)
        collection.insert_one(document)
        self.assertNotEqual(
            original_document, document,
            msg='Document has been modified')
        self.assertEqual(
            dict(original_document, _id=None), dict(document, _id=None),
            msg='The only modification is adding the _id.')

        # Comparing stored document and the original one: the dates are modified.
        stored_document = collection.find_one()
        del stored_document['_id']
        self.assertEqual(
            dict(original_document, date=None), dict(stored_document, date=None))
        self.assertNotEqual(
            original_document, stored_document,
            msg='The document is not the same because the date TZ has been stripped of and the '
            'microseconds truncated.')
        self.assertNotEqual(
            original_document['date'].timestamp(), stored_document['date'].timestamp())
        # UTC+2 12:30:30.012745 becomes naive UTC 10:30:30.012000 (millisecond precision).
        self.assertEqual(
            datetime(2000, 1, 1, 10, 30, 30, 12000), stored_document['date'],
            msg='The stored document holds a date as timezone naive UTC and without '
            'microseconds')

        # The objects are not linked: modifying the inserted document or the fetched one will
        # have no effect on future retrievals.
        document['object']['new_key'] = 42
        fetched_document = stored_document
        fetched_document['object']['new_key'] = 'post-find'

        stored_document = collection.find_one()
        del stored_document['_id']
        self.assertNotEqual(
            document, stored_document,
            msg='Modifying the inserted document afterwards does not modify the stored document.')
        self.assertNotEqual(
            fetched_document, stored_document,
            msg='Modifying the found document afterwards does not modify the stored document.')
        self.assertEqual(dict(original_document, date=None), dict(stored_document, date=None))

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__aggregate_to_string(self):
        """$toString stringifies ObjectId, booleans, ints and dates; missing fields yield None."""
        collection = self.db.collection
        collection.insert_one({
            '_id': ObjectId('5dd6a8f302c91829ef248162'),
            'boolean_true': True,
            'boolean_false': False,
            'integer': 100,
            'date': datetime(2018, 3, 27, 0, 58, 51, 538000),
        })
        actual = collection.aggregate(
            [
                {
                    '$addFields': {
                        '_id': {'$toString': '$_id'},
                        'boolean_true': {'$toString': '$boolean_true'},
                        'boolean_false': {'$toString': '$boolean_false'},
                        'integer': {'$toString': '$integer'},
                        'date': {'$toString': '$date'},
                        'none': {'$toString': '$notexist'}
                    }
                }
            ]
        )
        expect = [{
            '_id': '5dd6a8f302c91829ef248162',
            'boolean_true': 'true',
            'boolean_false': 'false',
            'integer': '100',
            # Dates are rendered in ISO-8601 with millisecond precision and a Z suffix.
            'date': '2018-03-27T00:58:51.538Z',
            'none': None
        }]
        self.assertEqual(expect, list(actual))

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__aggregate_to_decimal(self):
        """$toDecimal converts bools, numbers, numeric strings and dates to Decimal128."""
        collection = self.db.collection
        collection.insert_one({
            '_id': ObjectId('5dd6a8f302c91829ef248161'),
            'boolean_true': True,
            'boolean_false': False,
            'integer': 100,
            'double': 1.999,
            'decimal': decimal128.Decimal128('5.5000'),
            'str_base_10_numeric': '123',
            'str_negative_number': '-23',
            'str_decimal_number': '1.99',
            'str_not_numeric': '123a123',
            'datetime': datetime.utcfromtimestamp(0),
        })
        actual = collection.aggregate(
            [
                {
                    '$addFields': {
                        'boolean_true': {'$toDecimal': '$boolean_true'},
                        'boolean_false': {'$toDecimal': '$boolean_false'},
                        'integer': {'$toDecimal': '$integer'},
                        'double': {'$toDecimal': '$double'},
                        'decimal': {'$toDecimal': '$decimal'},
                        'str_base_10_numeric': {'$toDecimal': '$str_base_10_numeric'},
                        'str_negative_number': {'$toDecimal': '$str_negative_number'},
                        'str_decimal_number': {'$toDecimal': '$str_decimal_number'},
                        'datetime': {'$toDecimal': '$datetime'},
                        'not_exist_field': {'$toDecimal': '$not_exist_field'},
                    }
                },
                {
                    '$project': {
                        '_id': 0
                    }
                }
            ]
        )
        expect = [{
            'boolean_true': decimal128.Decimal128('1'),
            'boolean_false': decimal128.Decimal128('0'),
            'integer': decimal128.Decimal128('100'),
            'double': decimal128.Decimal128('1.99900000000000'),
            'decimal': decimal128.Decimal128('5.5000'),
            'str_base_10_numeric': decimal128.Decimal128('123'),
            'str_negative_number': decimal128.Decimal128('-23'),
            'str_decimal_number': decimal128.Decimal128('1.99'),
            # Untouched field keeps its original value.
            'str_not_numeric': '123a123',
            'datetime': decimal128.Decimal128('0'),
            'not_exist_field': None,
        }]
        self.assertEqual(expect, list(actual))

        # A non-numeric string cannot be converted.
        with self.assertRaises(mongomock.OperationFailure):
            collection.aggregate(
                [
                    {
                        '$addFields': {
                            'str_not_numeric': {'$toDecimal': '$str_not_numeric'}
                        }
                    },
                    {
                        '$project': {
                            '_id': 0
                        }
                    }
                ]
            )
        # Neither can an ObjectId.
        with self.assertRaises(TypeError):
            collection.aggregate(
                [
                    {
                        '$addFields': {
                            '_id': {'$toDecimal': '$_id'}
                        }
                    },
                    {
                        '$project': {
                            '_id': 0
                        }
                    }
                ]
            )

    @skipIf(helpers.HAVE_PYMONGO, 'pymongo installed')
    def test__aggregate_to_decimal_without_pymongo(self):
        """$toDecimal requires pymongo's Decimal128; without it, NotImplementedError."""
        collection = self.db.collection
        collection.insert_one({
            'boolean_true': True,
            'boolean_false': False,
        })
        with self.assertRaises(NotImplementedError):
            collection.aggregate(
                [
                    {
                        '$addFields': {
                            'boolean_true': {'$toDecimal': '$boolean_true'},
                            'boolean_false': {'$toDecimal': '$boolean_false'},
                        }
                    },
                    {
                        '$project': {
                            '_id': 0
                        }
                    }
                ]
            )

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__aggregate_to_int(self):
        """$toInt truncates doubles and Decimal128s; missing fields yield None."""
        collection = self.db.collection
        collection.insert_one({
            'boolean_true': True,
            'boolean_false': False,
            'integer': 100,
            'double': 1.999,
            'decimal': decimal128.Decimal128('5.5000')
        })
        actual = collection.aggregate(
            [
                {
                    '$addFields': {
                        'boolean_true': {'$toInt': '$boolean_true'},
                        'boolean_false': {'$toInt': '$boolean_false'},
                        'integer': {'$toInt': '$integer'},
                        'double': {'$toInt': '$double'},
                        'decimal': {'$toInt': '$decimal'},
                        'not_exist': {'$toInt': '$not_exist'},
                    }
                },
                {
                    '$project': {
                        '_id': 0
                    }
                }
            ]
        )
        expect = [{
            'boolean_true': 1,
            'boolean_false': 0,
            'integer': 100,
            'double': 1,
            'decimal': 5,
            'not_exist': None,
        }]
        self.assertEqual(expect, list(actual))

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__aggregate_to_long(self):
        """$toLong behaves like $toInt for these in-range values."""
        collection = self.db.collection
        collection.insert_one({
            'boolean_true': True,
            'boolean_false': False,
            'integer': 100,
            'double': 1.999,
            'decimal': decimal128.Decimal128('5.5000')
        })
        actual = collection.aggregate(
            [
                {
                    '$addFields': {
                        'boolean_true': {'$toLong': '$boolean_true'},
                        'boolean_false': {'$toLong': '$boolean_false'},
                        'integer': {'$toLong': '$integer'},
                        'double': {'$toLong': '$double'},
                        'decimal': {'$toLong': '$decimal'},
                        'not_exist': {'$toLong': '$not_exist'},
                    }
                },
                {
                    '$project': {
                        '_id': 0
                    }
                }
            ]
        )
        expect = [{
            'boolean_true': 1,
            'boolean_false': 0,
            'integer': 100,
            'double': 1,
            'decimal': 5,
            'not_exist': None,
        }]
        self.assertEqual(expect, list(actual))

    @skipIf(helpers.HAVE_PYMONGO, 'pymongo installed')
    def test__aggregate_to_long_no_pymongo(self):
        """$toLong requires pymongo; without it, NotImplementedError."""
        collection = self.db.collection
        collection.drop()
        collection.insert_one({
            'double': 1.999,
        })
        with self.assertRaises(NotImplementedError):
            list(collection.aggregate(
                [
                    {
                        '$addFields': {
                            'double': {'$toLong': '$double'},
                        }
                    },
                    {
                        '$project': {
                            '_id': 0
                        }
                    }
                ]
            ))

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__aggregate_date_to_string(self):
        """$dateToString formats dates; unsupported options raise, missing ones fail."""
        collection = self.db.collection
        collection.insert_one({
            'start_date': datetime(2011, 11, 4, 0, 5, 23),
        })
        actual = collection.aggregate(
            [
                {
                    '$addFields': {
                        'start_date': {
                            '$dateToString': {'format': '%Y/%m/%d %H:%M', 'date': '$start_date'}
                        }
                    }
                },
                {'$project': {'_id': 0}},
            ]
        )
        expect = [{
            'start_date': '2011/11/04 00:05',
        }]
        self.assertEqual(expect, list(actual))

        # %L (milliseconds) is not implemented.
        with self.assertRaises(NotImplementedError):
            self.db.collection.aggregate([
                {'$project': {'a': {'$dateToString': {
                    'date': datetime.now(), 'format': '%L'
                }}}},
            ])
        # onNull is not implemented.
        with self.assertRaises(NotImplementedError):
            self.db.collection.aggregate([
                {'$project': {'a': {'$dateToString': {
                    'date': datetime.now(), 'format': '%m', 'onNull': 'a'
                }}}},
            ])
        # timezone is not implemented.
        with self.assertRaises(NotImplementedError):
            self.db.collection.aggregate([
                {'$project': {'a': {'$dateToString': {
                    'date': datetime.now(), 'format': '%m', 'timezone': 'America/New_York'
                }}}},
            ])
        # 'format' and 'date' are both mandatory, and the operand must be a document.
        with self.assertRaises(mongomock.OperationFailure):
            self.db.collection.aggregate([
                {'$project': {'a': {'$dateToString': {
                    'date': datetime.now(),
                }}}},
            ])
        with self.assertRaises(mongomock.OperationFailure):
            self.db.collection.aggregate([
                {'$project': {'a': {'$dateToString': {
                    'format': '%m',
                }}}},
            ])
        with self.assertRaises(mongomock.OperationFailure):
            self.db.collection.aggregate(
                [{'$project': {'a': {'$dateToString': '10'}}}]
            )

    @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed')
    def test__aggregate_date_from_parts(self):
        """$dateFromParts builds a date from year/month/day; ISO-week and timezone raise."""
        collection = self.db.collection
        collection.insert_one({
            'start_date': datetime(2022, 8, 3, 0, 5, 23),
        })
        actual = collection.aggregate([
            {
                '$addFields': {
                    'start_date': {
                        '$dateFromParts': {
                            'year': {'$year': '$start_date'},
                            'month': {'$month': '$start_date'},
                            'day': {'$dayOfMonth': '$start_date'},
                        }
                    }
                }
            },
            {'$project': {'_id': 0}},
        ])
        expect = [{
            'start_date': datetime(2022, 8, 3),
        }]
        self.assertEqual(expect, list(actual))

        # 'year' is mandatory when not using isoWeekYear.
        with self.assertRaises(mongomock.OperationFailure):
            self.db.collection.aggregate([
                {
                    '$addFields': {
                        'start_date': {
                            '$dateFromParts': {
                                'day': 1,
                            }
                        }
                    }
                }
            ])
        # The ISO-week calendar variants are not implemented.
        with self.assertRaises(NotImplementedError):
            self.db.collection.aggregate([
                {
                    '$addFields': {
                        'start_date': {
                            '$dateFromParts': {
                                'isoWeekYear': 1,
                            }
                        }
                    }
                }
            ])
        with self.assertRaises(NotImplementedError):
            self.db.collection.aggregate([
                {
                    '$addFields': {
                        'start_date': {
                            '$dateFromParts': {
                                'isoWeekYear': 1,
                                'isoWeek': 53,
                            }
                        }
                    }
                }
            ])
        with self.assertRaises(NotImplementedError):
            self.db.collection.aggregate([
                {
                    '$addFields': {
                        'start_date': {
                            '$dateFromParts': {
                                'isoWeekYear': 1,
                                'isoDayOfWeek': 7,
                            }
                        }
                    }
                }
            ])
        # timezone is not implemented.
        with self.assertRaises(NotImplementedError):
            self.db.collection.aggregate([
                {
                    '$addFields': {
                        'start_date': {
                            '$dateFromParts': {
                                'year': {'$year': '$start_date'},
                                'timezone': 'America/New_York',
                            }
                        }
                    }
                }
            ])

    def test__aggregate_array_to_object(self):
        """$arrayToObject accepts [k, v] pairs or {k, v} docs; later keys win; None passes."""
        collection = self.db.collection
        collection.insert_many([{
            'items': [['a', 1], ['b', 2], ['c', 3], ['a', 4]]
        }, {
            'items': (['a', 1], ['b', 2], ['c', 3], ['a', 4])
        }, {
            'items': [('a', 1), ('b', 2), ('c', 3), ('a', 4)]
        }, {
            'items': (('a', 1), ('b', 2), ('c', 3), ('a', 4))
        }, {
            'items': [['a', 1], ('b', 2), ['c', 3], ('a', 4)]
        }, {
            'items': (['a', 1], ('b', 2), ['c', 3], ('a', 4))
        }, {
            'items': [{'k': 'a', 'v': 1}, {'k': 'b', 'v': 2}, {'k': 'c', 'v': 3}, {'k': 'a', 'v': 4}],
        }, {
            'items': [],
        }, {
            'items': (),
        }, {
            'items': None,
        }])
        actual = collection.aggregate([
            {'$project': {
                'items': {'$arrayToObject': '$items'},
                'not_exists': {'$arrayToObject': '$nothing'},
                '_id': 0
            }}
        ])
        expect = [{
            'items': {'a': 4, 'b': 2, 'c': 3}, 'not_exists': None
        }, {
            'items': {'a': 4, 'b': 2, 'c': 3}, 'not_exists': None
        }, {
            'items': {'a': 4, 'b': 2, 'c': 3}, 'not_exists': None
        }, {
            'items': {'a': 4, 'b': 2, 'c': 3}, 'not_exists': None
        }, {
            'items': {'a': 4, 'b': 2, 'c': 3}, 'not_exists': None
        }, {
            'items': {'a': 4, 'b': 2, 'c': 3}, 'not_exists': None
        }, {
            'items': {'a': 4, 'b': 2, 'c': 3}, 'not_exists': None
        }, {
            'items': {}, 'not_exists': None
        }, {
            'items': {}, 'not_exists': None
        }, {
            'items': None, 'not_exists': None
        }]
        self.assertEqual(expect, list(actual))

        # All of these items should trigger an error
        items = [[
            {'$addFields': {'items': ''}},
            {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}}
        ], [
            {'$addFields': {'items': 100}},
            {'$project': {'items':
{'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [['a', 'b', 'c'], ['d', 2]]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [['a'], ['b', 2]]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [[]]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [{'k': 'a', 'v': 1, 't': 't'}, {'k': 'b', 'v': 2}]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [{'v': 1, 't': 't'}]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [{}]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [['a', 1], {'k': 'b', 'v': 2}]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ]] for item in items: with self.assertRaises(mongomock.OperationFailure): collection.aggregate(item) def test_aggregate_object_to_array(self): collection = self.db.collection collection.insert_many([ {'items': None}, {'items': {'qty': 25}}, {'items': { 'size': {'len': 25, 'w': 10, 'uom': 'cm'}, }}, ]) expect = [{ 'items': None, 'not_exists': None }, { 'items': [ {'k': 'qty', 'v': 25}, ], 'not_exists': None }, { 'items': [ {'k': 'size', 'v': {'len': 25, 'w': 10, 'uom': 'cm'}}, ], 'not_exists': None }] actual = collection.aggregate([ {'$project': { 'items': {'$objectToArray': '$items'}, 'not_exists': {'$objectToArray': '$nothing'}, '_id': 0 }} ]) self.assertEqual(expect, list(actual)) # All of these items should trigger an error items = [[ {'$addFields': {'items': ''}}, {'$project': {'items': {'$objectToArray': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': 100}}, {'$project': {'items': {'$objectToArray': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [[]]}}, {'$project': {'items': {'$objectToArray': '$items'}, '_id': 0}} ]] for item in items: with self.assertRaises(mongomock.OperationFailure): 
collection.aggregate(item) # https://docs.mongodb.com/manual/reference/operator/aggregation/objectToArray/#examples @skipIf( sys.version_info < (3, 6), "It's harder to keep dict sorted in older versions of Python") def test_aggregate_object_to_array_with_example(self): collection = self.db.collection collection.insert_many([ {'_id': 1, 'item': 'ABC1', 'dimensions': collections.OrderedDict([ ('l', 25), ('w', 10), ('uom', 'cm'), ])}, {'_id': 2, 'item': 'ABC2', 'dimensions': collections.OrderedDict([ ('l', 50), ('w', 25), ('uom', 'cm'), ])}, {'_id': 3, 'item': 'XYZ1', 'dimensions': collections.OrderedDict([ ('l', 70), ('w', 75), ('uom', 'cm'), ])}, ]) expect = [ {'_id': 1, 'item': 'ABC1', 'dims': [ {'k': 'l', 'v': 25}, {'k': 'w', 'v': 10}, {'k': 'uom', 'v': 'cm'}, ]}, {'_id': 2, 'item': 'ABC2', 'dims': [ {'k': 'l', 'v': 50}, {'k': 'w', 'v': 25}, {'k': 'uom', 'v': 'cm'}, ]}, {'_id': 3, 'item': 'XYZ1', 'dims': [ {'k': 'l', 'v': 70}, {'k': 'w', 'v': 75}, {'k': 'uom', 'v': 'cm'}, ]}, ] actual = collection.aggregate([{ '$project': { 'item': 1, 'dims': {'$objectToArray': '$dimensions'}, }, }]) self.assertEqual(expect, list(actual)) def test_aggregate_is_number(self): collection = self.db.collection collection.insert_one( {'_id': 1, 'int': 3, 'big_int': 3 ** 10, 'negative': -3, 'str': 'not_a_number', 'str_numeric': '3', 'float': 3.3, 'negative_float': -3.3, 'bool': True, 'none': None} ) expect = [ {'int': True, 'big_int': True, 'negative': True, 'str': False, 'str_numeric': False, 'float': True, 'negative_float': True, 'bool': False, 'none': False}, ] actual = collection.aggregate([{ '$project': { '_id': False, 'int': {'$isNumber': '$int'}, 'big_int': {'$isNumber': '$big_int'}, 'negative': {'$isNumber': '$negative'}, 'str': {'$isNumber': '$str'}, 'str_numeric': {'$isNumber': '$str_numeric'}, 'float': {'$isNumber': '$float'}, 'negative_float': {'$isNumber': '$negative_float'}, 'bool': {'$isNumber': '$bool'}, 'none': {'$isNumber': '$none'}, }, }]) self.assertEqual(expect, 
list(actual)) def test_aggregate_is_array(self): collection = self.db.collection collection.insert_one( {'_id': 1, 'list': [1, 2, 3], 'tuple': (1, 2, 3), 'empty_list': [], 'empty_tuple': (), 'int': 3, 'str': '123', 'bool': True, 'none': None} ) expect = [ {'list': True, 'tuple': True, 'empty_list': True, 'empty_tuple': True, 'int': False, 'str': False, 'bool': False, 'none': False}, ] actual = collection.aggregate([{ '$project': { '_id': False, 'list': {'$isArray': '$list'}, 'tuple': {'$isArray': '$tuple'}, 'empty_list': {'$isArray': '$empty_list'}, 'empty_tuple': {'$isArray': '$empty_tuple'}, 'int': {'$isArray': '$int'}, 'str': {'$isArray': '$str'}, 'bool': {'$isArray': '$bool'}, 'none': {'$isArray': '$none'}, }, }]) self.assertEqual(expect, list(actual)) def test_aggregate_project_with_boolean(self): collection = self.db.collection # Test with no items expect = [] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$and': []}}} ]) self.assertEqual(expect, list(actual)) expect = [] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$or': []}}} ]) self.assertEqual(expect, list(actual)) expect = [] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$not': {}}}} ]) self.assertEqual(expect, list(actual)) # Tests following are with one item collection.insert_one({ 'items': [] }) # Test with 0 arguments expect = [{'items': True}] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$and': []}}} ]) self.assertEqual(expect, list(actual)) expect = [{'items': False}] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$or': []}}} ]) self.assertEqual(expect, list(actual)) expect = [{'items': False}] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$not': {}}}} ]) self.assertEqual(expect, list(actual)) # Test with one argument expect = [{'items': True}] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$and': [True]}}} ]) self.assertEqual(expect, list(actual)) expect 
= [{'items': True}] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$or': [True]}}} ]) self.assertEqual(expect, list(actual)) expect = [{'items': False}] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$not': True}}} ]) self.assertEqual(expect, list(actual)) # Test with two arguments expect = [{'items': True}] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$and': [True, True]}}} ]) self.assertEqual(expect, list(actual)) expect = [{'items': False}] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$and': [False, True]}}} ]) self.assertEqual(expect, list(actual)) actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$and': [True, False]}}} ]) self.assertEqual(expect, list(actual)) actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$and': [False, False]}}} ]) self.assertEqual(expect, list(actual)) expect = [{'items': True}] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$or': [True, True]}}} ]) self.assertEqual(expect, list(actual)) actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$or': [False, True]}}} ]) self.assertEqual(expect, list(actual)) actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$or': [True, False]}}} ]) self.assertEqual(expect, list(actual)) expect = [{'items': False}] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$or': [False, False]}}} ]) self.assertEqual(expect, list(actual)) # Following tests are with more than two items collection.insert_many([ {'items': []}, {'items': []} ]) expect = [{'items': True}] * 3 actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$and': []}}} ]) self.assertEqual(expect, list(actual)) expect = [{'items': False}] * 3 actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$or': []}}} ]) self.assertEqual(expect, list(actual)) expect = [{'items': False}] * 3 actual = collection.aggregate([ {'$project': {'_id': 0, 'items': 
{'$not': {}}}} ]) self.assertEqual(expect, list(actual)) # Test with something else than boolean collection.insert_one({ 'items': ['foo'] }) expect = [{'items': False}] * 3 + [{'items': True}] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$and': [{'$eq': ['$items', ['foo']]}]}}} ]) self.assertEqual(expect, list(actual)) actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$or': [{'$eq': ['$items', ['foo']]}]}}} ]) self.assertEqual(expect, list(actual)) expect = [{'items': True}] * 3 + [{'items': False}] actual = collection.aggregate([ {'$project': {'_id': 0, 'items': {'$not': {'$eq': ['$items', ['foo']]}}}} ]) self.assertEqual(expect, list(actual)) def test_set_no_content(self): collection = self.db.collection collection.insert_one({'a': 1}) if SERVER_VERSION >= version.parse('5.0'): collection.update_one({}, {'$set': {}}) collection.update_one({'b': 'will-never-exist'}, {'$set': {}}) return with self.assertRaises(mongomock.WriteError): collection.update_one({}, {'$set': {}}) with self.assertRaises(mongomock.WriteError): collection.update_one({'b': 'will-never-exist'}, {'$set': {}}) def test_snapshot_arg(self): self.db.collection.find(snapshot=False) def test_elem_match(self): self.db.collection.insert_many([ {'_id': 0, 'arr': [0, 1, 2, 3, 10]}, {'_id': 1, 'arr': [0, 2, 4, 6]}, {'_id': 2, 'arr': [1, 3, 5, 7]} ]) ids = set(doc['_id'] for doc in self.db.collection.find( {'arr': {'$elemMatch': {'$lt': 10, '$gt': 4}}}, {'_id': 1})) self.assertEqual({1, 2}, ids) def test_list_collection_names_filter(self): now = datetime.now() self.db.create_collection('aggregator') for day in range(10): new_date = now - timedelta(day) self.db.create_collection('historical_{0}'.format(new_date.strftime('%Y_%m_%d'))) # test without filter self.assertEqual(len(self.db.list_collection_names()), 11) # test regex assert len(self.db.list_collection_names(filter={ 'name': {'$regex': r'historical_\d{4}_\d{2}_\d{2}'} })) == 10 new_date = datetime.now() - 
timedelta(1) col_name = 'historical_{0}'.format(new_date.strftime('%Y_%m_%d')) # test not equal self.assertEqual(len(self.db.list_collection_names(filter={'name': {'$ne': col_name}})), 10) # test equal assert col_name in self.db.list_collection_names(filter={'name': col_name}) # neg invalid field with self.assertRaises(NotImplementedError): self.db.list_collection_names(filter={'_id': {'$ne': col_name}}) # neg invalid operator with self.assertRaises(NotImplementedError): self.db.list_collection_names(filter={'name': {'$ge': col_name}}) def test__equality(self): self.assertEqual(self.db.a, self.db.a) self.assertNotEqual(self.db.a, self.db.b) self.assertEqual(self.db.a, self.db.get_collection('a')) self.assertNotEqual(self.db.a, self.client.other_db.a) client = mongomock.MongoClient('localhost') self.assertEqual(client.db.collection, mongomock.MongoClient('localhost').db.collection) self.assertNotEqual( client.db.collection, mongomock.MongoClient('example.com').db.collection) @skipIf(sys.version_info < (3,), 'Older versions of Python do not handle hashing the same way') @skipUnless( helpers.PYMONGO_VERSION and helpers.PYMONGO_VERSION < version.parse('3.12'), "older versions of pymongo didn't have proper hashing") def test__not_hashable(self): with self.assertRaises(TypeError): {self.db.a, self.db.b} # pylint: disable=pointless-statement @skipIf(sys.version_info < (3,), 'Older versions of Python do not handle hashing the same way') @skipIf( helpers.PYMONGO_VERSION and helpers.PYMONGO_VERSION < version.parse('3.12'), "older versions of pymongo didn't have proper hashing") def test__hashable(self): {self.db.a, self.db.b} # pylint: disable=pointless-statement def test__bad_type_as_a_read_concern_returns_type_error(self): with self.assertRaises( TypeError, msg='read_concern must be an instance of pymongo.read_concern.ReadConcern' ): mongomock.collection.Collection(self.db, 'foo', None, read_concern='bar') def test__cursor_allow_disk_use(self): col = self.db.col 
col.find().allow_disk_use(True) col.find().allow_disk_use(False) col.find().allow_disk_use() with self.assertRaises(TypeError): col.find().allow_disk_use(1) # use the keyword argument col.find(allow_disk_use=True) col.find(allow_disk_use=False) col.find() with self.assertRaises(TypeError): col.find(allow_disk_use=1) mongomock-4.1.3/tests/test__database_api.py000066400000000000000000000256401456276026200210400ustar00rootroot00000000000000import collections import datetime from packaging import version import sys from unittest import TestCase, skipIf, skipUnless import mongomock from mongomock import helpers from mongomock import read_concern try: from bson import codec_options from pymongo.read_preferences import ReadPreference except ImportError: pass class UTCPlus2(datetime.tzinfo): def fromutc(self, dt): return dt + self.utcoffset(dt) def tzname(self, dt): return '' def utcoffset(self, dt): return datetime.timedelta(hours=2) def dst(self, dt): return datetime.timedelta() class DatabaseAPITest(TestCase): def setUp(self): self.database = mongomock.MongoClient().somedb def test__get_collection_by_attribute_underscore(self): with self.assertRaises(AttributeError) as err_context: self.database._users # pylint: disable=pointless-statement self.assertIn("Database has no attribute '_users'", str(err_context.exception)) # No problem accessing it through __get_item__. 
self.database['_users'].insert_one({'a': 1}) self.assertEqual(1, self.database['_users'].find_one().get('a')) def test__session(self): with self.assertRaises(NotImplementedError): self.database.list_collection_names(session=1) with self.assertRaises(NotImplementedError): self.database.drop_collection('a', session=1) with self.assertRaises(NotImplementedError): self.database.create_collection('a', session=1) with self.assertRaises(NotImplementedError): self.database.dereference(_DBRef('somedb', 'a', 'b'), session=1) def test__command_ping(self): self.assertEqual({'ok': 1}, self.database.command({'ping': 1})) def test__command_ping_string(self): self.assertEqual({'ok': 1}, self.database.command('ping')) def test__command_fake_ping_string(self): with self.assertRaises(NotImplementedError): self.assertEqual({'ok': 1}, self.database.command('a_nice_ping')) def test__command(self): with self.assertRaises(NotImplementedError): self.database.command({'count': 'user'}) def test__repr(self): self.assertEqual( "Database(mongomock.MongoClient('localhost', 27017), 'somedb')", repr(self.database)) def test__rename_unknown_collection(self): with self.assertRaises(mongomock.OperationFailure): self.database.rename_collection('a', 'b') def test__dereference(self): self.database.a.insert_one({'_id': 'b', 'val': 42}) doc = self.database.dereference(_DBRef('somedb', 'a', 'b')) self.assertEqual({'_id': 'b', 'val': 42}, doc) self.assertEqual(None, self.database.dereference(_DBRef('somedb', 'a', 'a'))) self.assertEqual(None, self.database.dereference(_DBRef('somedb', 'b', 'b'))) with self.assertRaises(ValueError): self.database.dereference(_DBRef('otherdb', 'a', 'b')) with self.assertRaises(TypeError): self.database.dereference('b') def test__read_preference(self): self.assertEqual('Primary', self.database.read_preference.name) self.assertEqual(self.database.collection.read_preference, self.database.read_preference) with self.assertRaises(TypeError): self.database.get_collection('a', 
read_preference='nearest') @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__get_collection_different_read_preference(self): database = mongomock.MongoClient()\ .get_database('somedb', read_preference=ReadPreference.NEAREST) self.assertEqual('Nearest', database.read_preference.name) self.assertEqual(database.read_preference, database.collection.read_preference) col = database.get_collection('col', read_preference=ReadPreference.PRIMARY) self.assertEqual('Primary', col.read_preference.name) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__get_collection_different_codec_options(self): database = mongomock.MongoClient().somedb a = database.get_collection('a', codec_options=codec_options.CodecOptions(tz_aware=True)) self.assertTrue(a.codec_options.tz_aware) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__codec_options(self): self.assertEqual(codec_options.CodecOptions(), self.database.codec_options) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__read_concern(self): self.assertEqual(read_concern.ReadConcern(), self.database.read_concern) def test__with_options(self): with self.assertRaises(NotImplementedError): self.database.with_options(write_concern=3) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__with_options_pymongo(self): other = self.database.with_options(read_preference=self.database.NEAREST) self.assertFalse(other is self.database) self.database.coll.insert_one({'_id': 42}) self.assertEqual({'_id': 42}, other.coll.find_one()) self.database.with_options(codec_options=codec_options.CodecOptions()) self.database.with_options() self.database.with_options(codec_options=codec_options.CodecOptions(tz_aware=True)) tz_aware_db = mongomock.MongoClient(tz_aware=True).somedb self.assertIs( tz_aware_db, tz_aware_db.with_options(codec_options=codec_options.CodecOptions(tz_aware=True))) custom_document_class = 
codec_options.CodecOptions(document_class=collections.OrderedDict) with self.assertRaises(NotImplementedError): self.database.with_options(custom_document_class) custom_uuid_representation = codec_options.CodecOptions(uuid_representation=4) with self.assertRaises(NotImplementedError): self.database.with_options(custom_uuid_representation) custom_unicode_error_hander = codec_options.CodecOptions( unicode_decode_error_handler='ignore') with self.assertRaises(NotImplementedError): self.database.with_options(custom_unicode_error_hander) custom_tzinfo = codec_options.CodecOptions(tz_aware=True, tzinfo=UTCPlus2()) with self.assertRaises(NotImplementedError): self.database.with_options(custom_tzinfo) @skipIf( not helpers.HAVE_PYMONGO or helpers.PYMONGO_VERSION < version.parse('3.8'), 'pymongo not installed or <3.8') def test__with_options_type_registry(self): class _CustomTypeCodec(codec_options.TypeCodec): @property def python_type(self): # pylint: disable=invalid-overridden-method return _CustomTypeCodec def transform_python(self, unused_value): pass @property def bson_type(self): # pylint: disable=invalid-overridden-method return int def transform_bson(self, unused_value): pass custom_type_registry = codec_options.CodecOptions( type_registry=codec_options.TypeRegistry([_CustomTypeCodec()])) with self.assertRaises(NotImplementedError): self.database.with_options(custom_type_registry) def test__collection_names(self): self.database.create_collection('a') self.database.create_collection('b') if helpers.PYMONGO_VERSION >= version.parse('4.0'): with self.assertRaises(TypeError): self.database.collection_names() return self.assertEqual(set(self.database.collection_names()), set(['a', 'b'])) self.database.c.drop() self.assertEqual(set(self.database.collection_names()), set(['a', 'b'])) def test__list_collection_names(self): self.database.create_collection('a') self.database.create_collection('b') self.assertEqual(set(self.database.list_collection_names()), set(['a', 'b'])) 
self.database.c.drop() self.assertEqual(set(self.database.list_collection_names()), set(['a', 'b'])) def test__list_collections(self): self.database.create_collection('a') with self.assertRaises(NotImplementedError): self.database.list_collections() def test__create_collection(self): coll = self.database.create_collection('c') self.assertIs(self.database.c, coll) self.assertRaises(mongomock.CollectionInvalid, self.database.create_collection, 'c') def test__create_collection_bad_names(self): with self.assertRaises(TypeError): self.database.create_collection(3) with self.assertRaises(TypeError): self.database[3] # pylint: disable=pointless-statement bad_names = ( '', 'foo..bar', '...', '$foo', '.foo', 'bar.', 'foo\x00bar', ) for name in bad_names: with self.assertRaises(mongomock.InvalidName, msg=name): self.database.create_collection(name) with self.assertRaises(mongomock.InvalidName, msg=name): self.database[name] # pylint: disable=pointless-statement def test__lazy_create_collection(self): col = self.database.a self.assertEqual(set(self.database.list_collection_names()), set()) col.insert_one({'foo': 'bar'}) self.assertEqual(set(self.database.list_collection_names()), set(['a'])) def test__equality(self): self.assertEqual(self.database, self.database) client = mongomock.MongoClient('localhost') self.assertNotEqual(client.a, client.b) self.assertEqual(client.a, client.get_database('a')) self.assertEqual(client.a, mongomock.MongoClient('localhost').a) self.assertNotEqual(client.a, mongomock.MongoClient('example.com').a) @skipIf(sys.version_info < (3,), 'Older versions of Python do not handle hashing the same way') @skipUnless( helpers.PYMONGO_VERSION < version.parse('3.12'), "older versions of pymongo didn't have proper hashing") def test__not_hashable(self): with self.assertRaises(TypeError): {self.database} # pylint: disable=pointless-statement @skipIf(sys.version_info < (3,), 'Older versions of Python do not handle hashing the same way') @skipIf( 
helpers.PYMONGO_VERSION < version.parse('3.12'), "older versions of pymongo didn't have proper hashing") def test__hashable(self): {self.database} # pylint: disable=pointless-statement def test__bad_type_as_a_read_concern_returns_type_error(self): client = mongomock.MongoClient() with self.assertRaises( TypeError, msg='read_concern must be an instance of pymongo.read_concern.ReadConcern' ): mongomock.database.Database(client, 'foo', None, read_concern='bar') _DBRef = collections.namedtuple('DBRef', ['database', 'collection', 'id']) mongomock-4.1.3/tests/test__diff.py000066400000000000000000000016051456276026200173460ustar00rootroot00000000000000from tests.diff import diff from unittest import TestCase class DiffTest(TestCase): def test__assert_no_diff(self): for obj in [ 1, 'string', {'complex': {'object': {'with': ['lists']}}}, ]: self.assertEqual(diff(obj, obj), []) def test__diff_values(self): self._assert_entire_diff(1, 2) self._assert_entire_diff('a', 'b') def test__diff_sequences(self): self._assert_entire_diff([], [1, 2, 3]) def test__composite_diff(self): a = {'a': {'b': [1, 2, 3]}} b = {'a': {'b': [1, 6, 3]}} [(path, x, y)] = diff(a, b) self.assertEqual(path, ['a', 'b', 1]) self.assertEqual(x, 2) self.assertEqual(y, 6) def _assert_entire_diff(self, a, b): [(_, x, y)] = diff(a, b) self.assertEqual(x, a) self.assertEqual(y, b) mongomock-4.1.3/tests/test__gridfs.py000066400000000000000000000167521456276026200177250ustar00rootroot00000000000000import os import time import unittest from unittest import TestCase, skipIf, skipUnless import mongomock import mongomock.gridfs from mongomock import helpers from packaging import version try: import gridfs from gridfs import errors _HAVE_GRIDFS = True except ImportError: _HAVE_GRIDFS = False try: from bson.objectid import ObjectId import pymongo from pymongo import MongoClient as PymongoClient except ImportError: ... 
@skipUnless(helpers.HAVE_PYMONGO, 'pymongo not installed') @skipUnless(_HAVE_GRIDFS and hasattr(gridfs.__builtins__, 'copy'), 'gridfs not installed') @skipIf(os.getenv('NO_LOCAL_MONGO'), 'No local Mongo server running') class GridFsTest(TestCase): @classmethod def setUpClass(cls): mongomock.gridfs.enable_gridfs_integration() def setUp(self): super(GridFsTest, self).setUp() self.fake_conn = mongomock.MongoClient() self.mongo_conn = self._connect_to_local_mongodb() self.db_name = 'mongomock___testing_db' self.mongo_conn[self.db_name]['fs']['files'].drop() self.mongo_conn[self.db_name]['fs']['chunks'].drop() self.real_gridfs = gridfs.GridFS(self.mongo_conn[self.db_name]) self.fake_gridfs = gridfs.GridFS(self.fake_conn[self.db_name]) def tearDown(self): super(GridFsTest, self).setUp() self.mongo_conn.close() self.fake_conn.close() def test__put_get_small(self): before = time.time() fid = self.fake_gridfs.put(GenFile(50)) rid = self.real_gridfs.put(GenFile(50)) after = time.time() ffile = self.fake_gridfs.get(fid) rfile = self.real_gridfs.get(rid) self.assertEqual(ffile.read(), rfile.read()) fake_doc = self.get_fake_file(fid) mongo_doc = self.get_mongo_file(rid) self.assertSameFile(mongo_doc, fake_doc, max_delta_seconds=after - before + 1) def test__put_get_big(self): # 500k files are bigger than doc size limit before = time.time() fid = self.fake_gridfs.put(GenFile(500000, 10)) rid = self.real_gridfs.put(GenFile(500000, 10)) after = time.time() ffile = self.fake_gridfs.get(fid) rfile = self.real_gridfs.get(rid) self.assertEqual(ffile.read(), rfile.read()) fake_doc = self.get_fake_file(fid) mongo_doc = self.get_mongo_file(rid) self.assertSameFile(mongo_doc, fake_doc, max_delta_seconds=after - before + 1) def test__delete_exists_small(self): fid = self.fake_gridfs.put(GenFile(50)) self.assertTrue(self.get_fake_file(fid) is not None) self.assertTrue(self.fake_gridfs.exists(fid)) self.fake_gridfs.delete(fid) self.assertFalse(self.fake_gridfs.exists(fid)) 
self.assertFalse(self.get_fake_file(fid) is not None) # All the chunks got removed self.assertEqual(0, self.fake_conn[self.db_name].fs.chunks.count_documents({})) def test__delete_exists_big(self): fid = self.fake_gridfs.put(GenFile(500000)) self.assertTrue(self.get_fake_file(fid) is not None) self.assertTrue(self.fake_gridfs.exists(fid)) self.fake_gridfs.delete(fid) self.assertFalse(self.fake_gridfs.exists(fid)) self.assertFalse(self.get_fake_file(fid) is not None) # All the chunks got removed self.assertEqual(0, self.fake_conn[self.db_name].fs.chunks.count_documents({})) def test__delete_no_file(self): # Just making sure we don't crash self.fake_gridfs.delete(ObjectId()) def test__list_files(self): fids = [self.fake_gridfs.put(GenFile(50, 9), filename='one'), self.fake_gridfs.put(GenFile(62, 5), filename='two'), self.fake_gridfs.put(GenFile(654, 1), filename='three'), self.fake_gridfs.put(GenFile(5), filename='four')] names = ['one', 'two', 'three', 'four'] names_no_two = [x for x in names if x != 'two'] for x in self.fake_gridfs.list(): self.assertIn(x, names) self.fake_gridfs.delete(fids[1]) for x in self.fake_gridfs.list(): self.assertIn(x, names_no_two) three_file = self.get_fake_file(fids[2]) self.assertEqual('three', three_file['filename']) self.assertEqual(654, three_file['length']) self.fake_gridfs.delete(fids[0]) self.fake_gridfs.delete(fids[2]) self.fake_gridfs.delete(fids[3]) self.assertEqual(0, len(self.fake_gridfs.list())) def test__find_files(self): fids = [self.fake_gridfs.put(GenFile(50, 9), filename='a'), self.fake_gridfs.put(GenFile(62, 5), filename='b'), self.fake_gridfs.put(GenFile(654, 1), filename='b'), self.fake_gridfs.put(GenFile(5), filename='a')] c = self.fake_gridfs.find({'filename': 'a'}).sort('uploadDate', -1) should_be_fid3 = c.next() should_be_fid0 = c.next() self.assertFalse(c.alive) self.assertEqual(fids[3], should_be_fid3._id) self.assertEqual(fids[0], should_be_fid0._id) def test__put_exists(self): 
self.fake_gridfs.put(GenFile(1), _id='12345') with self.assertRaises(errors.FileExists): self.fake_gridfs.put(GenFile(2, 3), _id='12345') def assertSameFile(self, real, fake, max_delta_seconds=1): # https://pymongo.readthedocs.io/en/stable/migrate-to-pymongo4.html#disable-md5-parameter-is-removed if helpers.PYMONGO_VERSION < version.parse('4.0'): self.assertEqual(real['md5'], fake['md5']) self.assertEqual(real['length'], fake['length']) self.assertEqual(real['chunkSize'], fake['chunkSize']) self.assertLessEqual( abs(real['uploadDate'] - fake['uploadDate']).seconds, max_delta_seconds, msg='real: %s, fake: %s' % (real['uploadDate'], fake['uploadDate'])) def get_mongo_file(self, i): return self.mongo_conn[self.db_name]['fs']['files'].find_one({'_id': i}) def get_fake_file(self, i): return self.fake_conn[self.db_name]['fs']['files'].find_one({'_id': i}) def _connect_to_local_mongodb(self, num_retries=60): """Performs retries on connection refused errors (for travis-ci builds)""" for retry in range(num_retries): if retry > 0: time.sleep(0.5) try: return PymongoClient( host=os.environ.get('TEST_MONGO_HOST', 'localhost'), maxPoolSize=1 ) except pymongo.errors.ConnectionFailure as e: if retry == num_retries - 1: raise if 'connection refused' not in e.message.lower(): raise class GenFile(object): def __init__(self, length, value=0, do_encode=True): self.gen = self._gen_data(length, value) self.do_encode = do_encode def _gen_data(self, length, value): while length: length -= 1 yield value def _maybe_encode(self, s): if self.do_encode and isinstance(s, str): return s.encode('UTF-8') return s def read(self, num_bytes=-1): s = '' if num_bytes <= 0: bytes_left = -1 else: bytes_left = num_bytes while True: n = next(self.gen, None) if n is None: return self._maybe_encode(s) s += chr(n) bytes_left -= 1 if bytes_left == 0: return self._maybe_encode(s) if __name__ == '__main__': unittest.main() 
mongomock-4.1.3/tests/test__helpers.py000066400000000000000000000135641456276026200201070ustar00rootroot00000000000000import json import os from mongomock.helpers import hashdict from mongomock.helpers import get_value_by_dot, set_value_by_dot from mongomock.helpers import parse_uri from mongomock.helpers import print_deprecation_warning from unittest import TestCase class HashdictTest(TestCase): def test__hashdict(self): """Make sure hashdict can be used as a key for a dict""" h = {} _id = hashdict({'a': 1}) h[_id] = 'foo' self.assertEqual(h[_id], 'foo') _id = hashdict({'a': {'foo': 2}}) h[_id] = 'foo' self.assertEqual(h[_id], 'foo') _id = hashdict({'a': {'foo': {'bar': 3}}}) h[_id] = 'foo' self.assertEqual(h[_id], 'foo') _id = hashdict({hashdict({'a': '3'}): {'foo': 2}}) h[_id] = 'foo' self.assertEqual(h[_id], 'foo') with self.assertRaises(TypeError): _id['a'] = 2 with self.assertRaises(TypeError): del _id['a'] with self.assertRaises(TypeError): _id.clear() with self.assertRaises(TypeError): _id.pop('a') with self.assertRaises(TypeError): _id.popitem('a') with self.assertRaises(TypeError): _id.setdefault('c', 3) with self.assertRaises(TypeError): _id.update({'b': 2, 'c': 4}) self.assertEqual( hashdict({'a': 1, 'b': 3, 'c': 4}), hashdict({'a': 1, 'b': 2}) + hashdict({'b': 3, 'c': 4})) self.assertEqual('hashdict(a=1, b=2)', repr(hashdict({'a': 1, 'b': 2}))) class TestDeprecationWarning(TestCase): def test__deprecation_warning(self): # ensure this doesn't throw an exception print_deprecation_warning('aaa', 'bbb') class TestAllUriScenarios(TestCase): pass _URI_SPEC_TEST_PATH = os.path.join( os.path.dirname(os.path.realpath(__file__)), os.path.join('connection_string', 'test')) def create_uri_spec_tests(): """Use json specifications in `_TEST_PATH` to generate uri spec tests. This is a simplified version from the PyMongo "test/test_uri_spec.py". It is modified to disregard warnings and only check that valid uri's are valid with the correct database. 
""" def create_uri_spec_test(scenario_def): def run_scenario(self): self.assertTrue(scenario_def['tests'], 'tests cannot be empty') for test in scenario_def['tests']: dsc = test['description'] error = False try: dbase = parse_uri(test['uri'])['database'] except Exception as e: print(e) error = True self.assertEqual(not error, test['valid'], "Test failure '%s'" % dsc) # Compare auth options. auth = test['auth'] if auth is not None: expected_dbase = auth.pop('db') # db == database # Special case for PyMongo's collection parsing if expected_dbase and '.' in expected_dbase: expected_dbase, _ = expected_dbase.split('.', 1) self.assertEqual(expected_dbase, dbase, 'Expected %s but got %s' % (expected_dbase, dbase)) return run_scenario for dirpath, _, filenames in os.walk(_URI_SPEC_TEST_PATH): dirname = os.path.split(dirpath) dirname = os.path.split(dirname[-2])[-1] + '_' + dirname[-1] for filename in filenames: with open(os.path.join(dirpath, filename)) as scenario_stream: scenario_def = json.load(scenario_stream) # Construct test from scenario. 
new_test = create_uri_spec_test(scenario_def) test_name = 'test_%s_%s' % ( dirname, os.path.splitext(filename)[0]) new_test.__name__ = test_name setattr(TestAllUriScenarios, new_test.__name__, new_test) create_uri_spec_tests() class ValueByDotTest(TestCase): def test__get_value_by_dot_missing_key(self): """Test get_value_by_dot raises KeyError when looking for a missing key""" for doc, key in ( ({}, 'a'), ({'a': 1}, 'b'), ({'a': 1}, 'a.b'), ({'a': {'b': 1}}, 'a.b.c'), ({'a': {'b': 1}}, 'a.c'), ({'a': [{'b': 1}]}, 'a.b'), ({'a': [{'b': 1}]}, 'a.1.b')): self.assertRaises(KeyError, get_value_by_dot, doc, key) def test__get_value_by_dot_find_key(self): """Test get_value_by_dot when key can be found""" for doc, key, expected in ( ({'a': 1}, 'a', 1), ({'a': {'b': 1}}, 'a', {'b': 1}), ({'a': {'b': 1}}, 'a.b', 1), ({'a': [{'b': 1}]}, 'a.0.b', 1)): found = get_value_by_dot(doc, key) self.assertEqual(found, expected) def test__set_value_by_dot(self): """Test set_value_by_dot""" for doc, key, expected in ( ({}, 'a', {'a': 42}), ({'a': 1}, 'a', {'a': 42}), ({'a': {'b': 1}}, 'a', {'a': 42}), ({'a': {'b': 1}}, 'a.b', {'a': {'b': 42}}), ({'a': [{'b': 1}]}, 'a.0', {'a': [42]}), ({'a': [{'b': 1}]}, 'a.0.b', {'a': [{'b': 42}]})): ret = set_value_by_dot(doc, key, 42) assert ret is doc self.assertEqual(ret, expected) def test__set_value_by_dot_bad_key(self): """Test set_value_by_dot when key has an invalid parent""" for doc, key in ( ({}, 'a.b'), ({'a': 1}, 'a.b'), ({'a': {'b': 1}}, 'a.b.c'), ({'a': [{'b': 1}]}, 'a.1.b'), ({'a': [{'b': 1}]}, 'a.1')): self.assertRaises(KeyError, set_value_by_dot, doc, key, 42) mongomock-4.1.3/tests/test__mongomock.py000066400000000000000000005442061456276026200204400ustar00rootroot00000000000000# -*- coding: utf-8 -*- from collections import OrderedDict import copy import datetime import os from packaging import version import re import sys import time from unittest import TestCase, skipIf, skipUnless import uuid import mongomock from mongomock import 
ConfigurationError from mongomock import Database from mongomock import helpers from mongomock import InvalidURI from mongomock import OperationFailure try: from bson import DBRef, decimal128 from bson.objectid import ObjectId import pymongo from pymongo import MongoClient as PymongoClient from pymongo import read_concern from pymongo.read_preferences import ReadPreference except ImportError: from mongomock.object_id import ObjectId from mongomock import read_concern from tests.utils import DBRef try: from bson.code import Code from bson.regex import Regex from bson.son import SON import execjs # noqa pylint: disable=unused-import _HAVE_MAP_REDUCE = any(r.is_available() for r in execjs.runtimes().values()) except ImportError: _HAVE_MAP_REDUCE = False Code = str from tests.multicollection import MultiCollection SERVER_VERSION = version.parse(mongomock.SERVER_VERSION) class InterfaceTest(TestCase): def test__can_create_db_without_path(self): self.assertIsNotNone(mongomock.MongoClient()) def test__can_create_db_with_path(self): self.assertIsNotNone(mongomock.MongoClient('mongodb://localhost')) def test__can_create_db_with_multiple_pathes(self): hostnames = ['mongodb://localhost:27017', 'mongodb://localhost:27018'] self.assertIsNotNone(mongomock.MongoClient(hostnames)) def test__repr(self): self.assertEqual(repr(mongomock.MongoClient()), "mongomock.MongoClient('localhost', 27017)") def test__bad_uri_raises(self): with self.assertRaises(InvalidURI): mongomock.MongoClient('http://host1') with self.assertRaises(InvalidURI): mongomock.MongoClient('://host1') with self.assertRaises(InvalidURI): mongomock.MongoClient('mongodb://') with self.assertRaises(InvalidURI): mongomock.MongoClient('mongodb://localhost/path/mongodb.sock') with self.assertRaises(InvalidURI): mongomock.MongoClient('mongodb://localhost?option') with self.assertRaises(ValueError): mongomock.MongoClient('mongodb:host2') def test__none_uri_host(self): self.assertIsNotNone(mongomock.MongoClient('host1')) 
self.assertIsNotNone(mongomock.MongoClient('//host2')) self.assertIsNotNone(mongomock.MongoClient('mongodb:12')) class DatabaseGettingTest(TestCase): def setUp(self): super(DatabaseGettingTest, self).setUp() self.client = mongomock.MongoClient() @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__get_database_read_concern(self): db = self.client.get_database('a', read_concern=read_concern.ReadConcern('majority')) self.assertEqual('majority', db.read_concern.level) def test__getting_database_via_getattr(self): db1 = self.client.some_database_here db2 = self.client.some_database_here self.assertIs(db1, db2) self.assertIs(db1, self.client['some_database_here']) self.assertIsInstance(db1, Database) self.assertIs(db1.client, self.client) self.assertIs(db2.client, self.client) def test__getting_database_via_getitem(self): db1 = self.client['some_database_here'] db2 = self.client['some_database_here'] self.assertIs(db1, db2) self.assertIs(db1, self.client.some_database_here) self.assertIsInstance(db1, Database) def test__drop_database(self): db = self.client.a collection = db.a doc_id = collection.insert_one({'aa': 'bb'}).inserted_id self.assertEqual(collection.count_documents({'_id': doc_id}), 1) self.client.drop_database('a') self.assertEqual(collection.count_documents({'_id': doc_id}), 0) db = self.client.a collection = db.a doc_id = collection.insert_one({'aa': 'bb'}).inserted_id self.assertEqual(collection.count_documents({'_id': doc_id}), 1) self.client.drop_database(db) self.assertEqual(collection.count_documents({'_id': doc_id}), 0) def test__drop_database_system_collection(self): db = self.client.a collection = db['system.foo'] doc_id = collection.insert_one({'aa': 'bb'}).inserted_id self.assertEqual(collection.count_documents({'_id': doc_id}), 1) self.client.drop_database('a') self.assertEqual(collection.count_documents({'_id': doc_id}), 0) def test__drop_database_indexes(self): db = self.client.somedb collection = db.a 
collection.create_index('simple') collection.create_index([('value', 1)], unique=True) collection.create_index([('sparsed', 1)], unique=True, sparse=True) self.client.drop_database('somedb') # Make sure indexes' rules no longer apply collection.insert_one({'value': 'not_unique_but_ok', 'sparsed': 'not_unique_but_ok'}) collection.insert_one({'value': 'not_unique_but_ok'}) collection.insert_one({'sparsed': 'not_unique_but_ok'}) self.assertEqual(collection.count_documents({}), 3) def test__sparse_unique_index(self): db = self.client.somedb collection = db.a collection.create_index([('value', 1)], unique=True, sparse=True) collection.insert_one({'value': 'should_be_unique'}) collection.insert_one({'simple': 'simple_without_value'}) collection.insert_one({'simple': 'simple_without_value2'}) collection.create_index([('value', 1)], unique=True, sparse=True) def test__alive(self): self.assertTrue(self.client.alive()) def test__dereference(self): db = self.client.a collection = db.a to_insert = {'_id': 'a', 'aa': 'bb'} collection.insert_one(to_insert) a = db.dereference(DBRef('a', 'a', db.name)) self.assertEqual(to_insert, a) def test__getting_default_database_valid(self): def gddb(uri): client = mongomock.MongoClient(uri) return client, client.get_default_database() c, db = gddb('mongodb://host1/foo') self.assertIsNotNone(db) self.assertIsInstance(db, Database) self.assertIs(db.client, c) self.assertIs(db, c['foo']) c, db = gddb('mongodb://host1/bar') self.assertIs(db, c['bar']) c, db = gddb(r'mongodb://a%00lice:f%00oo@127.0.0.1/t%00est') self.assertIs(db, c['t\x00est']) c, db = gddb('mongodb://bob:bar@[::1]:27018/admin') self.assertIs(db, c['admin']) c, db = gddb('mongodb://%24am:f%3Azzb%40zz@127.0.0.1/' 'admin%3F?authMechanism=MONGODB-CR') self.assertIs(db, c['admin?']) c, db = gddb(['mongodb://localhost:27017/foo', 'mongodb://localhost:27018/foo']) self.assertIs(db, c['foo']) # As of pymongo 3.5, get_database() is equivalent to # the old behavior of 
get_default_database() client = mongomock.MongoClient('mongodb://host1/foo') self.assertIs(client.get_database(), client['foo']) def test__getting_default_database_invalid(self): def client(uri): return mongomock.MongoClient(uri) c = client('mongodb://host1') with self.assertRaises(ConfigurationError): c.get_default_database() c = client('host1') with self.assertRaises(ConfigurationError): c.get_default_database() c = client('') with self.assertRaises(ConfigurationError): c.get_default_database() c = client('mongodb://host1/') with self.assertRaises(ConfigurationError): c.get_default_database() def test__getting_default_database_with_default_parameter(self): c = mongomock.MongoClient('mongodb://host1/') self.assertIs(c.get_default_database('foo'), c['foo']) self.assertIs(c.get_default_database(default='foo'), c['foo']) def test__getting_default_database_ignoring_default_parameter(self): c = mongomock.MongoClient('mongodb://host1/bar') self.assertIs(c.get_default_database('foo'), c['bar']) self.assertIs(c.get_default_database(default='foo'), c['bar']) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') def test__getting_default_database_preserves_options(self): client = mongomock.MongoClient('mongodb://host1/foo') db = client.get_database(read_preference=ReadPreference.NEAREST) self.assertEqual(db.name, 'foo') self.assertEqual(ReadPreference.NEAREST, db.read_preference) self.assertEqual(ReadPreference.PRIMARY, client.read_preference) class UTCPlus2(datetime.tzinfo): def fromutc(self, dt): return dt + self.utcoffset(dt) def tzname(self, dt): return '' def utcoffset(self, dt): return datetime.timedelta(hours=2) def dst(self, dt): return datetime.timedelta() @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') @skipIf(os.getenv('NO_LOCAL_MONGO'), 'No local Mongo server running') class _CollectionComparisonTest(TestCase): """Compares a fake collection with the real mongo collection implementation This is done via cross-comparison of the results. 
""" def setUp(self): super(_CollectionComparisonTest, self).setUp() self.fake_conn = mongomock.MongoClient() self.mongo_conn = self._connect_to_local_mongodb() self.db_name = 'mongomock___testing_db' self.collection_name = 'mongomock___testing_collection' self.mongo_conn.drop_database(self.db_name) self.mongo_collection = self.mongo_conn[self.db_name][self.collection_name] self.fake_collection = self.fake_conn[self.db_name][self.collection_name] self.cmp = MultiCollection({ 'fake': self.fake_collection, 'real': self.mongo_collection, }) def _create_compare_for_collection(self, collection_name, db_name=None): if not db_name: db_name = self.db_name mongo_collection = self.mongo_conn[db_name][collection_name] fake_collection = self.fake_conn[db_name][collection_name] return MultiCollection({ 'fake': fake_collection, 'real': mongo_collection, }) def _connect_to_local_mongodb(self, num_retries=60): """Performs retries on connection refused errors (for travis-ci builds)""" for retry in range(num_retries): if retry > 0: time.sleep(0.5) try: return PymongoClient( host=os.environ.get('TEST_MONGO_HOST', 'localhost'), maxPoolSize=1 ) except pymongo.errors.ConnectionFailure as e: if retry == num_retries - 1: raise if 'connection refused' not in e.message.lower(): raise def tearDown(self): super(_CollectionComparisonTest, self).tearDown() self.mongo_conn.close() class EqualityCollectionTest(_CollectionComparisonTest): def test__database_equality(self): self.assertEqual(self.mongo_conn[self.db_name], self.mongo_conn[self.db_name]) self.assertEqual(self.fake_conn[self.db_name], self.fake_conn[self.db_name]) @skipIf(sys.version_info < (3,), 'Older versions of Python do not handle hashing the same way') @skipIf( helpers.PYMONGO_VERSION and helpers.PYMONGO_VERSION < version.parse('3.12'), "older versions of pymongo didn't have proper hashing") def test__database_hashable(self): {self.mongo_conn[self.db_name]} # pylint: disable=pointless-statement {self.fake_conn[self.db_name]} # 
pylint: disable=pointless-statement @skipIf(sys.version_info < (3,), 'Older versions of Python do not handle hashing the same way') @skipUnless( helpers.PYMONGO_VERSION and helpers.PYMONGO_VERSION < version.parse('3.12'), "older versions of pymongo didn't have proper hashing") def test__database_not_hashable(self): with self.assertRaises(TypeError): {self.mongo_conn[self.db_name]} # pylint: disable=pointless-statement with self.assertRaises(TypeError): {self.fake_conn[self.db_name]} # pylint: disable=pointless-statement class MongoClientCollectionTest(_CollectionComparisonTest): def test__find_is_empty(self): self.cmp.do.delete_many({}) self.cmp.compare.find() def test__inserting(self): self.cmp.do.delete_many({}) data = {'a': 1, 'b': 2, 'c': 'data'} self.cmp.do.insert_one(data) self.cmp.compare.find() # single document, no need to ignore order def test__bulk_insert(self): objs = [{'a': 2, 'b': {'c': 3}}, {'c': 5}, {'d': 7}] results_dict = self.cmp.do.insert_many(objs) for results in results_dict.values(): self.assertEqual(len(results.inserted_ids), len(objs)) self.assertEqual( len(set(results.inserted_ids)), len(results.inserted_ids), 'Returned object ids not unique!') self.cmp.compare_ignore_order.find() def test__insert(self): if helpers.PYMONGO_VERSION >= version.parse('4.0'): self.cmp.compare_exceptions.insert({'a': 1}) return self.cmp.do.insert({'a': 1}) self.cmp.compare.find() def test__insert_one(self): self.cmp.do.insert_one({'a': 1}) self.cmp.compare.find() def test__insert_many(self): self.cmp.do.insert_many([{'a': 1}, {'a': 2}]) self.cmp.compare.find() def test__save(self): # add an item with a non ObjectId _id first. 
self.cmp.do.insert_one({'_id': 'b'}) if helpers.PYMONGO_VERSION >= version.parse('4.0'): self.cmp.compare_exceptions.save({'_id': ObjectId(), 'someProp': 1}) return self.cmp.do.save({'_id': ObjectId(), 'someProp': 1}) self.cmp.compare_ignore_order.find() def test__insert_object_id_as_dict(self): self.cmp.do.delete_many({}) doc_ids = [ # simple top-level dictionary {'A': 1}, # dict with value as list {'A': [1, 2, 3]}, # dict with value as dict {'A': {'sub': {'subsub': 3}}} ] for doc_id in doc_ids: _id = { key: value.inserted_id for key, value in self.cmp.do.insert_one({'_id': doc_id, 'a': 1}).items() } self.assertEqual(_id['fake'], _id['real']) self.assertEqual(_id['fake'], doc_id) self.assertEqual(_id['real'], doc_id) self.assertEqual(type(_id['fake']), type(_id['real'])) self.cmp.compare.find({'_id': doc_id}) docs = self.cmp.compare.find_one({'_id': doc_id}) self.assertEqual(docs['fake']['_id'], doc_id) self.assertEqual(docs['real']['_id'], doc_id) self.cmp.do.delete_one({'_id': doc_id}) def test__count(self): if helpers.PYMONGO_VERSION >= version.parse('4.0'): self.cmp.compare_exceptions.count() return self.cmp.compare.count() self.cmp.do.insert_one({'a': 1}) self.cmp.compare.count() self.cmp.do.insert_one({'a': 0}) self.cmp.compare.count() self.cmp.compare.count({'a': 1}) @skipIf( helpers.PYMONGO_VERSION and helpers.PYMONGO_VERSION < version.parse('3.8'), 'older version of pymongo does not have count_documents') def test__count_documents(self): self.cmp.compare.count_documents({}) self.cmp.do.insert_one({'a': 1}) self.cmp.compare.count_documents({}) self.cmp.do.insert_one({'a': 0}) self.cmp.compare.count_documents({}) self.cmp.compare.count_documents({'a': 1}) self.cmp.compare.count_documents({}, skip=10) self.cmp.compare.count_documents({}, skip=0) self.cmp.compare.count_documents({}, skip=10, limit=100) self.cmp.compare.count_documents({}, skip=10, limit=3) self.cmp.compare_exceptions.count_documents({}, limit='one') 
self.cmp.compare_exceptions.count_documents({}, limit='1') @skipIf( helpers.PYMONGO_VERSION and helpers.PYMONGO_VERSION < version.parse('3.8'), 'older version of pymongo does not have estimated_document_count') def test__estimated_document_count(self): self.cmp.compare.estimated_document_count() self.cmp.do.insert_one({'a': 1}) self.cmp.compare.estimated_document_count() self.cmp.do.insert_one({'a': 0}) self.cmp.compare.estimated_document_count() if SERVER_VERSION < version.parse('5'): self.cmp.compare.estimated_document_count(skip=2) else: self.cmp.compare_exceptions.estimated_document_count(skip=2) self.cmp.compare_exceptions.estimated_document_count(filter={'a': 1}) def test__reindex(self): self.cmp.compare.create_index('a') self.cmp.do.insert_one({'a': 1}) if helpers.PYMONGO_VERSION >= version.parse('4.0'): self.cmp.compare_exceptions.reindex() return self.cmp.do.reindex() def test__find_one(self): self.cmp.do.insert_one({'_id': 'id1', 'name': 'new'}) self.cmp.compare.find_one({'_id': 'id1'}) self.cmp.do.insert_one({'_id': 'id2', 'name': 'another new'}) self.cmp.compare.find_one({'_id': 'id2'}, {'_id': 1}) self.cmp.compare.find_one('id2', {'_id': 1}) def test__find_one_no_args(self): self.cmp.do.insert_one({'_id': 'new_obj', 'field': 'value'}) self.cmp.compare.find_one() def test__find_by_attributes(self): id1 = ObjectId() self.cmp.do.insert_one({'_id': id1, 'name': 'new'}) self.cmp.do.insert_one({'name': 'another new'}) self.cmp.compare_ignore_order.sort_by(lambda doc: str(doc.get('name', str(doc)))).find() self.cmp.compare.find({'_id': id1}) def test__find_by_document(self): self.cmp.do.insert_one({'name': 'new', 'doc': {'key': 'val'}}) self.cmp.do.insert_one({'name': 'another new'}) self.cmp.do.insert_one({'name': 'new', 'doc': {'key': ['val']}}) self.cmp.do.insert_one({'name': 'new', 'doc': {'key': ['val', 'other val']}}) self.cmp.compare_ignore_order.find() self.cmp.compare.find({'doc': {'key': 'val'}}) self.cmp.compare.find({'doc': {'key': {'$eq': 
'val'}}}) def test__find_by_empty_document(self): self.cmp.do.insert_one({'doc': {'data': 'val'}}) self.cmp.do.insert_one({'doc': {}}) self.cmp.do.insert_one({'doc': None}) self.cmp.compare.find({'doc': {}}) def test__find_by_attributes_return_fields(self): id1 = ObjectId() id2 = ObjectId() self.cmp.do.insert_one( {'_id': id1, 'name': 'new', 'someOtherProp': 2, 'nestedProp': {'a': 1}}) self.cmp.do.insert_one({'_id': id2, 'name': 'another new'}) self.cmp.compare_ignore_order.find({}, {'_id': 0}) # test exclusion of _id self.cmp.compare_ignore_order.find({}, {'_id': 1, 'someOtherProp': 1}) # test inclusion self.cmp.compare_ignore_order.find({}, {'_id': 0, 'someOtherProp': 0}) # test exclusion self.cmp.compare_ignore_order.find({}, {'_id': 0, 'someOtherProp': 1}) # test mixed _id:0 self.cmp.compare_ignore_order.find({}, {'someOtherProp': 0}) # test no _id, otherProp:0 self.cmp.compare_ignore_order.find({}, {'someOtherProp': 1}) # test no _id, otherProp:1 self.cmp.compare.find({'_id': id1}, {'_id': 0}) # test exclusion of _id self.cmp.compare.find({'_id': id1}, {'_id': 1, 'someOtherProp': 1}) # test inclusion self.cmp.compare.find({'_id': id1}, {'_id': 0, 'someOtherProp': 0}) # test exclusion # test mixed _id:0 self.cmp.compare.find({'_id': id1}, {'_id': 0, 'someOtherProp': 1}) # test no _id, otherProp:0 self.cmp.compare.find({'_id': id1}, {'someOtherProp': 0}) # test no _id, otherProp:1 self.cmp.compare.find({'_id': id1}, {'someOtherProp': 1}) def test__find_by_attributes_return_fields_elemMatch(self): id = ObjectId() self.cmp.do.insert_one({ '_id': id, 'owns': [ {'type': 'hat', 'color': 'black'}, {'type': 'hat', 'color': 'green'}, {'type': 't-shirt', 'color': 'black', 'size': 'small'}, {'type': 't-shirt', 'color': 'black'}, {'type': 't-shirt', 'color': 'white'} ], 'hat': 'red' }) elem = {'$elemMatch': {'type': 't-shirt', 'color': 'black'}} # test filtering on array field only self.cmp.compare.find({'_id': id}, {'owns': elem}) # test filtering on array field with 
inclusion self.cmp.compare.find({'_id': id}, {'owns': elem, 'hat': 1}) # test filtering on array field with exclusion self.cmp.compare.find({'_id': id}, {'owns': elem, 'hat': 0}) # test filtering on non array field self.cmp.compare.find({'_id': id}, {'hat': elem}) # test no match self.cmp.compare.find({'_id': id}, {'owns': {'$elemMatch': {'type': 'cap'}}}) def test__find_with_expr(self): self.cmp.do.insert_many([ {'_id': 1, 'a': [5]}, {'_id': 2, 'a': [1, 2, 3]}, {'_id': 3, 'a': []}, ]) self.cmp.compare.find({'$expr': {'$eq': [{'$size': ['$a']}, 1]}}) self.cmp.do.insert_one({'_id': 4}) self.cmp.compare_exceptions.find({'$expr': {'$eq': [{'$size': ['$a']}, 1]}}) def test_double_negation(self): self.cmp.do.insert_many([ {'_id': 1, 'a': 'some str'}, {'_id': 2, 'a': 'another str'}, {'_id': 3, 'a': []}, ]) self.cmp.compare.find({'a': {'$not': {'$not': {'$regex': '^some'}}}}) def test__size(self): id = ObjectId() self.cmp.do.insert_one({ '_id': id, 'l_string': 1, 'l_tuple': ['a', 'b'], 'null_field': None }) self.cmp.compare.find({'_id': id}) self.cmp.compare.find({'_id': id, 'l_string': {'$not': {'$size': 0}}}) self.cmp.compare.find({'_id': id, 'l_tuple': {'$size': 2}}) self.cmp.compare.find({'_id': id, 'missing_field': {'$size': 1}}) self.cmp.compare.find({'_id': id, 'null_field': {'$size': 1}}) def test__all_with_other_operators(self): objs = [{'list': ['a']}, {'list': ['a', 123]}, {'list': ['a', 123, 'xyz']}] self.cmp.do.insert_many(objs) self.cmp.compare.find({'list': {'$all': ['a'], '$size': 1}}) self.cmp.compare.find({'list': {'$all': ['a', 123], '$size': 2}}) self.cmp.compare.find({'list': {'$all': ['a', 123, 'xyz'], '$size': 3}}) self.cmp.compare.find({'list': {'$all': ['a'], '$size': 3}}) self.cmp.compare.find({'list': {'$all': ['a', 123], '$in': ['xyz']}}) self.cmp.compare.find({'list': {'$all': ['a', 123, 'xyz'], '$in': ['abcdef']}}) self.cmp.compare.find({'list': {'$all': ['a'], '$eq': ['a']}}) def test__regex_match_non_string(self): id = ObjectId() 
self.cmp.do.insert_one({ '_id': id, 'test': 1 }) self.cmp.compare.find({'_id': id, 'test': {'$regex': '1'}}) def test__regex_match_non_string_in_list(self): id = ObjectId() self.cmp.do.insert_one({ '_id': id, 'test': [3, 2, 1] }) self.cmp.compare.find({'_id': id, 'test': {'$regex': '1'}}) def test__find_by_dotted_attributes(self): """Test seaching with dot notation.""" green_bowler = { 'name': 'bob', 'hat': {'color': 'green', 'type': 'bowler'}} red_bowler = { 'name': 'sam', 'hat': {'color': 'red', 'type': 'bowler'}} self.cmp.do.insert_one(green_bowler) self.cmp.do.insert_one(red_bowler) self.cmp.compare_ignore_order.find() self.cmp.compare_ignore_order.find({'name': 'sam'}) self.cmp.compare_ignore_order.find({'hat.color': 'green'}) self.cmp.compare_ignore_order.find({'hat.type': 'bowler'}) self.cmp.compare.find({ 'hat.color': 'red', 'hat.type': 'bowler' }) self.cmp.compare.find({ 'name': 'bob', 'hat.color': 'red', 'hat.type': 'bowler' }) self.cmp.compare.find({'hat': 'a hat'}) self.cmp.compare.find({'hat.color.cat': 'red'}) def test__find_empty_array_field(self): # See #90 self.cmp.do.insert_one({'array_field': []}) self.cmp.compare.find({'array_field': []}) def test__find_non_empty_array_field(self): # See #90 self.cmp.do.insert_one({'array_field': [['abc']]}) self.cmp.do.insert_one({'array_field': ['def']}) self.cmp.compare.find({'array_field': ['abc']}) self.cmp.compare.find({'array_field': [['abc']]}) self.cmp.compare.find({'array_field': 'def'}) self.cmp.compare.find({'array_field': ['def']}) def test__find_by_objectid_in_list(self): # See #79 self.cmp.do.insert_one( {'_id': 'x', 'rel_id': [ObjectId('52d669dcad547f059424f783')]}) self.cmp.compare.find({'rel_id': ObjectId('52d669dcad547f059424f783')}) def test__find_subselect_in_list(self): # See #78 self.cmp.do.insert_one({'_id': 'some_id', 'a': [{'b': 1, 'c': 2}]}) self.cmp.compare.find_one({'a.b': 1}) def test__find_dict_in_nested_list(self): # See #539 self.cmp.do.insert_one({'a': {'b': [{'c': 1}]}}) 
self.cmp.compare.find({'a.b': {'c': 1}}) def test__find_by_regex_object(self): """Test searching with regular expression objects.""" bob = {'name': 'bob'} sam = {'name': 'sam'} self.cmp.do.insert_one(bob) self.cmp.do.insert_one(sam) self.cmp.compare_ignore_order.find() regex = re.compile('bob|sam') self.cmp.compare_ignore_order.find({'name': regex}) regex = re.compile('bob|notsam') self.cmp.compare_ignore_order.find({'name': regex}) self.cmp.compare_ignore_order.find({'name': {'$regex': regex}}) upper_regex = Regex('Bob') self.cmp.compare_ignore_order.find({'name': {'$regex': upper_regex}}) self.cmp.compare_ignore_order.find({'name': { '$regex': upper_regex, '$options': 'i', }}) self.cmp.compare_ignore_order.find({'name': { '$regex': upper_regex, '$options': 'I', }}) self.cmp.compare_ignore_order.find({'name': { '$regex': upper_regex, '$options': 'z', }}) def test__find_by_regex_string(self): """Test searching with regular expression string.""" bob = {'name': 'bob'} sam = {'name': 'sam'} self.cmp.do.insert_one(bob) self.cmp.do.insert_one(sam) self.cmp.compare_ignore_order.find() self.cmp.compare_ignore_order.find({'name': {'$regex': 'bob|sam'}}) self.cmp.compare_ignore_order.find({'name': {'$regex': 'bob|notsam'}}) self.cmp.compare_ignore_order.find({'name': {'$regex': 'Bob', '$options': 'i'}}) self.cmp.compare_ignore_order.find({'name': {'$regex': 'Bob', '$options': 'I'}}) self.cmp.compare_ignore_order.find({'name': {'$regex': 'Bob', '$options': 'z'}}) def test__find_in_array_by_regex_object(self): """Test searching inside array with regular expression object.""" bob = {'name': 'bob', 'text': ['abcd', 'cde']} sam = {'name': 'sam', 'text': ['bde']} self.cmp.do.insert_one(bob) self.cmp.do.insert_one(sam) regex = re.compile('^a') self.cmp.compare_ignore_order.find({'text': regex}) regex = re.compile('e$') self.cmp.compare_ignore_order.find({'text': regex}) regex = re.compile('bde|cde') self.cmp.compare_ignore_order.find({'text': regex}) def 
test__find_in_array_by_regex_string(self): """Test searching inside array with regular expression string""" bob = {'name': 'bob', 'text': ['abcd', 'cde']} sam = {'name': 'sam', 'text': ['bde']} self.cmp.do.insert_one(bob) self.cmp.do.insert_one(sam) self.cmp.compare_ignore_order.find({'text': {'$regex': '^a'}}) self.cmp.compare_ignore_order.find({'text': {'$regex': 'e$'}}) self.cmp.compare_ignore_order.find({'text': {'$regex': 'bcd|cde'}}) def test__find_by_regex_string_on_absent_field_dont_break(self): """Test searching on absent field with regular expression string dont break""" bob = {'name': 'bob'} sam = {'name': 'sam'} self.cmp.do.insert_one(bob) self.cmp.do.insert_one(sam) self.cmp.compare_ignore_order.find({'text': {'$regex': 'bob|sam'}}) def test__find_by_elemMatch(self): self.cmp.do.insert_one({'field': [{'a': 1, 'b': 2}, {'c': 3, 'd': 4}]}) self.cmp.do.insert_one({'field': [{'a': 1, 'b': 4}, {'c': 3, 'd': 8}]}) self.cmp.do.insert_one({'field': 'nonlist'}) self.cmp.do.insert_one({'field': 2}) self.cmp.compare.find({'field': {'$elemMatch': {'b': 1}}}) self.cmp.compare_ignore_order.find({'field': {'$elemMatch': {'a': 1}}}) self.cmp.compare.find({'field': {'$elemMatch': {'b': {'$gt': 3}}}}) def test__find_by_elemMatchDirectQuery(self): self.cmp.do.insert_many([ {'_id': 0, 'arr': [0, 1, 2, 3, 10]}, {'_id': 1, 'arr': [0, 2, 4, 6]}, {'_id': 2, 'arr': [1, 3, 5, 7]} ]) self.cmp.compare_ignore_order.find({'arr': {'$elemMatch': {'$lt': 10, '$gt': 4}}}) def test__find_in_array(self): self.cmp.do.insert_one({'field': [{'a': 1, 'b': 2}, {'c': 3, 'd': 4}]}) self.cmp.compare.find({'field.0.a': 1}) self.cmp.compare.find({'field.0.b': 2}) self.cmp.compare.find({'field.1.c': 3}) self.cmp.compare.find({'field.1.d': 4}) self.cmp.compare.find({'field.0': {'$exists': True}}) self.cmp.compare.find({'field.0': {'$exists': False}}) self.cmp.compare.find({'field.0.a': {'$exists': True}}) self.cmp.compare.find({'field.0.a': {'$exists': False}}) self.cmp.compare.find({'field.1.a': 
{'$exists': True}}) self.cmp.compare.find({'field.1.a': {'$exists': False}}) self.cmp.compare.find( {'field.0.a': {'$exists': True}, 'field.1.a': {'$exists': False}}) def test__find_in_array_equal_null(self): self.cmp.do.insert_many([ {'_id': 1, 'shape': [{'color': 'red'}]}, {'_id': 2, 'shape': [{'color': 'yellow'}]}, {'_id': 3, 'shape': [{'color': 'red'}, {'color': 'yellow'}]}, {'_id': 4, 'shape': [{'size': 3}]}, {'_id': 5}, {'_id': 6, 'shape': {'color': ['red', 'yellow']}}, {'_id': 7, 'shape': [{'color': 'red'}, {'color': None}]}, ]) self.cmp.compare_ignore_order.find({'shape.color': {'$eq': None}}) self.cmp.compare_ignore_order.find({'shape.color': None}) def test__find_notequal(self): """Test searching with operators other than equality.""" bob = {'_id': 1, 'name': 'bob'} sam = {'_id': 2, 'name': 'sam'} a_goat = {'_id': 3, 'goatness': 'very'} self.cmp.do.insert_many([bob, sam, a_goat]) self.cmp.compare_ignore_order.find() self.cmp.compare_ignore_order.find({'name': {'$ne': 'bob'}}) self.cmp.compare_ignore_order.find({'goatness': {'$ne': 'very'}}) self.cmp.compare_ignore_order.find({'goatness': {'$ne': 'not very'}}) self.cmp.compare_ignore_order.find({'snakeness': {'$ne': 'very'}}) def test__find_notequal_by_value(self): """Test searching for None.""" bob = {'_id': 1, 'name': 'bob', 'sheepness': {'sometimes': True}} sam = {'_id': 2, 'name': 'sam', 'sheepness': {'sometimes': True}} a_goat = {'_id': 3, 'goatness': 'very', 'sheepness': {}} self.cmp.do.insert_many([bob, sam, a_goat]) self.cmp.compare_ignore_order.find({'goatness': None}) self.cmp.compare_ignore_order.find({'sheepness.sometimes': None}) def test__find_not(self): bob = {'_id': 1, 'name': 'bob'} sam = {'_id': 2, 'name': 'sam'} self.cmp.do.insert_many([bob, sam]) self.cmp.compare_ignore_order.find() self.cmp.compare_ignore_order.find({'name': {'$not': {'$ne': 'bob'}}}) self.cmp.compare_ignore_order.find({'name': {'$not': {'$ne': 'sam'}}}) self.cmp.compare_ignore_order.find({'name': {'$not': {'$ne': 
'dan'}}}) self.cmp.compare_ignore_order.find({'name': {'$not': {'$eq': 'bob'}}}) self.cmp.compare_ignore_order.find({'name': {'$not': {'$eq': 'sam'}}}) self.cmp.compare_ignore_order.find({'name': {'$not': {'$eq': 'dan'}}}) self.cmp.compare_ignore_order.find({'name': {'$not': re.compile('dan')}}) self.cmp.compare_ignore_order.find({'name': {'$not': Regex('dan')}}) def test__find_not_exceptions(self): # pylint: disable=expression-not-assigned self.cmp.do.insert_one(dict(noise='longhorn')) with self.assertRaises(OperationFailure): self.mongo_collection.find({'name': {'$not': True}})[0] with self.assertRaises(OperationFailure): self.fake_collection.find({'name': {'$not': True}})[0] with self.assertRaises(OperationFailure): self.mongo_collection.find({'name': {'$not': []}})[0] with self.assertRaises(OperationFailure): self.fake_collection.find({'name': {'$not': []}})[0] with self.assertRaises(OperationFailure): self.mongo_collection.find({'name': {'$not': ''}})[0] with self.assertRaises(OperationFailure): self.fake_collection.find({'name': {'$not': ''}})[0] def test__find_compare(self): self.cmp.do.insert_one(dict(noise='longhorn', sqrd='non numeric')) for x in range(10): self.cmp.do.insert_one(dict(num=x, sqrd=x * x)) self.cmp.compare_ignore_order.find({'sqrd': {'$lte': 4}}) self.cmp.compare_ignore_order.find({'sqrd': {'$lt': 4}}) self.cmp.compare_ignore_order.find({'sqrd': {'$gte': 64}}) self.cmp.compare_ignore_order.find({'sqrd': {'$gte': 25, '$lte': 36}}) def test__find_compare_objects(self): self.cmp.do.insert_many([ {'_id': 1, 'counts': {'circles': 3}}, {'_id': 2, 'counts': {'squares': 0}}, {'_id': 3, 'counts': {'arrows': 15}}, {'_id': 4, 'counts': {'circles': 1}}, {'_id': 5, 'counts': OrderedDict([ ('circles', 1), ('arrows', 15), ])}, {'_id': 6, 'counts': OrderedDict([ ('arrows', 15), ('circles', 1), ])}, {'_id': 7}, {'_id': 8, 'counts': {}}, {'_id': 9, 'counts': {'circles': 'three'}}, {'_id': 10, 'counts': {'circles': None}}, {'_id': 11, 'counts': {'circles': 
b'bytes'}}, ]) self.cmp.compare_ignore_order.find({'counts': {'$gt': {'circles': 1}}}) def test__find_compare_nested_objects(self): self.cmp.do.insert_many([ {'_id': 1, 'counts': {'circles': {'blue': 3}}}, {'_id': 2, 'counts': {'squares': 0}}, {'_id': 3, 'counts': {'arrows': {'blue': 2}}}, {'_id': 4, 'counts': {'circles': {}}}, {'_id': 5, 'counts': {'arrows': True}}, ]) self.cmp.compare_ignore_order.find( {'counts': {'$gt': {'circles': {'blue': 1}}}}) def test__find_sets(self): single = 4 even = [2, 4, 6, 8] prime = [2, 3, 5, 7] self.cmp.do.insert_many([ dict(x=single), dict(x=even), dict(x=prime), dict()]) self.cmp.compare_ignore_order.find({'x': {'$in': [7, 8]}}) self.cmp.compare_ignore_order.find({'x': {'$in': [4, 5]}}) self.cmp.compare_ignore_order.find({'x': {'$in': [4, None]}}) self.cmp.compare_ignore_order.find({'x': {'$nin': [2, 5]}}) self.cmp.compare_ignore_order.find({'x': {'$all': [2, 5]}}) self.cmp.compare_ignore_order.find({'x': {'$all': [7, 8]}}) self.cmp.compare_ignore_order.find({'x': 2}) self.cmp.compare_ignore_order.find({'x': 4}) self.cmp.compare_ignore_order.find({'$or': [{'x': 4}, {'x': 2}]}) self.cmp.compare_ignore_order.find({'$or': [{'x': 4}, {'x': 7}]}) self.cmp.compare_ignore_order.find({'$and': [{'x': 2}, {'x': 7}]}) self.cmp.compare_ignore_order.find({'$nor': [{'x': 3}]}) self.cmp.compare_ignore_order.find({'$nor': [{'x': 4}, {'x': 2}]}) def test__find_operators_in_list(self): self.cmp.do.insert_many([ dict(x=4), dict(x=[300, 500, 4]), dict(x=[1200, 300, 1400])]) self.cmp.compare_ignore_order.find({'x': {'$gte': 1100, '$lte': 1250}}) self.cmp.compare_ignore_order.find({'x': {'$gt': 300, '$lt': 400}}) def test__find_sets_regex(self): self.cmp.do.insert_many([ {'x': '123'}, {'x': ['abc', 'abd']}, ]) digits_pat = re.compile(r'^\d+') str_pat = re.compile(r'^ab[cd]') non_existing_pat = re.compile(r'^lll') self.cmp.compare_ignore_order.find({'x': {'$in': [digits_pat]}}) self.cmp.compare_ignore_order.find({'x': {'$in': [str_pat]}}) 
self.cmp.compare_ignore_order.find({'x': {'$in': [non_existing_pat]}}) self.cmp.compare_ignore_order.find({'x': {'$in': [non_existing_pat, '123']}}) self.cmp.compare_ignore_order.find({'x': {'$nin': [str_pat]}}) self.cmp.compare_ignore_order.find({'x': {'$nin': [non_existing_pat]}}) def test__find_negative_matches(self): self.cmp.do.insert_many([ {'_id': 1, 'shape': [{'color': 'red'}]}, {'_id': 2, 'shape': [{'color': 'yellow'}]}, {'_id': 3, 'shape': [{'color': 'red'}, {'color': 'yellow'}]}, {'_id': 4, 'shape': [{'size': 3}]}, {'_id': 5}, {'_id': 6, 'shape': {'color': ['red', 'yellow']}}, {'_id': 7, 'shape': {'color': 'red'}}, {'_id': 8, 'shape': {'color': ['blue', 'yellow']}}, {'_id': 9, 'shape': {'color': ['red']}}, ]) self.cmp.compare_ignore_order.find({'shape.color': {'$ne': 'red'}}) self.cmp.compare_ignore_order.find({'shape.color': {'$ne': ['red']}}) self.cmp.compare_ignore_order.find({'shape.color': {'$nin': ['blue', 'red']}}) def test__find_ne_multiple_keys(self): self.cmp.do.insert_many([ {'_id': 1, 'cases': [{'total': 1}]}, {'_id': 2, 'cases': [{'total': 2}]}, {'_id': 3, 'cases': [{'total': 3}]}, {'_id': 4, 'cases': []}, {'_id': 5}, ]) self.cmp.compare_ignore_order.find({'cases.total': {'$gt': 1, '$ne': 3}}) self.cmp.compare_ignore_order.find({'cases.total': {'$gt': 1, '$nin': [1, 3]}}) def test__find_and_modify_remove(self): self.cmp.do.insert_many([{'a': x, 'junk': True} for x in range(10)]) if helpers.PYMONGO_VERSION >= version.parse('4.0'): self.cmp.compare_exceptions.find_and_modify( {'a': 2}, remove=True, fields={'_id': False, 'a': True}) return self.cmp.compare.find_and_modify({'a': 2}, remove=True, fields={'_id': False, 'a': True}) self.cmp.compare_ignore_order.find() def test__find_one_and_delete(self): self.cmp.do.insert_many([{'a': i} for i in range(10)]) self.cmp.compare.find_one_and_delete({'a': 5}, {'_id': False}) self.cmp.compare.find() def test__find_one_and_replace(self): self.cmp.do.insert_many([{'a': i} for i in range(10)]) 
self.cmp.compare.find_one_and_replace( {'a': 5}, {'a': 11}, projection={'_id': False}) self.cmp.compare.find() def test__find_one_and_update(self): self.cmp.do.insert_many([{'a': i} for i in range(10)]) self.cmp.compare.find_one_and_update( {'a': 5}, {'$set': {'a': 11}}, projection={'_id': False}) self.cmp.compare.find() def test__find_sort_list(self): self.cmp.do.delete_many({}) for data in ({'a': 1, 'b': 3, 'c': 'data1'}, {'a': 2, 'b': 2, 'c': 'data3'}, {'a': 3, 'b': 1, 'c': 'data2'}): self.cmp.do.insert_one(data) self.cmp.compare.find(sort=[('a', 1), ('b', -1)]) self.cmp.compare.find(sort=[('b', 1), ('a', -1)]) self.cmp.compare.find(sort=[('b', 1), ('a', -1), ('c', 1)]) def test__find_sort_list_empty_order(self): self.cmp.do.delete_many({}) for data in ({'a': 1}, {'a': 2, 'b': -2}, {'a': 3, 'b': 4}, {'a': 4, 'b': b'bin1'}, {'a': 4, 'b': b'bin2'}, {'a': 4, 'b': b'alongbin1'}, {'a': 4, 'b': b'alongbin2'}, {'a': 4, 'b': b'zlongbin1'}, {'a': 4, 'b': b'zlongbin2'}): self.cmp.do.insert_one(data) self.cmp.compare.find(sort=[('b', 1)]) self.cmp.compare.find(sort=[('b', -1)]) def test__find_sort_list_nested_doc(self): self.cmp.do.delete_many({}) for data in ({'root': {'a': 1, 'b': 3, 'c': 'data1'}}, {'root': {'a': 2, 'b': 2, 'c': 'data3'}}, {'root': {'a': 3, 'b': 1, 'c': 'data2'}}): self.cmp.do.insert_one(data) self.cmp.compare.find(sort=[('root.a', 1), ('root.b', -1)]) self.cmp.compare.find(sort=[('root.b', 1), ('root.a', -1)]) self.cmp.compare.find( sort=[ ('root.b', 1), ('root.a', -1), ('root.c', 1)]) def test__find_sort_list_nested_list(self): self.cmp.do.delete_many({}) for data in ({'root': [{'a': 1, 'b': 3, 'c': 'data1'}]}, {'root': [{'a': 2, 'b': 2, 'c': 'data3'}]}, {'root': [{'a': 3, 'b': 1, 'c': 'data2'}]}): self.cmp.do.insert_one(data) self.cmp.compare.find(sort=[('root.0.a', 1), ('root.0.b', -1)]) self.cmp.compare.find(sort=[('root.0.b', 1), ('root.0.a', -1)]) self.cmp.compare.find( sort=[ ('root.0.b', 1), ('root.0.a', -1), ('root.0.c', 1)]) def 
test__find_limit(self): self.cmp.do.delete_many({}) for data in ({'a': 1, 'b': 3, 'c': 'data1'}, {'a': 2, 'b': 2, 'c': 'data3'}, {'a': 3, 'b': 1, 'c': 'data2'}): self.cmp.do.insert_one(data) self.cmp.compare.find(limit=2, sort=[('a', 1), ('b', -1)]) # pymongo limit defaults to 0, returning everything self.cmp.compare.find(limit=0, sort=[('a', 1), ('b', -1)]) def test__find_projection_subdocument_lists(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'a': 1, 'b': [{'c': 3, 'd': 4}, {'c': 5, 'd': 6}]}) self.cmp.compare.find_one({'a': 1}, {'_id': 0, 'a': 1, 'b': 1}) self.cmp.compare_exceptions.find_one( {'a': 1}, OrderedDict([('_id', 0), ('a', 1), ('b', 1), ('b.c', 1)])) self.cmp.compare_exceptions.find_one( {'a': 1}, OrderedDict([('_id', 0), ('a', 1), ('b.c', 1), ('b', 1)])) self.cmp.compare.find_one({'a': 1}, {'_id': 0, 'a': 1, 'b.c': 1}) self.cmp.compare.find_one({'a': 1}, {'_id': 0, 'a': 0, 'b.c': 0}) self.cmp.compare.find_one({'a': 1}, {'_id': 0, 'a': 1, 'b.c.e': 1}) self.cmp.compare_exceptions.find_one( {'a': 1}, OrderedDict([('_id', 0), ('a', 0), ('b.c', 0), ('b.c.e', 0)])) # This one is not implemented in mongmock yet. 
# self.cmp.compare.find_one( # {'a': 1}, OrderedDict([('_id', 0), ('a', 0), ('b.c.e', 0), ('b.c', 0)])) def test__find_type(self): supported_types = ( 'double', 'string', 'object', 'array', 'binData', 'objectId', 'bool', 'date', 'int', 'long', 'decimal', 'number', ) self.cmp.do.insert_many([ {'a': 1.2}, # double {'a': 'a string value'}, # string {'a': {'b': 1}}, # object {'a': [1, 2, 3]}, # array or int {'a': b'hello'}, # binData {'a': ObjectId()}, # objectId {'a': True}, # bool {'a': datetime.datetime.now()}, # date {'a': 1}, # int {'a': 1 << 32}, # long {'a': decimal128.Decimal128('1.1')}, # decimal ]) for type_name in supported_types: self.cmp.compare.find({'a': {'$type': type_name}}) @skipIf(sys.version_info < (3, 7), 'Older versions of Python cannot copy regex partterns') @skipIf( helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 or above do not specify uuid encoding') def test__sort_mixed_types(self): self.cmp.do.insert_many([ {'type': 'bool', 'a': True}, {'type': 'datetime', 'a': datetime.datetime.now()}, {'type': 'dict', 'a': {'a': 1}}, {'type': 'emptyList', 'a': []}, {'type': 'int', 'a': 1}, {'type': 'listOfList', 'a': [[1, 2], [3, 4]]}, {'type': 'missing'}, {'type': 'None', 'a': None}, {'type': 'ObjectId', 'a': ObjectId()}, {'type': 'regex', 'a': re.compile('a')}, {'type': 'repeatedInt', 'a': [1, 2]}, {'type': 'string', 'a': 'a'}, {'type': 'tupleOfTuple', 'a': ((1, 2), (3, 4))}, {'type': 'uuid', 'a': uuid.UUID(int=3)}, {'type': 'DBRef', 'a': DBRef('a', 'a', 'db_name')} ]) self.cmp.compare.find({}, sort=[('a', 1), ('type', 1)]) @skipIf( helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 or above do not specify uuid encoding') def test__find_sort_uuid(self): self.cmp.do.delete_many({}) self.cmp.do.insert_many([ {'_id': uuid.UUID(int=3), 'timestamp': 99, 'a': 1}, {'_id': uuid.UUID(int=1), 'timestamp': 100, 'a': 3}, {'_id': uuid.UUID(int=2), 'timestamp': 100, 'a': 2}, ]) self.cmp.compare.find({}, sort=[('timestamp', 1), ('_id', 1)]) 
@skipIf( helpers.PYMONGO_VERSION < version.parse('4.0'), 'old version of pymongo accepts to encode uuid') def test__fail_at_uuid_encoding(self): self.cmp.compare_exceptions.insert_one({'_id': uuid.UUID(int=2)}) def test__find_all(self): self.cmp.do.insert_many([ { 'code': 'ijk', 'tags': ['electronics', 'school'], 'qty': [{'size': 'M', 'num': 100, 'color': 'green'}], }, { 'code': 'efg', 'tags': ['school', 'book'], 'qty': [ {'size': 'S', 'num': 10, 'color': 'blue'}, {'size': 'M', 'num': 100, 'color': 'blue'}, {'size': 'L', 'num': 100, 'color': 'green'}, ], }, ]) self.cmp.compare.find({'qty.size': {'$all': ['M', 'L']}}) # def test__as_class(self): # class MyDict(dict): # pass # # self.cmp.do.delete_many({}) # self.cmp.do.insert_one( # {'a': 1, 'b': {'ba': 3, 'bb': 4, 'bc': [{'bca': 5}]}}) # self.cmp.compare.find({}, as_class=MyDict) # self.cmp.compare.find({'a': 1}, as_class=MyDict) def test__return_only_selected_fields(self): self.cmp.do.insert_one({'name': 'Chucky', 'type': 'doll', 'model': 'v6'}) self.cmp.compare_ignore_order.find( {'name': 'Chucky'}, projection=['type']) def test__return_only_selected_fields_no_id(self): self.cmp.do.insert_one({'name': 'Chucky', 'type': 'doll', 'model': 'v6'}) self.cmp.compare_ignore_order.find( {'name': 'Chucky'}, projection={'type': 1, '_id': 0}) def test__return_only_selected_fields_nested_field_found(self): self.cmp.do.insert_one( {'name': 'Chucky', 'properties': {'type': 'doll', 'model': 'v6'}}) self.cmp.compare_ignore_order.find( {'name': 'Chucky'}, projection=['properties.type']) def test__return_only_selected_fields_nested_field_not_found(self): self.cmp.do.insert_one( {'name': 'Chucky', 'properties': {'type': 'doll', 'model': 'v6'}}) self.cmp.compare_ignore_order.find( {'name': 'Chucky'}, projection=['properties.color']) def test__return_only_selected_fields_nested_field_found_no_id(self): self.cmp.do.insert_one( {'name': 'Chucky', 'properties': {'type': 'doll', 'model': 'v6'}}) self.cmp.compare_ignore_order.find( 
{'name': 'Chucky'}, projection={'properties.type': 1, '_id': 0}) def test__return_only_selected_fields_nested_field_not_found_no_id(self): self.cmp.do.insert_one( {'name': 'Chucky', 'properties': {'type': 'doll', 'model': 'v6'}}) self.cmp.compare_ignore_order.find( {'name': 'Chucky'}, projection={'properties.color': 1, '_id': 0}) def test__exclude_selected_fields(self): self.cmp.do.insert_one({'name': 'Chucky', 'type': 'doll', 'model': 'v6'}) self.cmp.compare_ignore_order.find( {'name': 'Chucky'}, projection={'type': 0}) def test__exclude_selected_fields_including_id(self): self.cmp.do.insert_one({'name': 'Chucky', 'type': 'doll', 'model': 'v6'}) self.cmp.compare_ignore_order.find( {'name': 'Chucky'}, projection={'type': 0, '_id': 0}) def test__exclude_all_fields_including_id(self): self.cmp.do.insert_one({'name': 'Chucky', 'type': 'doll'}) self.cmp.compare.find( {'name': 'Chucky'}, projection={'type': 0, '_id': 0, 'name': 0}) def test__exclude_selected_nested_fields(self): self.cmp.do.insert_one( {'name': 'Chucky', 'properties': {'type': 'doll', 'model': 'v6'}}) self.cmp.compare_ignore_order.find( {'name': 'Chucky'}, projection={'properties.type': 0}) def test__exclude_all_selected_nested_fields(self): self.cmp.do.insert_one( {'name': 'Chucky', 'properties': {'type': 'doll', 'model': 'v6'}}) self.cmp.compare_ignore_order.find( {'name': 'Chucky'}, projection={'properties.type': 0, 'properties.model': 0}) def test__default_fields_if_projection_empty(self): self.cmp.do.insert_one({'name': 'Chucky', 'type': 'doll', 'model': 'v6'}) self.cmp.compare_ignore_order.find({'name': 'Chucky'}, projection=[]) def test__projection_slice_int_first(self): self.cmp.do.insert_one({'name': 'Array', 'values': [0, 1, 2, 3, 4, 5, 6, 7]}) self.cmp.compare.find({'name': 'Array'}, projection={'name': 1, 'values': {'$slice': 1}}) def test__projection_slice_int_last(self): self.cmp.do.insert_one({'name': 'Array', 'values': [0, 1, 2, 3, 4, 5, 6, 7]}) self.cmp.compare.find({'name': 'Array'}, 
projection={'name': 1, 'values': {'$slice': -1}}) def test__projection_slice_list_pos(self): self.cmp.do.insert_one({'name': 'Array', 'values': [0, 1, 2, 3, 4, 5, 6, 7]}) self.cmp.compare.find({'name': 'Array'}, projection={ 'name': 1, 'values': {'$slice': [3, 1]}}) def test__projection_slice_list_neg(self): self.cmp.do.insert_one({'name': 'Array', 'values': [0, 1, 2, 3, 4, 5, 6, 7]}) self.cmp.compare.find({'name': 'Array'}, projection={ 'name': 1, 'values': {'$slice': [-3, 1]}}) def test__projection_slice_list_pos_to_end(self): self.cmp.do.insert_one({'name': 'Array', 'values': [0, 1, 2, 3, 4, 5, 6, 7]}) self.cmp.compare.find({'name': 'Array'}, projection={ 'name': 1, 'values': {'$slice': [3, 10]}}) def test__projection_slice_list_neg_to_end(self): self.cmp.do.insert_one({'name': 'Array', 'values': [0, 1, 2, 3, 4, 5, 6, 7]}) self.cmp.compare.find({'name': 'Array'}, projection={ 'name': 1, 'values': {'$slice': [-3, 10]}}) def test__projection_slice_list_select_subfield(self): self.cmp.do.insert_one({'name': 'Array', 'values': [ {'num': 0, 'val': 1}, {'num': 1, 'val': 2}]}) self.cmp.compare_exceptions.find({'name': 'Array'}, projection={ 'values.num': 1, 'values': {'$slice': 1}}) def test__projection_slice_list_wrong_num_slice(self): self.cmp.do.insert_one({'name': 'Array', 'values': [0, 1, 2, 3, 4, 5, 6, 7]}) self.cmp.compare_exceptions.find({'name': 'Array'}, projection={ 'name': 1, 'values': {'$slice': [-3, 10, 1]}}) def test__projection_slice_list_wrong_slice_type(self): self.cmp.do.insert_one({'name': 'Array', 'values': [0, 1, 2, 3, 4, 5, 6, 7]}) self.cmp.compare_exceptions.find({'name': 'Array'}, projection={ 'name': 1, 'values': {'$slice': [1.0]}}) def test__projection_slice_list_wrong_slice_value_type(self): self.cmp.do.insert_one({'name': 'Array', 'values': [0, 1, 2, 3, 4, 5, 6, 7]}) self.cmp.compare_exceptions.find({'name': 'Array'}, projection={ 'name': 1, 'values': {'$slice': '3'}}) def test__projection_slice_list_wrong_value_type(self): 
self.cmp.do.insert_one({'name': 'Array', 'values': 0}) self.cmp.compare_exceptions.find({'name': 'Array'}, projection={ 'name': 1, 'values': {'$slice': 1}}) def test__remove(self): """Test the remove method.""" self.cmp.do.insert_one({'value': 1}) self.cmp.compare_ignore_order.find() if helpers.PYMONGO_VERSION >= version.parse('4.0'): self.cmp.compare_exceptions.remove() return self.cmp.do.remove() self.cmp.compare.find() self.cmp.do.insert_many([ {'name': 'bob'}, {'name': 'sam'}, ]) self.cmp.compare_ignore_order.find() self.cmp.do.remove({'name': 'bob'}) self.cmp.compare_ignore_order.find() self.cmp.do.remove({'name': 'notsam'}) self.cmp.compare.find() self.cmp.do.remove({'name': 'sam'}) self.cmp.compare.find() def test__delete_one(self): self.cmp.do.insert_many([{'a': i} for i in range(10)]) self.cmp.compare.find() self.cmp.do.delete_one({'a': 5}) self.cmp.compare.find() def test__delete_many(self): self.cmp.do.insert_many([{'a': i} for i in range(10)]) self.cmp.compare.find() self.cmp.do.delete_many({'a': {'$gt': 5}}) self.cmp.compare.find() def test__update(self): doc = {'a': 1} self.cmp.do.insert_one(doc) new_document = {'new_attr': 2} if helpers.PYMONGO_VERSION >= version.parse('4.0'): self.cmp.compare_exceptions.update({'a': 1}, new_document) return self.cmp.do.update({'a': 1}, new_document) self.cmp.compare_ignore_order.find() @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 or above dropped update') def test__update_upsert_with_id(self): self.cmp.do.update( {'a': 1}, {'_id': ObjectId('52d669dcad547f059424f783'), 'a': 1}, upsert=True) self.cmp.compare.find() def test__update_with_zero_id(self): self.cmp.do.insert_one({'_id': 0}) self.cmp.do.replace_one({'_id': 0}, {'a': 1}) self.cmp.compare.find() def test__update_upsert_with_dots(self): self.cmp.do.update_one( {'a.b': 1}, {'$set': {'c': 2}}, upsert=True) self.cmp.compare.find() def test__update_upsert_with_operators(self): self.cmp.do.update_one( {'$or': [{'name': 'billy'}, {'name': 
'Billy'}]}, {'$set': {'name': 'Billy', 'age': 5}}, upsert=True) self.cmp.compare.find() self.cmp.do.update_one({'a.b': {'$eq': 1}, 'd': {}}, {'$set': {'c': 2}}, upsert=True) self.cmp.compare.find() def test__update_upsert_with_matched_subdocuments(self): self.cmp.do.update_one( {'b.c.': 1, 'b.d': 3}, {'$set': {'a': 1}}, upsert=True) self.cmp.compare.find() @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 or above dropped update') def test__update_with_empty_document_comes(self): """Tests calling update_one with just '{}' for replacing whole document""" self.cmp.do.insert_one({'name': 'bob', 'hat': 'wide'}) self.cmp.do.update({'name': 'bob'}, {}) self.cmp.compare.find() def test__update_one(self): self.cmp.do.insert_many([{'a': 1, 'b': 0}, {'a': 2, 'b': 0}]) self.cmp.compare.find() self.cmp.do.update_one({'a': 2}, {'$set': {'b': 1}}) self.cmp.compare.find() self.cmp.do.update_one({'a': 3}, {'$set': {'a': 3, 'b': 0}}) self.cmp.compare.find() self.cmp.do.update_one({'a': 3}, {'$set': {'a': 3, 'b': 0}}, upsert=True) self.cmp.compare.find() self.cmp.compare_exceptions.update_one({}, {'$set': {}}) self.cmp.compare_exceptions.update_one({'a': 'does-not-exist'}, {'$set': {}}) self.cmp.compare_exceptions.update_one({'a': 'does-not-exist'}, {'$set': {}}, upsert=True) def test__update_many(self): self.cmp.do.insert_many([{'a': 1, 'b': 0}, {'a': 2, 'b': 0}]) self.cmp.compare.find() self.cmp.do.update_many({'b': 1}, {'$set': {'b': 1}}) self.cmp.compare.find() self.cmp.do.update_many({'b': 0}, {'$set': {'b': 1}}) self.cmp.compare.find() def test__replace_one(self): self.cmp.do.insert_many([{'a': 1, 'b': 0}, {'a': 2, 'b': 0}]) self.cmp.compare.find() self.cmp.do.replace_one({'a': 2}, {'a': 3, 'b': 0}) self.cmp.compare.find() self.cmp.do.replace_one({'a': 4}, {'a': 4, 'b': 0}) self.cmp.compare.find() self.cmp.do.replace_one({'a': 4}, {'a': 4, 'b': 0}, upsert=True) self.cmp.compare.find() def test__set(self): """Tests calling update with $set members.""" 
self.cmp.do.update_one( {'_id': 42}, {'$set': {'some': 'thing'}}, upsert=True) self.cmp.compare.find({'_id': 42}) self.cmp.do.insert_one({'name': 'bob'}) self.cmp.do.update_one({'name': 'bob'}, {'$set': {'hat': 'green'}}) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.update_one({'name': 'bob'}, {'$set': {'hat': 'red'}}) self.cmp.compare.find({'name': 'bob'}) def test__unset(self): """Tests calling update with $unset members.""" self.cmp.do.update_many({'name': 'bob'}, {'$set': {'a': 'aaa'}}, upsert=True) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$unset': {'a': 0}}) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$set': {'a': 'aaa'}}, upsert=True) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$unset': {'a': 1}}) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$set': {'a': 'aaa'}}, upsert=True) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$unset': {'a': ''}}) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$set': {'a': 'aaa'}}, upsert=True) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$unset': {'a': True}}) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$set': {'a': 'aaa'}}, upsert=True) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$unset': {'a': False}}) self.cmp.compare.find({'name': 'bob'}) def test__unset_nested(self): self.cmp.do.update_many({'_id': 1}, {'$set': {'a': {'b': 1, 'c': 2}}}, upsert=True) self.cmp.do.update_many({'_id': 1}, {'$unset': {'a.b': True}}) self.cmp.compare.find() self.cmp.do.update_many({'_id': 1}, {'$set': {'a': {'b': 1, 'c': 2}}}, upsert=True) self.cmp.do.update_many({'_id': 1}, {'$unset': {'a.b': False}}) self.cmp.compare.find() self.cmp.do.update_many({'_id': 1}, {'$set': {'a': {'b': 1}}}, upsert=True) 
self.cmp.do.update_many({'_id': 1}, {'$unset': {'a.b': True}}) self.cmp.compare.find() self.cmp.do.update_many({'_id': 1}, {'$set': {'a': {'b': 1}}}, upsert=True) self.cmp.do.update_many({'_id': 1}, {'$unset': {'a.b': False}}) self.cmp.compare.find() def test__unset_positional(self): self.cmp.do.insert_one({'a': 1, 'b': [{'c': 2, 'd': 3}]}) self.cmp.do.update_many( {'a': 1, 'b': {'$elemMatch': {'c': 2, 'd': 3}}}, {'$unset': {'b.$.c': ''}} ) self.cmp.compare.find() def test__set_upsert(self): self.cmp.do.delete_many({}) self.cmp.do.update_many({'name': 'bob'}, {'$set': {'age': 1}}, True) self.cmp.compare.find() self.cmp.do.update_many({'name': 'alice'}, {'$set': {'age': 1}}, True) self.cmp.compare_ignore_order.find() def test__set_subdocument_array(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'data': [0, 0]}) self.cmp.do.insert_one({'name': 'bob', 'some_field': 'B', 'data': [0, 0]}) self.cmp.do.update_many({'name': 'bob'}, {'$set': {'some_field': 'A', 'data.1': 3}}) self.cmp.compare.find() def test__set_subdocument_array_bad_index_after_dot(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'some_field': 'B', 'data': [0, 0]}) self.cmp.do.update_many({'name': 'bob'}, {'$set': {'some_field': 'A', 'data.3': 1}}) self.cmp.compare.find() def test__set_subdocument_array_bad_neg_index_after_dot(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'some_field': 'B', 'data': [0, 0]}) self.cmp.compare_exceptions.update_many({'name': 'bob'}, {'$set': {'data.-3': 1}}) def test__set_subdocuments_positional(self): self.cmp.do.insert_one({'name': 'bob', 'subdocs': [ {'id': 1, 'name': 'foo'}, {'id': 2, 'name': 'bar'} ]}) self.cmp.do.update_many( {'name': 'bob', 'subdocs.id': 2}, {'$set': {'subdocs.$': {'id': 3, 'name': 'baz'}}}) self.cmp.compare.find() def test__inc(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) for _ in range(3): self.cmp.do.update_many({'name': 'bob'}, {'$inc': 
{'count': 1}}) self.cmp.compare.find({'name': 'bob'}) def test__max(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) for i in range(3): self.cmp.do.update_many({'name': 'bob'}, {'$max': {'count': i}}) self.cmp.compare.find({'name': 'bob'}) def test__min(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) for i in range(3): self.cmp.do.update_many({'name': 'bob'}, {'$min': {'count': i}}) self.cmp.compare.find({'name': 'bob'}) def test__inc_upsert(self): self.cmp.do.delete_many({}) for _ in range(3): self.cmp.do.update_many({'name': 'bob'}, {'$inc': {'count': 1}}, True) self.cmp.compare.find({'name': 'bob'}) def test__inc_subdocument(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'data': {'age': 0}}) self.cmp.do.update_many({'name': 'bob'}, {'$inc': {'data.age': 1}}) self.cmp.compare.find() self.cmp.do.update_many({'name': 'bob'}, {'$inc': {'data.age2': 1}}) self.cmp.compare.find() def test__inc_subdocument_array(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'data': [0, 0]}) self.cmp.do.update_many({'name': 'bob'}, {'$inc': {'data.1': 1}}) self.cmp.compare.find() self.cmp.do.update_many({'name': 'bob'}, {'$inc': {'data.1': 1}}) self.cmp.compare.find() def test__inc_subdocument_array_bad_index_after_dot(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'data': [0, 0]}) self.cmp.do.update_many({'name': 'bob'}, {'$inc': {'data.3': 1}}) self.cmp.compare.find() def test__inc_subdocument_array_bad_neg_index_after_dot(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'data': [0, 0]}) self.cmp.compare_exceptions.update_many({'name': 'bob'}, {'$inc': {'data.-3': 1}}) def test__inc_subdocument_positional(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'data': [{'age': 0}, {'age': 1}]}) self.cmp.do.update_many( {'name': 'bob', 'data': {'$elemMatch': {'age': 0}}}, {'$inc': {'data.$.age': 1}}) 
self.cmp.compare.find() def test__setOnInsert(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$setOnInsert': {'age': 1}}) self.cmp.compare.find() self.cmp.do.update_many({'name': 'ann'}, {'$setOnInsert': {'age': 1}}) self.cmp.compare.find() def test__setOnInsert_upsert(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$setOnInsert': {'age': 1}}, True) self.cmp.compare.find() self.cmp.do.update_many({'name': 'ann'}, {'$setOnInsert': {'age': 1}}, True) self.cmp.compare.find() def test__setOnInsert_subdocument(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'data': {'age': 0}}) self.cmp.do.update_many({'name': 'bob'}, {'$setOnInsert': {'data.age': 1}}) self.cmp.compare.find() self.cmp.do.update_many({'name': 'bob'}, {'$setOnInsert': {'data.age1': 1}}) self.cmp.compare.find() self.cmp.do.update_many({'name': 'ann'}, {'$setOnInsert': {'data.age': 1}}) self.cmp.compare.find() def test__setOnInsert_subdocument_upsert(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'data': {'age': 0}}) self.cmp.do.update_many( {'name': 'bob'}, {'$setOnInsert': {'data.age': 1}}, True) self.cmp.compare.find() self.cmp.do.update_many( {'name': 'bob'}, {'$setOnInsert': {'data.age1': 1}}, True) self.cmp.compare.find() self.cmp.do.update_many( {'name': 'ann'}, {'$setOnInsert': {'data.age': 1}}, True) self.cmp.compare.find() def test__setOnInsert_subdocument_elemMatch(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'data': [{'age': 0}, {'age': 1}]}) self.cmp.do.update_many( {'name': 'bob', 'data': {'$elemMatch': {'age': 0}}}, {'$setOnInsert': {'data.$.age': 1}}) self.cmp.compare.find() def test__inc_subdocument_positional_upsert(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'data': [{'age': 0}, {'age': 1}]}) self.cmp.do.update_many( {'name': 'bob', 'data': 
{'$elemMatch': {'age': 0}}}, {'$setOnInsert': {'data.$.age': 1}}, True) self.cmp.compare.find() def test__set_dollar_operand(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'recordId': 1234, 'app': [ {'application': 'AppName', 'code': 1234, 'property': 'oldValue'}, {'application': 'AppName1', 'code': 1235, 'property': 'oldValue1'}]}) self.cmp.do.update_many( {'app': {'$elemMatch': {'application': 'AppName', 'code': 1234}}}, {'$set': {'app.$': {'application': 'AppName', 'code': 1234, 'property': 'newValue'}}}) def test__addToSet(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) for _ in range(3): self.cmp.do.update_many({'name': 'bob'}, {'$addToSet': {'hat': 'green'}}) self.cmp.compare.find({'name': 'bob'}) for _ in range(3): self.cmp.do.update_many({'name': 'bob'}, {'$addToSet': {'hat': 'tall'}}) self.cmp.compare.find({'name': 'bob'}) def test__addToSet_nested(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) for _ in range(3): self.cmp.do.update_many( {'name': 'bob'}, {'$addToSet': {'hat.color': 'green'}}) self.cmp.compare.find({'name': 'bob'}) for _ in range(3): self.cmp.do.update_many( {'name': 'bob'}, {'$addToSet': {'hat.color': 'tall'}}) self.cmp.compare.find({'name': 'bob'}) def test__addToSet_each(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) for _ in range(3): self.cmp.do.update_many( {'name': 'bob'}, {'$addToSet': {'hat': {'$each': ['green', 'yellow']}}}) self.cmp.compare.find({'name': 'bob'}) for _ in range(3): self.cmp.do.update_many( {'name': 'bob'}, {'$addToSet': {'shirt.color': {'$each': ['green', 'yellow']}}}) self.cmp.compare.find({'name': 'bob'}) def test__addToSet_dollar_operand(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'takes': [{'a': 2, 'tags': []}, {'a': 1, 'tags': [2]}]}) self.cmp.do.update_many( {'takes': {'$elemMatch': {'a': 1}}}, {'$addToSet': {'takes.$.tags': 3}}) def test__pop(self): self.cmp.do.delete_many({}) 
self.cmp.do.insert_one({'name': 'bob', 'hat': ['green', 'tall']}) self.cmp.do.update_many({'name': 'bob'}, {'$pop': {'hat': 1}}) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': ['green', 'tall']}) self.cmp.do.update_many({'name': 'bob'}, {'$pop': {'hat': -1}}) self.cmp.compare.find({'name': 'bob'}) def test__pop_invalid_type(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': 'green'}) self.cmp.compare_exceptions.update_many({'name': 'bob'}, {'$pop': {'hat': 1}}) self.cmp.compare_exceptions.update_many({'name': 'bob'}, {'$pop': {'hat': -1}}) def test__pop_invalid_syntax(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': ['green']}) self.cmp.compare_exceptions.update_many({'name': 'bob'}, {'$pop': {'hat': 2}}) self.cmp.compare_exceptions.update_many({'name': 'bob'}, {'$pop': {'hat': '5'}}) self.cmp.compare_exceptions.update_many({'name': 'bob'}, {'$pop': {'hat.-1': 1}}) def test__pop_array_in_array(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': [['green']]}) self.cmp.do.update_many({'name': 'bob'}, {'$pop': {'hat.0': 1}}) self.cmp.compare.find({'name': 'bob'}) def test__pop_too_far_in_array(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': [['green']]}) self.cmp.do.update_many({'name': 'bob'}, {'$pop': {'hat.50': 1}}) self.cmp.compare.find({'name': 'bob'}) def test__pop_document_in_array(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': [{'hat': ['green']}]}) self.cmp.do.update_many({'name': 'bob'}, {'$pop': {'hat.0.hat': 1}}) self.cmp.compare.find({'name': 'bob'}) def test__pop_invalid_document_in_array(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': [{'hat': 'green'}]}) self.cmp.compare_exceptions.update_many({'name': 'bob'}, {'$pop': {'hat.0.hat': 1}}) def test__pop_empty(self): self.cmp.do.delete_many({}) 
self.cmp.do.insert_one({'name': 'bob', 'hat': []}) self.cmp.do.update_many({'name': 'bob'}, {'$pop': {'hat': 1}}) self.cmp.compare.find({'name': 'bob'}) def test__pull(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$pull': {'hat': 'green'}}) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': ['green', 'tall']}) self.cmp.do.update_many({'name': 'bob'}, {'$pull': {'hat': 'green'}}) self.cmp.compare.find({'name': 'bob'}) def test__pull_query(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': [{'size': 5}, {'size': 10}]}) self.cmp.do.update_many( {'name': 'bob'}, {'$pull': {'hat': {'size': {'$gt': 6}}}}) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.delete_many({}) self.cmp.do.insert_one( {'name': 'bob', 'hat': {'sizes': [{'size': 5}, {'size': 8}, {'size': 10}]}} ) self.cmp.do.update_many( {'name': 'bob'}, {'$pull': {'hat.sizes': {'size': {'$gt': 6}}}}) self.cmp.compare.find({'name': 'bob'}) def test__pull_in_query_operator(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'sizes': [0, 1, 2, 3, 4, 5]}) self.cmp.do.update_one({'name': 'bob'}, {'$pull': {'sizes': {'$in': [1, 3]}}}) self.cmp.compare.find({'name': 'bob'}) def test__pull_in_nested_field(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'nested': {'sizes': [0, 1, 2, 3, 4, 5]}}) self.cmp.do.update_one({'name': 'bob'}, {'$pull': {'nested.sizes': {'$in': [1, 3]}}}) self.cmp.compare.find({'name': 'bob'}) def test__pull_nested_dict(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({ 'name': 'bob', 'hat': [ {'name': 'derby', 'sizes': [{'size': 'L', 'quantity': 3}, {'size': 'XL', 'quantity': 4}], 'colors': ['green', 'blue']}, {'name': 'cap', 'sizes': [{'size': 'S', 'quantity': 10}, {'size': 'L', 'quantity': 5}], 'colors': ['blue']}]}) self.cmp.do.update_many( {'hat': {'$elemMatch': {'name': 
'derby'}}}, {'$pull': {'hat.$.sizes': {'size': 'L'}}}) self.cmp.compare.find({'name': 'bob'}) def test__pull_nested_list(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one( {'name': 'bob', 'hat': [{'name': 'derby', 'sizes': ['L', 'XL']}, {'name': 'cap', 'sizes': ['S', 'L']}]}) self.cmp.do.update_many( {'hat': {'$elemMatch': {'name': 'derby'}}}, {'$pull': {'hat.$.sizes': 'XL'}}) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.delete_many({}) self.cmp.do.insert_one( {'name': 'bob', 'hat': {'nested': ['element1', 'element2', 'element1']}}) self.cmp.do.update_many({'name': 'bob'}, {'$pull': {'hat.nested': 'element1'}}) self.cmp.compare.find({'name': 'bob'}) def test__pullAll(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$pullAll': {'hat': ['green']}}) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) self.cmp.do.update_many( {'name': 'bob'}, {'$pullAll': {'hat': ['green', 'blue']}}) self.cmp.compare.find({'name': 'bob'}) self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': ['green', 'tall', 'blue']}) self.cmp.do.update_many({'name': 'bob'}, {'$pullAll': {'hat': ['green']}}) self.cmp.compare.find({'name': 'bob'}) def test__pullAll_dollar_operand(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'takes': [ {'a': 1, 'tags': [0, 1, 2, 4]}, {'a': 2, 'tags': [0, 1, 2, 4]}, {'a': 1, 'tags': [0, 1, 4]}, {'a': 1, 'tags': [2, 3, 5]}]}) self.cmp.do.update_many( {'name': 'bob', 'takes': {'$elemMatch': {'a': 1}}}, {'$pullAll': {'takes.$.tags': [1, 2, 3]}}) self.cmp.compare.find({'name': 'bob'}) def test__push(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': ['green', 'tall']}) self.cmp.do.update_many({'name': 'bob'}, {'$push': {'hat': 'wide'}}) self.cmp.compare.find({'name': 'bob'}) def test__push_dict(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one( {'name': 
'bob', 'hat': [{'name': 'derby', 'sizes': ['L', 'XL']}]}) self.cmp.do.update_many( {'name': 'bob'}, {'$push': {'hat': {'name': 'cap', 'sizes': ['S', 'L']}}}) self.cmp.compare.find({'name': 'bob'}) def test__push_each(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': ['green', 'tall']}) self.cmp.do.update_many( {'name': 'bob'}, {'$push': {'hat': {'$each': ['wide', 'blue']}}}) self.cmp.compare.find({'name': 'bob'}) def test__push_nested_dict(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({ 'name': 'bob', 'hat': [ {'name': 'derby', 'sizes': [{'size': 'L', 'quantity': 3}, {'size': 'XL', 'quantity': 4}], 'colors': ['green', 'blue']}, {'name': 'cap', 'sizes': [{'size': 'S', 'quantity': 10}, {'size': 'L', 'quantity': 5}], 'colors': ['blue']}]}) self.cmp.do.update_many( {'hat': {'$elemMatch': {'name': 'derby'}}}, {'$push': {'hat.$.sizes': {'size': 'M', 'quantity': 6}}}) self.cmp.compare.find({'name': 'bob'}) def test__push_nested_dict_each(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({ 'name': 'bob', 'hat': [ {'name': 'derby', 'sizes': [{'size': 'L', 'quantity': 3}, {'size': 'XL', 'quantity': 4}], 'colors': ['green', 'blue']}, {'name': 'cap', 'sizes': [{'size': 'S', 'quantity': 10}, {'size': 'L', 'quantity': 5}], 'colors': ['blue']}]}) self.cmp.do.update_many( {'hat': {'$elemMatch': {'name': 'derby'}}}, {'$push': {'hat.$.sizes': {'$each': [{'size': 'M', 'quantity': 6}, {'size': 'S', 'quantity': 1}]}}}) self.cmp.compare.find({'name': 'bob'}) def test__push_nested_dict_in_list(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({ 'name': 'bob', 'hat': [ {'name': 'derby', 'sizes': [{'size': 'L', 'quantity': 3}, {'size': 'XL', 'quantity': 4}], 'colors': ['green', 'blue']}, {'name': 'cap', 'sizes': [{'size': 'S', 'quantity': 10}, {'size': 'L', 'quantity': 5}], 'colors': ['blue']}]}) self.cmp.do.update_many( {'name': 'bob'}, {'$push': {'hat.1.sizes': {'size': 'M', 'quantity': 6}}}) self.cmp.compare.find({'name': 
'bob'}) def test__push_nested_list_each(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({ 'name': 'bob', 'hat': [ {'name': 'derby', 'sizes': ['L', 'XL'], 'colors': ['green', 'blue']}, {'name': 'cap', 'sizes': ['S', 'L'], 'colors': ['blue']} ] }) self.cmp.do.update_many( {'hat': {'$elemMatch': {'name': 'derby'}}}, {'$push': {'hat.$.sizes': {'$each': ['M', 'S']}}}) self.cmp.compare.find({'name': 'bob'}) def test__push_nested_attribute(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': {'data': {'sizes': ['XL']}}}) self.cmp.do.update_many({'name': 'bob'}, {'$push': {'hat.data.sizes': 'L'}}) self.cmp.compare.find({'name': 'bob'}) def test__push_nested_attribute_each(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob', 'hat': {}}) self.cmp.do.update_many( {'name': 'bob'}, {'$push': {'hat.first': {'$each': ['a', 'b']}}}) self.cmp.compare.find({'name': 'bob'}) def test__push_to_absent_nested_attribute(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$push': {'hat.data.sizes': 'L'}}) self.cmp.compare.find({'name': 'bob'}) def test__push_to_absent_field(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) self.cmp.do.update_many({'name': 'bob'}, {'$push': {'hat': 'wide'}}) self.cmp.compare.find({'name': 'bob'}) def test__push_each_to_absent_field(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'name': 'bob'}) self.cmp.do.update_many( {'name': 'bob'}, {'$push': {'hat': {'$each': ['wide', 'blue']}}}) self.cmp.compare.find({'name': 'bob'}) def test__push_each_slice(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'scores': [40, 50, 60]}) self.cmp.do.update_one({}, {'$push': {'scores': { '$each': [80, 78, 86], '$slice': -5, }}}) self.cmp.compare.find() self.cmp.do.update_one({}, {'$push': {'scores': { '$each': [100, 20], '$slice': 3, }}}) self.cmp.compare.find() self.cmp.do.update_one({}, {'$push': 
{'scores': { '$each': [], '$slice': 2, }}}) self.cmp.compare.find() self.cmp.do.update_one({}, {'$push': {'scores': { '$each': [25, 15], '$slice': 0, }}}) self.cmp.compare.find() def test__update_push_slice_nested_field(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'games': [{'scores': [40, 50, 60]}, {'a': 1}]}) self.cmp.do.update_one({}, {'$push': {'games.0.scores': { '$each': [80, 78, 86], '$slice': -5, }}}) self.cmp.compare.find() self.cmp.do.update_one( {'games': {'$elemMatch': {'scores': {'$exists': True}}}}, {'$push': {'games.$.scores': {'$each': [0, 1], '$slice': -5}}}, ) self.cmp.compare.find() def test__update_push_array_of_arrays(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one({'scores': [[40, 50], [60, 20]]}) self.cmp.do.update_one( {'scores': {'$elemMatch': {'0': 60}}}, {'$push': {'scores.$': 30}}, ) self.cmp.compare.find() def test__update_push_sort(self): self.cmp.do.delete_many({}) self.cmp.do.insert_one( {'a': {'b': [{'value': 3}, {'value': 1}, {'value': 2}]}}) self.cmp.do.update_one({}, {'$push': {'a.b': { '$each': [{'value': 4}], '$sort': {'value': 1}, }}}) self.cmp.compare.find() def _compare_update_push_position(self, position): self.cmp.do.delete_many({}) self.cmp.do.insert_one( {'a': {'b': [{'value': 3}, {'value': 1}, {'value': 2}]}}) self.cmp.do.update_one({}, {'$push': {'a.b': { '$each': [{'value': 4}], '$position': position, }}}) self.cmp.compare.find() def test__update_push_position(self): self._compare_update_push_position(0) self._compare_update_push_position(1) self._compare_update_push_position(5) # TODO(pascal): Enable once we test against Mongo v3.6+ # self._compare_update_push_position(-2) def test__drop(self): self.cmp.do.insert_one({'name': 'another new'}) self.cmp.do.drop() self.cmp.compare.find({}) def test__ensure_index(self): if helpers.PYMONGO_VERSION >= version.parse('4.0'): self.cmp.compare_exceptions.ensure_index('name') return self.cmp.compare.ensure_index('name') 
self.cmp.compare.ensure_index('hat', cache_for=100) self.cmp.compare.ensure_index([('name', 1), ('hat', -1)]) self.cmp.do.insert_one({}) self.cmp.compare.index_information() def test__drop_index(self): self.cmp.do.insert_one({}) self.cmp.compare.create_index([('name', 1), ('hat', -1)]) self.cmp.compare.drop_index([('name', 1), ('hat', -1)]) self.cmp.compare.index_information() def test__drop_index_by_name(self): self.cmp.do.insert_one({}) results = self.cmp.compare.create_index('name') self.cmp.compare.drop_index(results['real']) self.cmp.compare.index_information() def test__index_information(self): self.cmp.do.insert_one({}) self.cmp.compare.index_information() def test__list_indexes(self): self.cmp.do.insert_one({}) self.cmp.compare_ignore_order.sort_by(lambda i: i['name']).list_indexes() def test__empty_logical_operators(self): for operator in ('$or', '$and', '$nor'): self.cmp.compare_exceptions.find({operator: []}) def test__rename(self): input_ = {'_id': 1, 'foo': 'bar'} self.cmp.do.insert_one(input_) query = {'_id': 1} update = {'$rename': {'foo': 'bar'}} self.cmp.do.update_one(query, update=update) self.cmp.compare.find() def test__rename_collection(self): self.cmp.do.insert_one({'_id': 1, 'foo': 'bar'}) self.cmp.compare.rename('new_name') self.cmp.compare.find() def test__set_equals(self): self.cmp.do.insert_many([ {'array': ['one', 'three']}, ]) self.cmp.compare.aggregate([{'$project': { '_id': 0, 'same_array': {'$setEquals': ['$array', '$array']}, 'eq_array': {'$setEquals': [['one', 'three'], '$array']}, 'ne_array': {'$setEquals': [['one', 'two'], '$array']}, 'eq_in_another_order': {'$setEquals': [['one', 'two'], ['two', 'one']]}, 'ne_in_another_order': {'$setEquals': [['one', 'two'], ['three', 'one', 'two']]}, 'three_equal': {'$setEquals': [['one', 'two'], ['two', 'one'], ['one', 'two']]}, 'three_not_equal': {'$setEquals': [['one', 'three'], ['two', 'one'], ['two', 'one']]}, }}]) @skipIf( helpers.PYMONGO_VERSION < version.parse('4.0'), 'pymongo v4 
dropped map reduce methods') def test__map_reduce_fails(self): self.cmp.compare_exceptions.map_reduce(Code(''), Code(''), 'myresults') self.cmp.compare_exceptions.inline_map_reduce(Code(''), Code('')) self.cmp.compare_exceptions.group(['a'], {'a': {'$lt': 3}}, {'count': 0}, Code(''' function(cur, result) { result.count += cur.count } ''')) @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 dropped group method') @skipIf(helpers.PYMONGO_VERSION < version.parse('3.6'), 'pymongo v3.6 broke group method') def test__group_fails(self): self.cmp.compare_exceptions.group(['a'], {'a': {'$lt': 3}}, {'count': 0}, Code(''' function(cur, result) { result.count += cur.count } ''')) def test__aggregate_system_variables_generate_array(self): self.cmp.do.drop() self.cmp.do.insert_one( {'name': 'foo', 'errors': [ {'error_type': 1, 'description': 'problem 1'}, {'error_type': 2, 'description': 'problem 2'}]}) self.cmp.compare.aggregate([{'$project': {'error_type': '$$ROOT.errors.error_type'}}]) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') @skipIf(not _HAVE_MAP_REDUCE, 'execjs not installed') @skipIf(helpers.PYMONGO_VERSION >= version.parse('4.0'), 'pymongo v4 dropped map reduce') class CollectionMapReduceTest(TestCase): def setUp(self): self.db = mongomock.MongoClient().map_reduce_test self.data = [{'x': 1, 'tags': ['dog', 'cat']}, {'x': 2, 'tags': ['cat']}, {'x': 3, 'tags': ['mouse', 'cat', 'dog']}, {'x': 4, 'tags': []}] for item in self.data: self.db.things.insert_one(item) self.map_func = Code(''' function() { this.tags.forEach(function(z) { emit(z, 1); }); }''') self.reduce_func = Code(''' function(key, values) { var total = 0; for(var i = 0; i= version.parse('3.6'), 'pymongo v3.6 broke group') class GroupTest(_CollectionComparisonTest): def setUp(self): _CollectionComparisonTest.setUp(self) self._id1 = ObjectId() self.data = [ {'a': 1, 'count': 4}, {'a': 1, 'count': 2}, {'a': 1, 'count': 4}, {'a': 2, 'count': 3}, {'a': 2, 'count': 1}, {'a': 1, 
'count': 5}, {'a': 4, 'count': 4}, {'b': 4, 'foo': 4}, {'b': 2, 'foo': 3, 'name': 'theone'}, {'b': 1, 'foo': 2}, {'b': 1, 'foo': self._id1}, ] self.cmp.do.insert_many(self.data) def test__group1(self): key = ['a'] initial = {'count': 0} condition = {'a': {'$lt': 3}} reduce_func = Code(''' function(cur, result) { result.count += cur.count } ''') self.cmp.compare.group(key, condition, initial, reduce_func) def test__group2(self): reduce_func = Code(''' function(cur, result) { result.count += 1 } ''') self.cmp.compare.group( key=['b'], condition={'foo': {'$in': [3, 4]}, 'name': 'theone'}, initial={'count': 0}, reduce=reduce_func) def test__group3(self): reducer = Code(''' function(obj, result) {result.count+=1 } ''') conditions = {'foo': {'$in': [self._id1]}} self.cmp.compare.group( key=['foo'], condition=conditions, initial={'count': 0}, reduce=reducer) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') class MongoClientAggregateTest(_CollectionComparisonTest): def setUp(self): super(MongoClientAggregateTest, self).setUp() self.data = [ {'_id': ObjectId(), 'a': 1, 'b': 1, 'count': 4, 'swallows': ['European swallow'], 'date': datetime.datetime(2015, 10, 1, 10, 0)}, {'_id': ObjectId(), 'a': 1, 'b': 1, 'count': 2, 'swallows': ['African swallow'], 'date': datetime.datetime(2015, 12, 1, 12, 0)}, {'_id': ObjectId(), 'a': 1, 'b': 2, 'count': 4, 'swallows': ['European swallow'], 'date': datetime.datetime(2014, 10, 2, 12, 0)}, {'_id': ObjectId(), 'a': 2, 'b': 2, 'count': 3, 'swallows': ['African swallow', 'European swallow'], 'date': datetime.datetime(2015, 1, 2, 10, 0)}, {'_id': ObjectId(), 'a': 2, 'b': 3, 'count': 1, 'swallows': [], 'date': datetime.datetime(2013, 1, 3, 12, 0)}, {'_id': ObjectId(), 'a': 1, 'b': 4, 'count': 5, 'swallows': ['African swallow', 'European swallow'], 'date': datetime.datetime(2015, 8, 4, 12, 0)}, {'_id': ObjectId(), 'a': 4, 'b': 4, 'count': 4, 'swallows': ['unladen swallow'], 'date': datetime.datetime(2014, 7, 4, 13, 0)}] for item in 
self.data: self.cmp.do.insert_one(item) def test__aggregate1(self): pipeline = [ {'$match': {'a': {'$lt': 3}}}, {'$sort': {'_id': -1}}, ] self.cmp.compare.aggregate(pipeline) def test__aggregate2(self): pipeline = [ {'$group': {'_id': '$a', 'count': {'$sum': '$count'}}}, {'$match': {'a': {'$lt': 3}}}, {'$sort': {'_id': -1, 'count': 1}}, ] self.cmp.compare.aggregate(pipeline) def test__aggregate3(self): pipeline = [ {'$group': {'_id': 'a', 'count': {'$sum': '$count'}}}, {'$match': {'a': {'$lt': 3}}}, {'$sort': {'_id': -1, 'count': 1}}, {'$skip': 1}, {'$limit': 2}] self.cmp.compare.aggregate(pipeline) def test__aggregate4(self): pipeline = [ {'$unwind': '$swallows'}, {'$sort': {'count': -1, 'swallows': -1}}] self.cmp.compare.aggregate(pipeline) def test__aggregate5(self): pipeline = [ {'$group': {'_id': {'id_a': '$a'}, 'total': {'$sum': '$count'}, 'avg': {'$avg': '$count'}}}, {'$sort': {'_id.a': 1, 'total': 1, 'avg': 1}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate6(self): pipeline = [ {'$group': {'_id': {'id_a': '$a', 'id_b': '$b'}, 'total': {'$sum': '$count'}, 'avg': {'$avg': '$count'}}}, {'$sort': {'_id.id_a': 1, '_id.id_b': 1, 'total': 1, 'avg': 1}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate7(self): pipeline = [ {'$group': {'_id': {'id_a': '$a', 'id_b': {'$year': '$date'}}, 'total': {'$sum': '$count'}, 'avg': {'$avg': '$count'}}}, {'$sort': {'_id.id_a': 1, '_id.id_b': 1, 'total': 1, 'avg': 1}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate8(self): pipeline = [ {'$group': {'_id': None, 'counts': {'$sum': '$count'}}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate9(self): pipeline = [ {'$group': {'_id': {'id_a': '$a'}, 'total': {'$sum': '$count'}, 'avg': {'$avg': '$count'}}}, {'$group': {'_id': None, 'counts': {'$sum': '$total'}}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate10(self): # group on compound index self.cmp.do.delete_many({}) data = [ {'_id': ObjectId(), 'key_1': {'sub_key_1': 'value_1'}, 
'nb': 1}, {'_id': ObjectId(), 'key_1': {'sub_key_1': 'value_2'}, 'nb': 1}, {'_id': ObjectId(), 'key_1': {'sub_key_1': 'value_1'}, 'nb': 2} ] for item in data: self.cmp.do.insert_one(item) pipeline = [ {'$group': {'_id': '$key_1.sub_key_1', 'nb': {'$sum': '$nb'}}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate11(self): pipeline = [ {'$group': {'_id': None, 'max_count': {'$max': '$count'}, 'min_count': {'$min': '$count'}}}, ] self.cmp.compare.aggregate(pipeline) def test__aggregate12(self): pipeline = [ {'$group': {'_id': '$a', 'max_count': {'$max': '$count'}, 'min_count': {'$min': '$count'}}}, {'$sort': {'_id': 1}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate13(self): pipeline = [ {'$sort': {'date': 1}}, {'$group': {'_id': None, 'last_date': {'$last': '$date'}, 'first_date': {'$first': '$date'}}}, ] self.cmp.compare.aggregate(pipeline) def test__aggregate_on_no_data(self): pipeline = [ {'$sort': {'date': 1}}, {'$group': { '_id': None, 'last_unkown': {'$last': '$unkown_field'}, 'first_unknown': {'$first': '$unknown_field'}, }}, ] self.cmp.compare.aggregate(pipeline) def test__aggregate14(self): pipeline = [ {'$sort': {'date': 1}}, {'$group': {'_id': '$a', 'last_date': {'$last': '$date'}, 'first_date': {'$first': '$date'}}}, {'$sort': {'_id': 1}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate_group_by_dbref(self): self.cmp.do.insert_many([ {'myref': DBRef('a', '1')}, {'myref': DBRef('a', '1')}, {'myref': DBRef('a', '2')}, {'myref': DBRef('b', '1')}, ]) self.cmp.compare.aggregate([ {'$group': {'_id': '$myref'}} ]) def test__aggregate_project_include_in_inclusion(self): pipeline = [ {'$project': {'a': 1, 'b': 1}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate_project_exclude_in_exclusion(self): pipeline = [ {'$project': {'a': 0, 'b': 0}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate_project_exclude_id_in_inclusion(self): pipeline = [ {'$project': {'a': 1, '_id': 0}} ] 
self.cmp.compare.aggregate(pipeline) def test__aggregate_project_with_subfields(self): self.cmp.do.insert_many([ {'a': {'b': 3}, 'other': 1}, {'a': {'c': 3}}, {'b': {'c': 3}}, {'a': 5}, ]) pipeline = [ {'$project': {'a.b': 1}} ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate_project_with_subfields_exclude(self): self.cmp.do.insert_many([ {'a': {'b': 3}, 'other': 1}, {'a': {'b': 3, 'd': 5}}, {'a': {'c': 3, 'd': 5}}, {'b': {'c': 3}}, {'a': 5}, ]) pipeline = [ {'$project': {'a.b': 0}} ] self.cmp.compare_ignore_order.aggregate(pipeline) def test_aggregate_project_with_missing_subfields(self): self.cmp.do.insert_many([ {'a': {'b': 3}, 'other': 1}, {'a': {'b': {'c': 4}, 'd': 5}}, {'a': {'c': 3, 'd': 5}}, {'b': {'c': 3}}, {'a': 5}, ]) pipeline = [ {'$project': {'_id': False, 'e': '$a.b.c'}} ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate_unwind_project_id(self): self.cmp.do.insert_one({ '_id': 'id0', 'c2': [ {'_id': 'id1', 'o': 'x'}, {'_id': 'id2', 'o': 'y'}, {'_id': 'id3', 'o': 'z'}, ], }) pipeline = [ {'$unwind': '$c2'}, {'$project': {'_id': '$c2._id', 'o': '$c2.o'}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate17(self): pipeline = [ {'$project': {'_id': 0, 'created': {'$subtract': [{'$min': ['$a', '$b']}, '$count']}}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate18(self): pipeline = [ {'$project': {'_id': 0, 'created': {'$subtract': ['$a', '$b']}}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate19(self): pipeline = [ {'$project': {'_id': 0, 'created': {'$subtract': ['$a', 1]}}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate20(self): pipeline = [{'$project': { '_id': 0, 'abs': {'$abs': '$b'}, 'add': {'$add': ['$a', 1, '$b']}, 'ceil': {'$ceil': 8.35}, 'div': {'$divide': ['$a', 1]}, 'exp': {'$exp': 2}, 'floor': {'$floor': 4.65}, 'ln': {'$ln': 100}, 'log': {'$log': [8, 2]}, 'log10': {'$log10': 1000}, 'mod': {'$mod': [46, 9]}, 'multiply': {'$multiply': [5, '$a', 
'$b']}, 'pow': {'$pow': [4, 2]}, 'sqrt': {'$sqrt': 100}, 'trunc': {'$trunc': 8.35}, }}] self.cmp.compare.aggregate(pipeline) def test__aggregate21(self): pipeline = [ {'$group': {'_id': '$a', 'count': {'$sum': 1}}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate22(self): pipeline = [ {'$group': {'_id': {'$gte': ['$a', 2]}, 'total': {'$sum': '$count'}}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate23(self): # make sure we aggregate compound keys correctly pipeline = [ {'$group': {'_id': {'id_a': '$a', 'id_b': '$b'}, 'total': {'$sum': '$count'}}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate24(self): # make sure we aggregate zero rows correctly pipeline = [ {'$match': {'_id': '123456'}}, {'$group': {'_id': {'$eq': ['$a', 1]}, 'total': {'$sum': '$count'}}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate25(self): pipeline = [ {'$group': {'_id': {'$eq': [{'$year': '$date'}, 2015]}}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate26(self): pipeline = [ {'$group': {'_id': {'$eq': [{'$year': '$date'}, 2015]}, 'total': {'$sum': '$count'}}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate27(self): # test $lookup stage pipeline = [ {'$lookup': { 'from': self.collection_name, 'localField': 'a', 'foreignField': 'b', 'as': 'lookup' }} ] self.cmp.compare.aggregate(pipeline) def test__aggregate27b(self): # test $graphLookup stage self.cmp.do.delete_many({}) data = [ {'_id': ObjectId(), 'name': 'a', 'child': 'b', 'val': 2}, {'_id': ObjectId(), 'name': 'b', 'child': 'c', 'val': 3}, {'_id': ObjectId(), 'name': 'c', 'child': None, 'val': 4}, {'_id': ObjectId(), 'name': 'd', 'child': 'a', 'val': 5} ] for item in data: self.cmp.do.insert_one(item) pipeline = [ {'$match': {'name': 'a'}}, {'$graphLookup': { 'from': self.collection_name, 'startWith': '$child', 'connectFromField': 'child', 'connectToField': 'name', 'as': 'lookup' }}, 
{'$unwind': '$lookup'}, {'$sort': {'lookup.name': 1}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate28(self): pipeline = [{'$group': { '_id': '$b', 'total2015': {'$sum': {'$cond': [{'$ne': [{'$year': '$date'}, 2015]}, 0, 1]}}, }}] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate29(self): # group addToSet pipeline = [ {'$group': {'_id': '$a', 'nb': {'$addToSet': '$count'}}}, {'$sort': {'_id': 1}} ] # self.cmp.compare cannot be used as addToSet returns elements in an unpredictable order aggregations = self.cmp.do.aggregate(pipeline) expected = list(aggregations['real']) result = list(aggregations['fake']) self.assertEqual(len(result), len(expected)) for expected_elt, result_elt in zip(expected, result): self.assertCountEqual(expected_elt.keys(), result_elt.keys()) for key in result_elt: if isinstance(result_elt[key], list): self.assertCountEqual(result_elt[key], expected_elt[key], msg=key) else: self.assertEqual(result_elt[key], expected_elt[key], msg=key) def test__aggregate30(self): # group addToSet dict element self.cmp.do.delete_many({}) data = [ {'a': {'c': '1', 'd': 1}, 'b': {'c': '2', 'd': 2}}, {'a': {'c': '1', 'd': 3}, 'b': {'c': '4', 'd': 4}}, {'a': {'c': '5', 'd': 1}, 'b': {'c': '6', 'd': 6}}, {'a': {'c': '5', 'd': 2}, 'b': {'c': '6', 'd': 6}} ] self.cmp.do.insert_many(data) pipeline = [ {'$group': {'_id': 'a.c', 'nb': {'$addToSet': 'b'}}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate31(self): # group addToSet creating dict pipeline = [ {'$group': {'_id': '$count', 'set': {'$addToSet': {'a': '$a', 'b': '$b'}}}}, ] # self.cmp.compare cannot be used as addToSet returns elements in an unpredictable order aggregations = self.cmp.do.aggregate(pipeline) expected = list(aggregations['real']) result = list(aggregations['fake']) self.assertEqual(len(result), len(expected)) set_expected = set([ tuple(sorted(e.items())) for elt in expected for e in elt['set'] ]) set_result = set([ tuple(sorted(e.items())) for 
elt in result for e in elt['set'] ]) self.assertEqual(set_result, set_expected) def test__aggregate_add_to_set_missing_value(self): self.cmp.do.delete_many({}) data = [ {'a': {'c': '1', 'd': 1}, 'b': 1}, {'a': {'c': '1', 'd': 2}} ] self.cmp.do.insert_many(data) pipeline = [ {'$group': {'_id': 'a.c', 'nb': {'$addToSet': 'b'}}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate32(self): self.cmp.do.drop() self.cmp.do.insert_many([ {'group': 'one'}, {'group': 'one'}, {'group': 'one', 'data': None}, {'group': 'one', 'data': 0}, {'group': 'one', 'data': 2}, {'group': 'one', 'data': {'a': 1}}, {'group': 'one', 'data': [1, 2]}, {'group': 'one', 'data': [3, 4]}, ]) pipeline = [{'$group': { '_id': '$group', 'count': {'$sum': 1}, 'countData': {'$sum': {'$cond': ['$data', 1, 0]}}, 'countDataExists': {'$sum': {'$cond': { 'if': {'$gt': ['$data', None]}, 'then': 1, 'else': 0, }}}, }}] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate33(self): self.cmp.do.drop() self.cmp.do.insert_one({'_id': 1, 'a': 2, 'b': 3, 'c': '$d'}) pipeline = [{'$project': { '_id': 0, 'max': {'$max': [5, 9, '$a', None]}, 'min': {'$min': [8, 2, None, 3, '$a', '$b']}, 'avg': {'$avg': [4, 2, None, 3, '$a', '$b', 4]}, 'sum': {'$sum': [4, 2, None, 3, '$a', '$b', {'$sum': [0, 1, '$b']}]}, 'maxString': {'$max': [{'$literal': '$b'}, '$c']}, 'maxNone': {'$max': [None, None]}, 'minNone': {'$min': [None, None]}, 'avgNone': {'$avg': ['a', None]}, 'sumNone': {'$sum': ['a', None]}, }}] self.cmp.compare.aggregate(pipeline) def test__aggregate34(self): self.cmp.do.drop() self.cmp.do.insert_one({'_id': 1, 'a': 'Hello', 'b': 'World'}) pipeline = [{'$project': { '_id': 0, 'concat': {'$concat': ['$a', ' Dear ', '$b']}, 'concat_none': {'$concat': ['$a', None, '$b']}, 'sub1': {'$substr': ['$a', 0, 4]}, 'lower': {'$toLower': '$a'}, 'lower_err': {'$toLower': None}, 'split_string_none': {'$split': [None, 'l']}, 'split_string_missing': {'$split': ['$missingField', 'l']}, 
'split_delimiter_none': {'$split': ['$a', None]}, 'split_delimiter_missing': {'$split': ['$a', '$missingField']}, 'split': {'$split': ['$a', 'l']}, 'strcasecmp': {'$strcasecmp': ['$a', '$b']}, 'upper': {'$toUpper': '$a'}, 'upper_err': {'$toUpper': None}, }}] self.cmp.compare.aggregate(pipeline) def test__aggregate_regexpmatch(self): self.cmp.do.insert_many([ {'_id': 1, 'description': 'Single LINE description.'}, {'_id': 2, 'description': 'First lines\nsecond line'}, {'_id': 3, 'description': 'Many spaces before line'}, {'_id': 4, 'description': 'Multiple\nline descriptions'}, {'_id': 5, 'description': 'anchors, links and hyperlinks'}, {'_id': 6, 'description': u'métier work vocation'} ]) self.cmp.compare.aggregate([{'$addFields': { 'result': {'$regexMatch': {'input': '$description', 'regex': 'line'}}, }}]) self.cmp.compare.aggregate([{'$addFields': { 'result': {'$regexMatch': {'input': '$description', 'regex': 'lin(e|k)'}}, }}]) self.cmp.compare.aggregate([{'$addFields': { 'result': {'$regexMatch': {'input': '$description', 'regex': 'line', 'options': 'i'}}, }}]) self.cmp.compare.aggregate([{'$addFields': { 'result': {'$regexMatch': {'input': '$description', 'regex': Regex('line', 'i')}}, }}]) self.cmp.compare.aggregate([{'$addFields': { 'result': {'$regexMatch': { 'input': '$description', 'regex': 'line(e|k) # matches line or link', 'options': 'x', }}, }}]) self.cmp.compare.aggregate([{'$addFields': { 'result': {'$regexMatch': { 'input': '$description', 'regex': 'm.*line', 'options': 'si', }}, }}]) # Missing fields self.cmp.compare.aggregate([{'$addFields': { 'result': {'$regexMatch': {'input': '$missing', 'regex': 'line'}}, }}]) self.cmp.compare.aggregate([{'$addFields': { 'result': {'$regexMatch': {'input': '$description', 'regex': '$missing'}}, }}]) # Exceptions self.cmp.compare_exceptions.aggregate([{'$addFields': { 'result': {'$regexMatch': ['$description', 'line']}, }}]) self.cmp.compare_exceptions.aggregate([{'$addFields': { 'result': {'$regexMatch': 
{'inut': '$description', 'regex': 'line'}}, }}]) self.cmp.compare_exceptions.aggregate([{'$addFields': { 'result': {'$regexMatch': {'input': '$description', 'regex': 'line', 'other': True}}, }}]) self.cmp.compare_exceptions.aggregate([{'$addFields': { 'result': {'$regexMatch': {'input': 42, 'regex': 'line'}}, }}]) self.cmp.compare_exceptions.aggregate([{'$addFields': { 'result': {'$regexMatch': {'input': '$description', 'regex': 'line', 'options': '?'}}, }}]) self.cmp.compare.aggregate([{'$addFields': { 'result': {'$regexMatch': { 'input': '$description', 'regex': Regex('line'), 'options': 'i'}}, }}]) self.cmp.compare_exceptions.aggregate([{'$addFields': { 'result': {'$regexMatch': { 'input': '$description', 'regex': re.compile('line', re.U), 'options': 'i'}}, }}]) self.cmp.compare_exceptions.aggregate([{'$addFields': { 'result': {'$regexMatch': { 'input': '$description', 'regex': re.compile('line', re.U)}}, }}]) self.cmp.compare_exceptions.aggregate([{'$addFields': { 'result': {'$regexMatch': { 'input': '$description', 'regex': Regex('line', 'i'), 'options': 'i'}}, }}]) self.cmp.compare_exceptions.aggregate([{'$addFields': { 'result': {'$regexMatch': { 'input': '$description', 'regex': Regex('line', 'u')}}, }}]) self.cmp.compare_exceptions.aggregate([{'$addFields': { 'result': {'$regexMatch': {'input': '$description', 'regex': 5}}, }}]) def test__aggregate35(self): self.cmp.do.drop() self.cmp.do.insert_one({ '_id': 1, 'a': 2, 'b': 3, 'c': '$d', 'd': decimal128.Decimal128('4') }) pipeline = [{'$project': { '_id': 0, 'sum': {'$sum': [4, 2, None, 3, '$a', '$b', '$d', {'$sum': [0, 1, '$b']}]}, 'sumNone': {'$sum': ['a', None]}, }}] self.cmp.compare.aggregate(pipeline) def test__aggregate_project_id_0(self): self.cmp.do.delete_many({}) self.cmp.do.insert_many([ {'_id': 4}, {'a': 5}, {}, ]) pipeline = [{'$project': {'_id': 0}}] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate_project_array_subfield(self): self.cmp.do.insert_many([ {'_id': 1, 'a': 
[{'b': 1, 'c': 2, 'd': 3}], 'e': 4}, {'_id': 2, 'a': [{'c': 12, 'd': 13}], 'e': 14}, {'_id': 3, 'a': [{'b': 21, 'd': 23}], 'e': 24}, {'_id': 4, 'a': [{'b': 31, 'c': 32}], 'e': 34}, {'_id': 5, 'a': [{'b': 41}], 'e': 44}, {'_id': 6, 'a': [{'c': 51}], 'e': 54}, {'_id': 7, 'a': [{'d': 51}], 'e': 54}, {'_id': 8, 'a': [ {'b': 61, 'c': 62, 'd': 63}, 65, 'foobar', {'b': 66, 'c': 67, 'd': 68}], 'e': 64}, {'_id': 9, 'a': []}, {'_id': 10, 'a': [1, 2, 3, 4]}, {'_id': 11, 'a': 'foobar'}, {'_id': 12, 'a': 5}, ]) pipeline = [{'$project': {'a.b': 1, 'a.c': 1}}] self.cmp.compare_ignore_order.aggregate(pipeline) def test__aggregate_project_array_size_missing(self): self.cmp.do.insert_one({'_id': 1}) self.cmp.compare_exceptions.aggregate([ {'$match': {'_id': 1}}, {'$project': {'a': {'$size': '$arr'}}}, ]) def test__aggregate_bucket(self): self.cmp.do.delete_many({}) self.cmp.do.insert_many([ { '_id': 1, 'title': 'The Pillars of Society', 'artist': 'Grosz', 'year': 1926, 'price': 199.99, }, { '_id': 2, 'title': 'Melancholy III', 'artist': 'Munch', 'year': 1902, 'price': 200.00, }, { '_id': 3, 'title': 'Dancer', 'artist': 'Miro', 'year': 1925, 'price': 76.04, }, { '_id': 4, 'title': 'The Great Wave off Kanagawa', 'artist': 'Hokusai', 'price': 167.30, }, { '_id': 5, 'title': 'The Persistence of Memory', 'artist': 'Dali', 'year': 1931, 'price': 483.00, }, { '_id': 6, 'title': 'Composition VII', 'artist': 'Kandinsky', 'year': 1913, 'price': 385.00, }, { '_id': 7, 'title': 'The Scream', 'artist': 'Munch', 'year': 1893, # No price }, { '_id': 8, 'title': 'Blue Flower', 'artist': "O'Keefe", 'year': 1918, 'price': 118.42, }, ]) self.cmp.compare.aggregate([{'$bucket': { 'groupBy': '$price', 'boundaries': [0, 200, 400], 'default': 'Other', 'output': { 'count': {'$sum': 1}, 'titles': {'$push': '$title'}, }, }}]) self.cmp.compare.aggregate([{'$bucket': { 'groupBy': '$price', 'boundaries': [0, 200, 400], 'default': 'Other', }}]) def test__aggregate_lookup_dot_in_local_field(self): 
self.cmp.do.delete_many({}) self.cmp.do.insert_many([ {'_id': 2, 'should': {'do': 'join'}}, {'_id': 3, 'should': {'do': 'not_join'}}, {'_id': 4, 'should': 'skip'}, {'_id': 5, 'should': 'join'}, {'_id': 6, 'should': 'join'}, {'_id': 7, 'should': 'skip'}, ]) pipeline = [ {'$lookup': { 'from': self.collection_name, 'localField': 'should.do', 'foreignField': 'should', 'as': 'b' }} ] self.cmp.compare.aggregate(pipeline) def test__aggregate_count(self): self.cmp.do.insert_many([ {'_id': i} for i in range(5) ]) self.cmp.compare.aggregate([ {'$count': 'my_count'} ]) def test__aggregate_if_null(self): self.cmp.do.insert_one({'_id': 1, 'elem_a': ''}) self.cmp.compare.aggregate([ { '$project': { 'a': {'$ifNull': ['$elem_a', '']}, 'b': {'$ifNull': ['$elem_b', '']}, } } ]) def test__aggregate_if_null_multi_field(self): self.cmp.do.insert_one({'_id': 1, 'elem_a': ''}) # Multiple input expressions in $ifNull are not supported in MongoDB v4.4 and earlier. if SERVER_VERSION > version.parse('4.4'): compare = self.cmp.compare else: compare = self.cmp.compare_exceptions compare.aggregate([ { '$project': { 'a_and_b': {'$ifNull': ['$elem_a', '$elem_b', '']}, 'b_and_a': {'$ifNull': ['$elem_b', '$elem_a', '']}, 'b_and_c': {'$ifNull': ['$elem_b', '$elem_c', '']}, } } ]) def test__aggregate_is_number(self): self.cmp.do.insert_one( {'_id': 1, 'int': 3, 'big_int': 3 ** 10, 'negative': -3, 'str': 'not_a_number', 'str_numeric': '3', 'float': 3.3, 'negative_float': -3.3, 'bool': True, 'none': None}) self.cmp.compare.aggregate([ {'$project': { '_id': False, 'int': {'$isNumber': '$int'}, 'big_int': {'$isNumber': '$big_int'}, 'negative': {'$isNumber': '$negative'}, 'str': {'$isNumber': '$str'}, 'str_numeric': {'$isNumber': '$str_numeric'}, 'float': {'$isNumber': '$float'}, 'negative_float': {'$isNumber': '$negative_float'}, 'bool': {'$isNumber': '$bool'}, 'none': {'$isNumber': '$none'}, }} ]) def test__aggregate_is_array(self): self.cmp.do.insert_one( { '_id': 1, 'list': [1, 2, 3], 'tuple': (1, 2, 
3), 'empty_list': [], 'empty_tuple': (), 'int': 3, 'str': '123', 'bool': True, 'none': None }) self.cmp.compare.aggregate([ {'$project': { '_id': False, 'list': {'$isArray': '$list'}, 'tuple': {'$isArray': '$tuple'}, 'empty_list': {'$isArray': '$empty_list'}, 'empty_tuple': {'$isArray': '$empty_tuple'}, 'int': {'$isArray': '$int'}, 'str': {'$isArray': '$str'}, 'bool': {'$isArray': '$bool'}, 'none': {'$isArray': '$none'} }} ]) def test__aggregate_facet(self): self.cmp.do.insert_many([ {'_id': i} for i in range(5) ]) self.cmp.compare.aggregate([ {'$facet': { 'pipeline_a': [{'$count': 'my_count'}], 'pipeline_b': [{'$group': {'_id': None}}]}} ]) def test__aggregate_project_rotate(self): self.cmp.do.insert_one({'_id': 1, 'a': 1, 'b': 2, 'c': 3}) self.cmp.compare.aggregate([ {'$project': {'a': '$b', 'b': '$a', 'c': 1}}, ]) def test__aggregate_unwind_options(self): self.cmp.do.drop() self.cmp.do.insert_many([ {'_id': 1, 'item': 'ABC', 'sizes': ['S', 'M', 'L']}, {'_id': 2, 'item': 'EFG', 'sizes': []}, {'_id': 3, 'item': 'IJK', 'sizes': 'M'}, {'_id': 4, 'item': 'LMN'}, {'_id': 5, 'item': 'XYZ', 'sizes': None}, ]) self.cmp.compare.aggregate([{'$unwind': {'path': '$sizes'}}]) self.cmp.compare.aggregate([ {'$unwind': {'path': '$sizes', 'includeArrayIndex': 'arrayIndex'}} ]) self.cmp.compare.aggregate([ {'$unwind': {'path': '$sizes', 'preserveNullAndEmptyArrays': True}}, ]) def test__aggregate_subtract_dates(self): self.cmp.compare.aggregate([{'$project': { '_id': 0, 'since': {'$subtract': ['$date', datetime.datetime(2014, 7, 4, 13, 0)]}, }}]) def test__aggregate_system_variables(self): self.cmp.do.drop() self.cmp.do.insert_many([ {'_id': 1}, {'_id': 2, 'parent_id': 1}, {'_id': 3, 'parent_id': 1}, ]) self.cmp.compare.aggregate([ {'$match': {'parent_id': {'$in': [1]}}}, {'$group': {'_id': 1, 'docs': {'$push': '$$ROOT'}}}, ]) def test__aggregate_date_operators(self): self.cmp.compare_ignore_order.aggregate([ {'$project': { 'doy': {'$dayOfYear': '$date'}, 'dom': {'$dayOfMonth': 
'$date'}, 'dow': {'$dayOfWeek': '$date'}, 'M': {'$month': '$date'}, 'w': {'$week': '$date'}, 'h': {'$hour': '$date'}, 'm': {'$minute': '$date'}, 's': {'$second': '$date'}, 'ms': {'$millisecond': '$date'}, }}, ]) def test__aggregate_in(self): self.cmp.compare_ignore_order.aggregate([ {'$project': { 'count': '$count', 'in': {'$in': ['$count', [1, 4, 5]]}, }}, ]) def test__aggregate_switch(self): self.cmp.compare_ignore_order.aggregate([ {'$project': { 'compare_with_3': { '$switch': { 'branches': [ {'case': {'$eq': ['$count', 3]}, 'then': 'equals 3'}, {'case': {'$gt': ['$count', 3]}, 'then': 'greater than 3'}, {'case': {'$lt': ['$count', 3]}, 'then': 'less than 3'} ], } }, 'equals_3': { '$switch': { 'branches': [ {'case': {'$eq': ['$count', 3]}, 'then': 'equals 3'}, ], 'default': 'not equal', } }, 'missing_field': { '$switch': { 'branches': [ {'case': '$missing_field', 'then': 'first case'}, {'case': True, 'then': '$missing_field'}, ], 'default': 'did not match', } }, }}, ]) def test__aggregate_switch_mongodb_to_bool(self): def build_switch(case): return { '$switch': { 'branches': [ {'case': case, 'then': 't'}, ], 'default': 'f', } } self.cmp.compare_ignore_order.aggregate([ {'$project': { 'undefined_value': build_switch('$not_existing_field'), 'false_value': build_switch(False), 'null_value': build_switch(None), 'zero_value': build_switch(0), 'true_value': build_switch(True), 'one_value': build_switch(1), 'empty_string': build_switch(''), 'empty_list': build_switch([]), 'empty_dict': build_switch({}), }}, ]) def test__aggregate_bug_473(self): """Regression test for bug https://github.com/mongomock/mongomock/issues/473.""" self.cmp.do.drop() self.cmp.do.insert_one({ 'name': 'first', 'base_value': 100, 'values_list': [ {'updated_value': 5}, {'updated_value': 15}, ], }) self.cmp.compare.aggregate([ {'$project': { 'name': 1, '_id': 0, 'sum': {'$sum': [ '$base_value', {'$arrayElemAt': ['$values_list.updated_value', -1]}, ]}, }}, ]) def 
test__aggregate_array_eleme_at(self): self.cmp.do.drop() self.cmp.do.insert_many([ {'values_list': [1, 2]}, {'values_list': [1, 2, 3]}, ]) self.cmp.compare.aggregate([{ '$project': { 'first_user_id': {'$arrayElemAt': ['$values_list', 2]}, 'other_user_id': {'$arrayElemAt': ['$values_list', -1]}, }, }]) def test_aggregate_bug_607(self): """Regression test for bug https://github.com/mongomock/mongomock/issues/607.""" self.cmp.do.drop() self.cmp.do.insert_one({ 'index': 2, 'values': [0, 1, 5] }) self.cmp.compare.aggregate([ {'$project': { 'values_index': {'$arrayElemAt': ['$values', '$index']} }} ]) self.cmp.compare.aggregate([ {'$project': { 'values_index': {'$arrayElemAt': ['$values', {'$add': [1, 1]}]} }} ]) def test__aggregate_first_last_in_array(self): self.cmp.do.drop() self.cmp.do.insert_one({ 'values': [0, 1, 5] }) self.cmp.compare.aggregate([ {'$project': { 'first': {'$first': '$values'}, 'last': {'$last': '$values'}, }} ]) def test__aggregate_cond_mongodb_to_bool(self): """Regression test for bug https://github.com/mongomock/mongomock/issues/650""" self.cmp.compare_ignore_order.aggregate([ {'$project': { # undefined aka KeyError 'undefined_value': {'$cond': ['$not_existing_field', 't', 'f']}, 'false_value': {'$cond': [False, 't', 'f']}, 'null_value': {'$cond': [None, 't', 'f']}, 'zero_value': {'$cond': [0, 't', 'f']}, 'true_value': {'$cond': [True, 't', 'f']}, 'one_value': {'$cond': [1, 't', 'f']}, 'empty_string': {'$cond': ['', 't', 'f']}, 'empty_list': {'$cond': [[], 't', 'f']}, 'empty_dict': {'$cond': [{}, 't', 'f']}, }}, ]) def test__aggregate_concatArrays(self): self.cmp.do.drop() self.cmp.do.insert_one({ '_id': 1, 'a': [1, 2], 'b': ['foo', 'bar', 'baz'], 'c': { 'arr1': [123] } }) pipeline = [{ '$project': { '_id': 0, 'concat': {'$concatArrays': ['$a', ['#', '*'], '$c.arr1', '$b']}, 'concat_array_expression': {'$concatArrays': '$b'}, 'concat_tuples': {'$concatArrays': ((1, 2, 3), (1,))}, 'concat_none': {'$concatArrays': None}, 'concat_missing_field': 
{'$concatArrays': '$foo'}, 'concat_none_item': {'$concatArrays': ['$a', None, '$b']}, 'concat_missing_field_item': {'$concatArrays': [[1, 2, 3], '$c.arr2']} } }] self.cmp.compare.aggregate(pipeline) def test__aggregate_concatArrays_exceptions(self): self.cmp.do.drop() self.cmp.do.insert_one({ '_id': 1, 'a': { 'arr1': [123] } }) self.cmp.compare_exceptions.aggregate([{ '$project': { 'concat_parameter_not_array': {'$concatArrays': 42} } }]) self.cmp.compare_exceptions.aggregate([{ '$project': { 'concat_item_not_array': {'$concatArrays': [[1, 2], '$a']} } }]) def test__aggregate_filter(self): self.cmp.do.drop() self.cmp.do.insert_many([ { '_id': 0, 'items': [ {'item_id': 43, 'quantity': 2, 'price': 10}, {'item_id': 2, 'quantity': 1, 'price': 240}, ], }, { '_id': 1, 'items': [ {'item_id': 23, 'quantity': 3, 'price': 110}, {'item_id': 103, 'quantity': 4, 'price': 5}, {'item_id': 38, 'quantity': 1, 'price': 300}, ], }, { '_id': 2, 'items': [ {'item_id': 4, 'quantity': 1, 'price': 23}, ], }, ]) self.cmp.compare.aggregate([{'$project': {'filtered_items': {'$filter': { 'input': '$items', 'as': 'item', 'cond': {'$gte': ['$$item.price', 100]}, }}}}]) self.cmp.compare.aggregate([{'$project': {'filtered_items': {'$filter': { 'input': '$items', 'cond': {'$lt': ['$$this.price', 100]}, }}}}]) def test__aggregate_map(self): self.cmp.do.insert_one({ 'array': [1, 2, 3, 4], }) self.cmp.compare.aggregate([{'$project': { '_id': 0, 'array': {'$map': { 'input': '$array', 'in': {'$multiply': ['$$this', '$$this']}, }}, 'custom_variable': {'$map': { 'input': '$array', 'as': 'self', 'in': {'$multiply': ['$$self', '$$self']}, }}, 'empty': {'$map': { 'input': [], 'in': {'$multiply': ['$$this', '$$this']}, }}, 'null': {'$map': { 'input': None, 'in': '$$this', }}, 'missing': {'$map': { 'input': '$missing.key', 'in': '$$this', }}, }}]) def test__aggregate_filter_in_arrayElemAt(self): self.cmp.do.drop() self.cmp.do.insert_many([ { '_id': 0, 'items': [ {'item_id': 11, 'category': 'book'}, 
{'item_id': 234, 'category': 'journal'} ] }, { '_id': 1, 'items': [ {'item_id': 23, 'category': 'book'} ] }, { '_id': 2, 'items': [ {'item_id': 232, 'category': 'book'} ] } ]) self.cmp.compare.aggregate([{'$project': {'item': { '$arrayElemAt': [ {'$filter': { 'input': '$items', 'cond': {'$eq': ['$$this.category', 'book']} }}, 0 ] }}}]) def test__aggregate_slice(self): self.cmp.do.drop() self.cmp.do.insert_many([ { '_id': 0, 'items': list(range(10)), }, { '_id': 1, 'items': list(range(10, 20)), }, { '_id': 2, 'items': list(range(20, 30)), }, ]) self.cmp.compare.aggregate([{'$project': {'slice': { '$slice': ['$items', 0] }}}]) self.cmp.compare.aggregate([{'$project': {'slice': { '$slice': ['$items', 5] }}}]) self.cmp.compare.aggregate([{'$project': {'slice': { '$slice': ['$items', 10] }}}]) self.cmp.compare.aggregate([{'$project': {'slice': { '$slice': ['$items', 0, 1] }}}]) self.cmp.compare.aggregate([{'$project': {'slice': { '$slice': ['$items', 0, 5] }}}]) self.cmp.compare.aggregate([{'$project': {'slice': { '$slice': ['$items', 5, 1] }}}]) self.cmp.compare.aggregate([{'$project': {'slice': { '$slice': ['$items', 5, 5] }}}]) self.cmp.compare.aggregate([{'$project': {'slice': { '$slice': ['$items', 0, 10000] }}}]) self.cmp.compare.aggregate([{'$project': {'slice': { '$slice': ['$items', -5] }}}]) self.cmp.compare.aggregate([{'$project': {'slice': { '$slice': ['$items', -10] }}}]) self.cmp.compare.aggregate([{'$project': {'slice': { '$slice': ['$items', -5, 5] }}}]) self.cmp.compare.aggregate([{'$project': {'slice': { '$slice': ['$items', -10, 5] }}}]) def test__aggregate_no_entries(self): pipeline = [ {'$match': {'a': {'$eq': 'Never going to happen'}}}, {'$out': 'new_collection'}, ] self.cmp.compare.aggregate(pipeline) cmp = self._create_compare_for_collection('new_collection') cmp.compare.find() def test__replace_root(self): self.cmp.do.drop() self.cmp.do.insert_many([ { '_id': 1, 'fruit': ['apples', 'oranges'], 'in_stock': {'oranges': 20, 'apples': 60}, 
'on_order': {'oranges': 35, 'apples': 75}, }, { '_id': 2, 'vegetables': ['beets', 'yams'], 'in_stock': {'beets': 130, 'yams': 200}, 'on_order': {'beets': 90, 'yams': 145}, }, ]) self.cmp.compare.aggregate([{'$replaceRoot': {'newRoot': '$in_stock'}}]) def test__replace_root_new_document(self): self.cmp.do.drop() self.cmp.do.insert_many([ {'_id': 1, 'first_name': 'Gary', 'last_name': 'Sheffield', 'city': 'New York'}, {'_id': 2, 'first_name': 'Nancy', 'last_name': 'Walker', 'city': 'Anaheim'}, {'_id': 3, 'first_name': 'Peter', 'last_name': 'Sumner', 'city': 'Toledo'}, ]) self.cmp.compare.aggregate([{'$replaceRoot': {'newRoot': { 'full_name': {'$concat': ['$first_name', '$last_name']}, }}}]) def test__insert_date_with_timezone(self): self.cmp.do.insert_one({ 'dateNoTz': datetime.datetime(2000, 1, 1, 12, 30, 30, 12745), 'dateTz': datetime.datetime( 2000, 1, 1, 12, 30, 30, 12745, tzinfo=UTCPlus2()), }) self.cmp.compare.find_one() def test_aggregate_date_with_timezone(self): self.cmp.do.drop() self.cmp.do.insert_one({ 'start_date': datetime.datetime(2011, 11, 4, 0, 5, 23) }) pipeline = [ { '$addFields': { 'year': {'$year': { 'date': '$start_date', 'timezone': 'America/New_York'} }, 'week': {'$week': { 'date': '$start_date', 'timezone': 'America/New_York'} }, 'dayOfWeek': {'$dayOfWeek': { 'date': '$start_date', 'timezone': 'America/New_York'} }, } }, {'$project': {'_id': 0}}, ] self.cmp.compare.aggregate(pipeline) def test__aggregate_add_fields(self): self.cmp.do.delete_many({}) self.cmp.do.insert_many([ {'a': 1, 'b': 2}, {}, {'nested': {'foo': 1}}, {'nested': 'not nested'}, ]) self.cmp.compare.aggregate([{'$addFields': { 'a': 3, 'c': {'$sum': [3, '$a', '$b']}, 'd': '$d', 'nested.foo': 5, }}]) def test__aggregate_add_fields_with_max_min(self): self.cmp.do.delete_many({}) self.cmp.do.insert_many([ {'_id': 4, 'dates': [ datetime.datetime(2020, 1, 10), datetime.datetime(2020, 1, 5), datetime.datetime(2020, 1, 7) ]}, {'_id': 5, 'dates': []} ]) pipeline = [ {'$addFields': { 
'max_date': {'$max': '$dates'}, 'min_date': {'$min': '$dates'} }} ] self.cmp.compare.aggregate(pipeline) def test__aggregate36(self): self.cmp.compare.aggregate([{'$project': {'c': {'$abs': -2}}}]) self.cmp.compare.aggregate([{'$project': {'d': {'$floor': 2.3}}}]) self.cmp.compare.aggregate([{'$project': {'e': {'$ln': None}}}]) self.cmp.compare.aggregate([{'$project': {'f': {'$exp': '$non_existent_key'}}}]) self.cmp.compare.aggregate([{'$project': {'g': {'$divide': [7, 3]}}}]) self.cmp.compare.aggregate([{'$project': {'h': {'$log': [None, 1]}}}]) self.cmp.compare.aggregate([{'$project': {'i': {'$mod': [1, None]}}}]) self.cmp.compare.aggregate([{'$project': {'j': {'$pow': [None, None]}}}]) self.cmp.compare.aggregate([{'$project': {'k': {'$subtract': [None, 1]}}}]) self.cmp.compare.aggregate([{'$project': {'k': {'$subtract': ['$non_existent_key', 1]}}}]) self.cmp.compare.aggregate([{'$project': {'o': {'$multiply': [4]}}}]) self.cmp.compare.aggregate([{'$project': {'p': {'$add': [1, 2, 3]}}}]) self.cmp.compare.aggregate([{'$project': {'s': {'$multiply': [1, None]}}}]) self.cmp.compare.aggregate([{'$project': {'t': {'$add': [None, 1]}}}]) self.cmp.compare.aggregate([{'$project': {'u': {'$multiply': ['$a', '$b', 4]}}}]) def test__aggregate_exception(self): self.cmp.compare_exceptions.aggregate([{'$project': {'c': {'$abs': [-2, 4]}}}]) self.cmp.compare_exceptions.aggregate([{'$project': {'c': {'$floor': []}}}]) self.cmp.compare_exceptions.aggregate([{'$project': {'c': {'$divide': 5}}}]) self.cmp.compare_exceptions.aggregate([{'$project': {'c': {'$log': [5]}}}]) self.cmp.compare_exceptions.aggregate([{'$project': {'c': {'$mod': [5, 3, 1]}}}]) self.cmp.compare_exceptions.aggregate([{'$project': {'c': {'$sum': []}}}]) self.cmp.compare_exceptions.aggregate([{'$project': {'c': {'$multiply': []}}}]) self.cmp.compare_exceptions.aggregate([{'$project': {'n': {'$add': '$a'}}}]) self.cmp.compare_exceptions.aggregate( [{'$project': {'q': {'$multiply': [1, '$non_existent_key']}}}]) 
self.cmp.compare_exceptions.aggregate([{'$project': {'r': {'$add': '$non_existent_key'}}}]) self.cmp.compare_exceptions.aggregate([{'$project': {'v': {'$multiply': '$b'}}}]) # TODO(pascal): Enable this test, for now it's not the same kind of error. # self.cmp.compare_exceptions.aggregate( # [{'$project': {'c': {'$add': ['$date', 1, '$date']}}}]) def test__aggregate_add_fields_with_sum_avg(self): self.cmp.do.delete_many({}) self.cmp.do.insert_many([ {'_id': 4, 'values': [10, 5, 7]}, {'_id': 5, 'values': []} ]) pipeline = [ {'$addFields': { 'max_val': {'$sum': '$values'}, 'min_val': {'$avg': '$values'} }} ] self.cmp.compare.aggregate(pipeline) def test_aggregate_to_string(self): self.cmp.do.drop() self.cmp.do.insert_one({ '_id': ObjectId('5dd6a8f302c91829ef248162'), 'boolean_true': True, 'boolean_false': False, 'integer': 100, 'date': datetime.datetime(2018, 3, 27, 0, 58, 51, 538000), }) pipeline = [ { '$addFields': { '_id': {'$toString': '$_id'}, 'boolean_true': {'$toString': '$boolean_true'}, 'boolean_false': {'$toString': '$boolean_false'}, 'integer': {'$toString': '$integer'}, 'date': {'$toString': '$date'}, 'none': {'$toString': '$notexist'} } } ] self.cmp.compare.aggregate(pipeline) def test__aggregate_to_decimal(self): self.cmp.do.drop() self.cmp.do.insert_one({ 'boolean_true': True, 'boolean_false': False, 'integer': 100, 'double': 1.999, 'decimal': decimal128.Decimal128('5.5000'), 'str_base_10_numeric': '123', 'str_negative_number': '-23', 'str_decimal_number': '1.99', 'str_not_numeric': '123a123', 'datetime': datetime.datetime.utcfromtimestamp(0), }) pipeline = [ { '$addFields': { 'boolean_true': {'$toDecimal': '$boolean_true'}, 'boolean_false': {'$toDecimal': '$boolean_false'}, 'integer': {'$toDecimal': '$integer'}, 'double': {'$toDecimal': '$double'}, 'decimal': {'$toDecimal': '$decimal'}, 'str_base_10_numeric': {'$toDecimal': '$str_base_10_numeric'}, 'str_negative_number': {'$toDecimal': '$str_negative_number'}, 'str_decimal_number': {'$toDecimal': 
'$str_decimal_number'}, 'datetime': {'$toDecimal': '$datetime'}, 'not_exist_field': {'$toDecimal': '$not_exist_field'}, } }, {'$project': {'_id': 0}}, ] self.cmp.compare.aggregate(pipeline) def test_aggregate_to_int(self): self.cmp.do.drop() self.cmp.do.insert_one({ 'boolean_true': True, 'boolean_false': False, 'integer': 100, 'double': 1.999, 'decimal': decimal128.Decimal128('5.5000') }) pipeline = [ { '$addFields': { 'boolean_true': {'$toInt': '$boolean_true'}, 'boolean_false': {'$toInt': '$boolean_false'}, 'integer': {'$toInt': '$integer'}, 'double': {'$toInt': '$double'}, 'decimal': {'$toInt': '$decimal'}, 'not_exist': {'$toInt': '$not_exist'}, } }, { '$project': { '_id': 0 } } ] self.cmp.compare.aggregate(pipeline) def test_aggregate_to_long(self): self.cmp.do.drop() self.cmp.do.insert_one({ 'boolean_true': True, 'boolean_false': False, 'integer': 100, 'double': 1.999, 'decimal': decimal128.Decimal128('5.5000') }) pipeline = [ { '$addFields': { 'boolean_true': {'$toLong': '$boolean_true'}, 'boolean_false': {'$toLong': '$boolean_false'}, 'integer': {'$toLong': '$integer'}, 'double': {'$toLong': '$double'}, 'decimal': {'$toLong': '$decimal'}, 'not_exist': {'$toLong': '$not_exist'}, } }, { '$project': { '_id': 0 } } ] self.cmp.compare.aggregate(pipeline) def test_aggregate_date_to_string(self): self.cmp.do.drop() self.cmp.do.insert_one({ 'start_date': datetime.datetime(2011, 11, 4, 0, 5, 23) }) pipeline = [ { '$addFields': { 'start_date': { '$dateToString': {'format': '%Y/%m/%d %H:%M', 'date': '$start_date'} } } }, {'$project': {'_id': 0}}, ] self.cmp.compare.aggregate(pipeline) def test_aggregate_date_from_parts(self): self.cmp.do.drop() self.cmp.do.insert_one({ 'start_date': datetime.datetime(2022, 8, 3, 16, 6, 0) }) additional_fields_pipeline = [ { '$addFields': { 'start_date': { '$dateFromParts': { 'year': {'$year': '$start_date'}, 'month': {'$month': '$start_date'}, 'day': {'$dayOfMonth': '$start_date'}, } } } }, {'$project': {'_id': 0}}, ] 
self.cmp.compare.aggregate(additional_fields_pipeline) def test_aggregate_array_to_object(self): self.cmp.do.drop() self.cmp.do.insert_many([{ 'items': [['a', 1], ['b', 2], ['c', 3], ['a', 4]] }, { 'items': (['a', 1], ['b', 2], ['c', 3], ['a', 4]) }, { 'items': [('a', 1), ('b', 2), ('c', 3), ('a', 4)] }, { 'items': (('a', 1), ('b', 2), ('c', 3), ('a', 4)) }, { 'items': [['a', 1], ('b', 2), ['c', 3], ('a', 4)] }, { 'items': (['a', 1], ('b', 2), ['c', 3], ('a', 4)) }, { 'items': [{'k': 'a', 'v': 1}, {'k': 'b', 'v': 2}, {'k': 'c', 'v': 3}, {'k': 'a', 'v': 4}] }, { 'items': [] }, { 'items': () }, { 'items': None }]) pipeline = [ { '$project': { 'items': { '$arrayToObject': '$items' }, 'not_exists': { '$arrayToObject': '$nothing' } } }, {'$project': {'_id': 0}}, ] self.cmp.compare.aggregate(pipeline) # All of these items should trigger an error items = [[ {'$addFields': {'items': ''}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': 100}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [['a', 'b', 'c'], ['d', 2]]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [['a'], ['b', 2]]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [[]]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [{'k': 'a', 'v': 1, 't': 't'}, {'k': 'b', 'v': 2}]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [{'v': 1, 't': 't'}]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [{}]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ], [ {'$addFields': {'items': [['a', 1], {'k': 'b', 'v': 2}]}}, {'$project': {'items': {'$arrayToObject': '$items'}, '_id': 0}} ]] for item in items: self.cmp.compare_exceptions.aggregate(item) def 
test__create_duplicate_index(self): self.cmp.do.create_index([('value', 1)]) self.cmp.do.create_index([('value', 1)]) self.cmp.compare_exceptions.create_index([('value', 1)], unique=True) def test__partial_filter_expression_unique_index(self): self.cmp.do.delete_many({}) self.cmp.do.create_index( (('value', 1), ('partialFilterExpression_value', 1)), unique=True, partialFilterExpression={'partialFilterExpression_value': {'$exists': True}}) # We should be able to add documents with duplicated `value` if # partialFilterExpression_value isn't set. self.cmp.do.insert_one({'value': 4}) self.cmp.do.insert_one({'value': 4}) self.cmp.compare.find({'value': 4}) # We should be able to add documents with distinct `value` values and duplicated # `partialFilterExpression_value` value. self.cmp.do.insert_one({'partialFilterExpression_value': 1, 'value': 2}) self.cmp.do.insert_one({'partialFilterExpression_value': 1, 'value': 3}) self.cmp.compare.find({'partialFilterExpression_value': 1}) # We should not be able to add documents with duplicated `partialFilterExpression_value` # and `value` values. 
self.cmp.do.insert_one({'partialFilterExpression_value': 2, 'value': 3}) self.cmp.compare_exceptions.insert_one({'partialFilterExpression_value': 2, 'value': 3}) self.cmp.compare.find({'partialFilterExpression_value': 2, 'value': 3}) self.cmp.compare.find({}) def test_aggregate_project_with_boolean(self): self.cmp.do.drop() # Test with no items self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$and': []}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$or': []}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$not': {}}}} ]) # Tests following are with one item self.cmp.do.insert_one({ 'items': [] }) # Test with 0 arguments self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$and': []}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$or': []}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$not': {}}}} ]) # Test with one argument self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$and': [True]}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$or': [True]}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$not': True}}} ]) # Test with two arguments self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$and': [True, True]}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$and': [False, True]}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$and': [True, False]}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$and': [False, False]}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$or': [True, True]}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$or': [False, True]}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$or': [True, False]}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$or': [False, False]}}} ]) # Following tests are with more than two items 
self.cmp.do.insert_many([ {'items': []}, {'items': []} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$and': []}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$or': []}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$not': {}}}} ]) # Test with something else than boolean self.cmp.do.insert_one({ 'items': ['foo'] }) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$and': [{'$eq': ['$items', ['foo']]}]}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$or': [{'$eq': ['$items', ['foo']]}]}}} ]) self.cmp.compare.aggregate([ {'$project': {'_id': 0, 'items': {'$not': {'$eq': ['$items', ['foo']]}}}} ]) def test__aggregate_project_missing_fields(self): self.cmp.do.insert_one({'_id': 1, 'arr': {'a': 2, 'b': 3}}) self.cmp.compare.aggregate([ {'$match': {'_id': 1}}, {'$project': OrderedDict([ ('_id', False), ('rename_dot', '$arr.c'), ('a', '$arr.a') ])} ]) def test__aggregate_graph_lookup_missing_field(self): self.cmp.do.delete_many({}) self.cmp.do.insert_many([ {'_id': ObjectId(), 'name': 'a', 'child': 'b', 'val': 2}, {'_id': ObjectId(), 'name': 'b', 'child': 'c', 'val': 3}, {'_id': ObjectId(), 'name': 'c', 'child': None, 'val': 4}, {'_id': ObjectId(), 'name': 'd', 'child': 'a', 'val': 5} ]) pipeline = [ {'$match': {'name': 'a'}}, {'$graphLookup': { 'from': self.collection_name, 'startWith': '$fieldThatDoesNotExist', 'connectFromField': 'child', 'connectToField': 'name', 'as': 'lookup' }}, {'$unwind': '$lookup'}, {'$sort': {'lookup.name': 1}} ] self.cmp.compare.aggregate(pipeline) pipeline = [ {'$match': {'name': 'a'}}, {'$graphLookup': { 'from': self.collection_name, 'startWith': {'$concat': ['a', '$fieldThatDoesNotExist']}, 'connectFromField': 'child', 'connectToField': 'name', 'as': 'lookup' }}, {'$unwind': '$lookup'}, {'$sort': {'lookup.name': 1}} ] self.cmp.compare.aggregate(pipeline) def test__aggregate_merge_objects(self): self.cmp.do.delete_many({}) 
self.cmp.do.insert_many([ {'_id': ObjectId(), 'a': '1', 'b': {'c': '1', 'd': 2}}, {'_id': ObjectId(), 'a': '1', 'b': {'e': 3, 'f': '4'}}, {'_id': ObjectId(), 'a': '1', 'c': '2'}, {'_id': ObjectId(), 'a': '1', 'b': None}, {'_id': ObjectId(), 'a': 2, 'b': None}, {'_id': ObjectId(), 'a': 2, 'b': {'c': None, 'd': 6}}, {'_id': ObjectId(), 'a': 2, 'b': {'c': '7', 'd': None, 'e': 9, 'f': '10'}}, {'_id': ObjectId(), 'a': 3, 'b': None}, {'_id': ObjectId(), 'a': 3, 'b': dict()}, {'_id': ObjectId(), 'a': 4, 'b': None}, ]) pipeline = [ {'$group': { '_id': '$a', 'merged_b': {'$mergeObjects': '$b'}, }} ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__add_fields(self): self.cmp.compare.aggregate([{'$addFields': {'c': 3}}]) self.cmp.compare.aggregate([{'$addFields': {'c': 4}}]) self.cmp.compare.aggregate([{'$addFields': {'b': {'$add': ['$a', '$b', 5]}}}]) def test__aggregate_with_missing_fields1(self): self.cmp.do.delete_many({}) data = [ {'_id': ObjectId(), 'a': 0, 'b': 1}, {'_id': ObjectId(), 'a': 0}, {'_id': ObjectId()}, ] self.cmp.do.insert_many(data) pipeline = [ {'$group': {'_id': '$a', 'b': {'$sum': '$b'}}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__group_with_missing_fields1(self): self.cmp.do.delete_many({}) data = [ {'_id': ObjectId(), 'a': 0, 'b': 0}, {'_id': ObjectId(), 'a': 0}, {'_id': ObjectId(), 'b': 0}, {'_id': ObjectId()}, ] self.cmp.do.insert_many(data) pipeline = [ {'$group': {'_id': {'a': '$a', 'b': '$b'}}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__group_with_missing_fields2(self): self.cmp.do.delete_many({}) data = [ {'_id': ObjectId(), 'a': 0}, {'_id': ObjectId()}, ] self.cmp.do.insert_many(data) pipeline = [ {'$group': {'_id': {'a': '$a'}}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) def test__group_with_missing_fields3(self): self.cmp.do.delete_many({}) data = [ {'_id': ObjectId(), 'a': 0}, {'_id': ObjectId()}, ] self.cmp.do.insert_many(data) pipeline = [ {'$group': {'_id': '$a'}}, ] 
self.cmp.compare_ignore_order.aggregate(pipeline) def test__add_fields_with_missing_fields(self): self.cmp.do.delete_many({}) data = [ {'a': 0}, {}, ] self.cmp.do.insert_many(data) pipeline = [ {'$addFields': {'b': '$a'}}, ] self.cmp.compare_ignore_order.aggregate(pipeline) @skipIf(not helpers.HAVE_PYMONGO, 'pymongo not installed') class MongoClientGraphLookupTest(_CollectionComparisonTest): def setUp(self): super(MongoClientGraphLookupTest, self).setUp() self.cmp_a = self._create_compare_for_collection('data_a') self.cmp_b = self._create_compare_for_collection('data_b') def test_graphlookup_basic(self): data_a = [ {'_id': 0, 'airport': 'JFK', 'connects': ['BOS', 'ORD']}, {'_id': 1, 'airport': 'BOS', 'connects': ['JFK', 'PWM']}, {'_id': 2, 'airport': 'ORD', 'connects': ['JFK']}, {'_id': 3, 'airport': 'PWM', 'connects': ['BOS', 'LHR']}, {'_id': 4, 'airport': 'LHR', 'connects': ['PWM']}, ] data_b = [ {'_id': 1, 'name': 'Dev', 'nearestAirport': 'JFK'}, {'_id': 2, 'name': 'Eliot', 'nearestAirport': 'JFK'}, {'_id': 3, 'name': 'Jeff', 'nearestAirport': 'BOS'}, ] query = [ { '$graphLookup': { 'from': 'a', 'startWith': '$nearestAirport', 'connectFromField': 'connects', 'connectToField': 'airport', 'maxDepth': 2, 'depthField': 'numConnections', 'as': 'destinations' } } ] self.cmp_a.do.insert_many(data_a) self.cmp_b.do.insert_many(data_b) self.cmp_b.compare.aggregate(query) def test_graphlookup_nested_array(self): data_a = [ {'_id': 0, 'airport': 'JFK', 'connects': [ {'to': 'BOS', 'distance': 200}, {'to': 'ORD', 'distance': 800}]}, {'_id': 1, 'airport': 'BOS', 'connects': [ {'to': 'JFK', 'distance': 200}, {'to': 'PWM', 'distance': 2000}]}, {'_id': 2, 'airport': 'ORD', 'connects': [{'to': 'JFK', 'distance': 800}]}, {'_id': 3, 'airport': 'PWM', 'connects': [ {'to': 'BOS', 'distance': 2000}, {'to': 'LHR', 'distance': 6000}]}, {'_id': 4, 'airport': 'LHR', 'connects': [{'to': 'PWM', 'distance': 6000}]}, ] data_b = [ {'_id': 1, 'name': 'Dev', 'nearestAirport': 'JFK'}, {'_id': 2, 
'name': 'Eliot', 'nearestAirport': 'JFK'}, {'_id': 3, 'name': 'Jeff', 'nearestAirport': 'BOS'}, ] query = [ { '$graphLookup': { 'from': 'a', 'startWith': '$nearestAirport', 'connectFromField': 'connects.to', 'connectToField': 'airport', 'maxDepth': 2, 'depthField': 'numConnections', 'as': 'destinations' } } ] self.cmp_a.do.insert_many(data_a) self.cmp_b.do.insert_many(data_b) self.cmp_b.compare.aggregate(query) def test_graphlookup_nested_dict(self): data_b = [ {'_id': 1, 'name': 'Dev'}, {'_id': 2, 'name': 'Eliot', 'reportsTo': { 'name': 'Dev', 'from': '2016-01-01T00:00:00.000Z'}}, {'_id': 3, 'name': 'Ron', 'reportsTo': {'name': 'Eliot', 'from': '2016-01-01T00:00:00.000Z'}}, {'_id': 4, 'name': 'Andrew', 'reportsTo': { 'name': 'Eliot', 'from': '2016-01-01T00:00:00.000Z'}}, {'_id': 5, 'name': 'Asya', 'reportsTo': { 'name': 'Ron', 'from': '2016-01-01T00:00:00.000Z'}}, {'_id': 6, 'name': 'Dan', 'reportsTo': {'name': 'Andrew', 'from': '2016-01-01T00:00:00.000Z'}}, ] data_a = [{'_id': 1, 'name': 'x'}] query = [ { '$graphLookup': { 'from': 'b', 'startWith': '$name', 'connectFromField': 'reportsTo.name', 'connectToField': 'name', 'as': 'reportingHierarchy' } } ] self.cmp_a.do.insert_many(data_a) self.cmp_b.do.insert_many(data_b) self.cmp_b.compare.aggregate(query) def test__aggregate_let(self): self.cmp.do.insert_many([ {'_id': 1, 'price': 10, 'tax': 0.50, 'applyDiscount': True}, {'_id': 2, 'price': 10, 'tax': 0.25, 'applyDiscount': False}, ]) self.cmp.compare.aggregate([{'$project': { 'finalTotal': { '$let': { 'vars': { 'total': {'$add': ['$price', '$tax']}, 'discounted': {'$cond': {'if': '$applyDiscount', 'then': 0.9, 'else': 1}}, }, 'in': {'$multiply': ['$$total', '$$discounted']}, }, }, }}]) def test__aggregate_let_errors(self): self.cmp.do.insert_many([ {'_id': 1, 'price': 10, 'tax': 0.50, 'applyDiscount': True}, {'_id': 2, 'price': 10, 'tax': 0.25, 'applyDiscount': False}, ]) self.cmp.compare_exceptions.aggregate([{'$project': { 'finalTotal': { '$let': [{'total': 3}, 
{'$$total'}], }, }}]) self.cmp.compare_exceptions.aggregate([{'$project': { 'finalTotal': { '$let': { 'in': {'$multiply': ['4', '3']}, }, }, }}]) self.cmp.compare_exceptions.aggregate([{'$project': { 'finalTotal': { '$let': { 'vars': ['total', 'discounted'], 'in': {'$multiply': ['$$total', '$$discounted']}, }, }, }}]) def _LIMIT(*args): return lambda cursor: cursor.limit(*args) def _SORT(*args): return lambda cursor: cursor.sort(*args) def _COUNT(cursor): return cursor.count() def _COUNT_EXCEPTION_TYPE(cursor): try: cursor.count() except Exception as error: return str(type(error)) assert False, 'Count should have failed' def _DISTINCT(*args): def sortkey(value): if isinstance(value, dict): return [(k, sortkey(v)) for k, v in sorted(value.items())] return value return lambda cursor: sorted(cursor.distinct(*args), key=sortkey) def _SKIP(*args): return lambda cursor: cursor.skip(*args) class MongoClientSortSkipLimitTest(_CollectionComparisonTest): def setUp(self): super(MongoClientSortSkipLimitTest, self).setUp() self.cmp.do.insert_many([{'_id': i, 'index': i} for i in range(30)]) def test__skip(self): self.cmp.compare(_SORT('index', 1), _SKIP(10)).find() def test__skipped_find(self): self.cmp.compare(_SORT('index', 1)).find(skip=10) def test__limit(self): self.cmp.compare(_SORT('index', 1), _LIMIT(10)).find() def test__negative_limit(self): self.cmp.compare(_SORT('index', 1), _LIMIT(-10)).find() def test__skip_and_limit(self): self.cmp.compare(_SORT('index', 1), _SKIP(10), _LIMIT(10)).find() @skipIf( helpers.PYMONGO_VERSION >= version.parse('4.0'), 'Cursor.count was removed in pymongo 4') def test__count(self): self.cmp.compare(_COUNT).find() @skipUnless( helpers.PYMONGO_VERSION >= version.parse('4.0'), 'Cursor.count was removed in pymongo 4') def test__count_fail(self): self.cmp.compare(_COUNT_EXCEPTION_TYPE).find() def test__sort_name(self): self.cmp.do.delete_many({}) for data in ({'a': 1, 'b': 3, 'c': 'data1'}, {'a': 2, 'b': 2, 'c': 'data3'}, {'a': 3, 'b': 1, 
'c': 'data2'}): self.cmp.do.insert_one(data) self.cmp.compare(_SORT('a')).find() self.cmp.compare(_SORT('b')).find() def test__sort_name_nested_doc(self): self.cmp.do.delete_many({}) for data in ({'root': {'a': 1, 'b': 3, 'c': 'data1'}}, {'root': {'a': 2, 'b': 2, 'c': 'data3'}}, {'root': {'a': 3, 'b': 1, 'c': 'data2'}}): self.cmp.do.insert_one(data) self.cmp.compare(_SORT('root.a')).find() self.cmp.compare(_SORT('root.b')).find() def test__sort_name_nested_list(self): self.cmp.do.delete_many({}) for data in ({'root': [{'a': 1, 'b': 3, 'c': 'data1'}]}, {'root': [{'a': 2, 'b': 2, 'c': 'data3'}]}, {'root': [{'a': 3, 'b': 1, 'c': 'data2'}]}): self.cmp.do.insert_one(data) self.cmp.compare(_SORT('root.0.a')).find() self.cmp.compare(_SORT('root.0.b')).find() def test__sort_list(self): self.cmp.do.delete_many({}) for data in ({'a': 1, 'b': 3, 'c': 'data1'}, {'a': 2, 'b': 2, 'c': 'data3'}, {'a': 3, 'b': 1, 'c': 'data2'}): self.cmp.do.insert_one(data) self.cmp.compare(_SORT([('a', 1), ('b', -1)])).find() self.cmp.compare(_SORT([('b', 1), ('a', -1)])).find() self.cmp.compare(_SORT([('b', 1), ('a', -1), ('c', 1)])).find() def test__sort_list_nested_doc(self): self.cmp.do.delete_many({}) for data in ({'root': {'a': 1, 'b': 3, 'c': 'data1'}}, {'root': {'a': 2, 'b': 2, 'c': 'data3'}}, {'root': {'a': 3, 'b': 1, 'c': 'data2'}}): self.cmp.do.insert_one(data) self.cmp.compare(_SORT([('root.a', 1), ('root.b', -1)])).find() self.cmp.compare(_SORT([('root.b', 1), ('root.a', -1)])).find() self.cmp.compare( _SORT([('root.b', 1), ('root.a', -1), ('root.c', 1)])).find() def test__sort_list_nested_list(self): self.cmp.do.delete_many({}) for data in ({'root': [{'a': 1, 'b': 3, 'c': 'data1'}]}, {'root': [{'a': 2, 'b': 2, 'c': 'data3'}]}, {'root': [{'a': 3, 'b': 1, 'c': 'data2'}]}): self.cmp.do.insert_one(data) self.cmp.compare(_SORT([('root.0.a', 1), ('root.0.b', -1)])).find() self.cmp.compare(_SORT([('root.0.b', 1), ('root.0.a', -1)])).find() self.cmp.compare( _SORT( [('root.0.b', 1), 
('root.0.a', -1), ('root.0.c', 1)])).find() def test__sort_dict(self): self.cmp.do.delete_many({}) self.cmp.do.insert_many([ {'a': 1, 'b': OrderedDict([('value', 1), ('other', True)])}, {'a': 2, 'b': OrderedDict([('value', 3)])}, {'a': 3, 'b': OrderedDict([('value', 2), ('other', False)])}, ]) self.cmp.compare(_SORT('b')).find() def test__close(self): # Does nothing - just make sure it exists and takes the right args self.cmp.do(lambda cursor: cursor.close()).find() def test__distinct_nested_field(self): self.cmp.do.insert_one({'f1': {'f2': 'v'}}) self.cmp.compare(_DISTINCT('f1.f2')).find() def test__distinct_array_field(self): self.cmp.do.insert_many( [{'f1': ['v1', 'v2', 'v1']}, {'f1': ['v2', 'v3']}]) self.cmp.compare(_DISTINCT('f1')).find() def test__distinct_array_nested_field(self): self.cmp.do.insert_one({'f1': [{'f2': 'v'}, {'f2': 'w'}]}) self.cmp.compare(_DISTINCT('f1.f2')).find() def test__distinct_array_field_with_dicts(self): self.cmp.do.insert_many([ {'f1': [{'f2': 'v2'}, {'f3': 'v3'}]}, {'f1': [{'f3': 'v3'}, {'f4': 'v4'}]}, ]) self.cmp.compare(_DISTINCT('f1')).find() class InsertedDocumentTest(TestCase): def setUp(self): super(InsertedDocumentTest, self).setUp() self.collection = mongomock.MongoClient().db.collection self.data = {'a': 1, 'b': [1, 2, 3], 'c': {'d': 4}} self.orig_data = copy.deepcopy(self.data) self.object_id = self.collection.insert_one(self.data).inserted_id def test__object_is_consistent(self): [object] = self.collection.find() self.assertEqual(object['_id'], self.object_id) def test__find_by_id(self): [object] = self.collection.find({'_id': self.object_id}) self.assertEqual(object, self.data) @skipIf( helpers.PYMONGO_VERSION and helpers.PYMONGO_VERSION >= version.parse('4.0'), 'remove was removed in pymongo v4') def test__remove_by_id(self): self.collection.remove(self.object_id) self.assertEqual(0, self.collection.count_documents({})) def test__inserting_changes_argument(self): # Like pymongo, we should fill the _id in the inserted 
dict # (odd behavior, but we need to stick to it) self.assertEqual(self.data, dict(self.orig_data, _id=self.object_id)) def test__data_is_copied(self): [object] = self.collection.find() self.assertEqual(dict(self.orig_data, _id=self.object_id), object) self.data.pop('a') self.data['b'].append(5) self.assertEqual(dict(self.orig_data, _id=self.object_id), object) [object] = self.collection.find() self.assertEqual(dict(self.orig_data, _id=self.object_id), object) def test__find_returns_copied_object(self): [object1] = self.collection.find() [object2] = self.collection.find() self.assertEqual(object1, object2) self.assertIsNot(object1, object2) object1['b'].append('bla') self.assertNotEqual(object1, object2) class ObjectIdTest(TestCase): def test__equal_with_same_id(self): obj1 = ObjectId() obj2 = ObjectId(str(obj1)) self.assertEqual(obj1, obj2) class MongoClientTest(_CollectionComparisonTest): """Compares a fake connection with the real mongo connection implementation This is done via cross-comparison of the results. """ def setUp(self): super(MongoClientTest, self).setUp() self.cmp = MultiCollection({'fake': self.fake_conn, 'real': self.mongo_conn}) def test__database_names(self): if helpers.PYMONGO_VERSION >= version.parse('4.0'): self.cmp.compare_exceptions.database_names() return self.cmp.do.database_names() class DatabaseTest(_CollectionComparisonTest): """Compares a fake database with the real mongo database implementation This is done via cross-comparison of the results. 
""" def setUp(self): super(DatabaseTest, self).setUp() self.cmp = MultiCollection({ 'fake': self.fake_conn[self.db_name], 'real': self.mongo_conn[self.db_name], }) def test__database_names(self): if helpers.PYMONGO_VERSION >= version.parse('4.0'): self.cmp.compare_exceptions.collection_names() return self.cmp.do.collection_names() mongomock-4.1.3/tests/test__not_implemented.py000066400000000000000000000022441456276026200216210ustar00rootroot00000000000000import unittest import mongomock class NotImplementedTests(unittest.TestCase): def tearDown(self): mongomock.warn_on_feature('session') def test_raises(self): collection = mongomock.MongoClient().db.collection with self.assertRaises(NotImplementedError): collection.insert_one({}, session=True) def test_ignores(self): mongomock.ignore_feature('session') collection = mongomock.MongoClient().db.collection collection.insert_one({}, session=True) def test_on_and_off(self): collection = mongomock.MongoClient().db.collection with self.assertRaises(NotImplementedError): collection.insert_one({'_id': 1}, session=True) mongomock.ignore_feature('session') collection.insert_one({'_id': 2}, session=True) mongomock.warn_on_feature('session') with self.assertRaises(NotImplementedError): collection.insert_one({'_id': 3}, session=True) self.assertEqual({2}, {doc['_id'] for doc in collection.find()}) def test_wrong_key(self): with self.assertRaises(KeyError): mongomock.ignore_feature('sessions') mongomock-4.1.3/tests/test__patch.py000066400000000000000000000121301456276026200175300ustar00rootroot00000000000000import time import unittest import mongomock try: import pymongo _HAVE_PYMONGO = True except ImportError: _HAVE_PYMONGO = False try: from unittest import mock except ImportError: import mock import platform _USING_PYPY = platform.python_implementation() == 'PyPy' @unittest.skipIf(not _HAVE_PYMONGO, 'pymongo not installed') @unittest.skipIf(_USING_PYPY, 'PyPy does not handle mocking time sleep properly') class 
PatchTest(unittest.TestCase): """Test the use of the patch function. Test functions in this test are embedded in inner function so that the patch decorator are only called at testing time. """ @mongomock.patch() def test__decorator(self): client1 = pymongo.MongoClient() client1.db.coll.insert_one({'name': 'Pascal'}) client2 = pymongo.MongoClient() self.assertEqual(['db'], client2.list_database_names()) self.assertEqual('Pascal', client2.db.coll.find_one()['name']) client2.db.coll.drop() self.assertEqual(None, client1.db.coll.find_one()) @mongomock.patch(on_new='create') def test__create_new(self): client1 = pymongo.MongoClient('myserver.example.com', port=12345) client1.db.coll.insert_one({'name': 'Pascal'}) client2 = pymongo.MongoClient(host='myserver.example.com', port=12345) self.assertEqual('Pascal', client2.db.coll.find_one()['name']) @mongomock.patch() def test__error_new(self): # Valid because using the default server which was whitelisted by default. pymongo.MongoClient() with self.assertRaises(ValueError): pymongo.MongoClient('myserver.example.com', port=12345) @mongomock.patch(( 'mongodb://myserver.example.com:12345', 'mongodb://otherserver.example.com:27017/default-db', 'mongodb://[2001:67c:2e8:22::c100:68b]', 'mongodb://[2001:67c:2e8:22::c100:68b]:1234', 'mongodb://r1.example.net:27017,r2.example.net:27017/', '/var/lib/mongo.sock', )) def test__create_servers(self): pymongo.MongoClient('myserver.example.com', port=12345) pymongo.MongoClient('otherserver.example.com') pymongo.MongoClient('[2001:67c:2e8:22::c100:68b]') pymongo.MongoClient('mongodb://[2001:67c:2e8:22::c100:68b]:27017/base') pymongo.MongoClient('[2001:67c:2e8:22::c100:68b]', port=1234) pymongo.MongoClient('r1.example.net') pymongo.MongoClient('/var/lib/mongo.sock') with self.assertRaises(ValueError): pymongo.MongoClient() @mongomock.patch(on_new='timeout') @mock.patch(time.__name__ + '.sleep') def test__create_timeout(self, mock_sleep): pymongo.MongoClient() mock_sleep.reset_mock() with 
self.assertRaises(pymongo.errors.ServerSelectionTimeoutError): client = pymongo.MongoClient('myserver.example.com', port=12345) client.db.coll.insert_one({'name': 'Pascal'}) mock_sleep.assert_called_once_with(30000) @mongomock.patch('example.com') def test__different_default_db(self): client_1 = pymongo.MongoClient('mongodb://example.com/db1') client_2 = pymongo.MongoClient('mongodb://example.com/db2') # Access the same data from different clients, despite the different DB. client_1.test_db.collection.insert_one({'name': 'Pascal'}) self.assertEqual(['Pascal'], [d['name'] for d in client_2.test_db.collection.find()]) # Access the data from "default DB" of client 1 but by its name from client 2. client_1.get_default_database().collection.insert_one({'name': 'Lascap'}) self.assertEqual(['Lascap'], [d['name'] for d in client_2.db1.collection.find()]) # Access the data from "default DB" of client 2 but by its name from client 1. client_2.get_default_database().collection.insert_one({'name': 'Caribou'}) self.assertEqual(['Caribou'], [d['name'] for d in client_1.db2.collection.find()]) @mongomock.patch(('my-db_client-url',)) def test__rename_through_another_client(self): client1 = pymongo.MongoClient('mongodb://my-db_client-url/test') client1.test.my_collec.insert_one({'_id': 'Previous data'}) client2 = pymongo.MongoClient('mongodb://my-db_client-url/test') client2.test.drop_collection('my_collec') client2.test.other_collec.insert_one({'_id': 'New data'}) client2.test.other_collec.rename('my_collec') self.assertEqual(['New data'], [d['_id'] for d in client1.test.my_collec.find()]) @mongomock.patch(servers=(('server.example.com', 27017),)) def test__tuple_server_host_and_port(self): objects = [dict(votes=1), dict(votes=2)] client = pymongo.MongoClient('server.example.com') client.db.collection.insert_many(objects) collection = pymongo.MongoClient('server.example.com').db.collection for document in collection.find(): collection.update_one(document, {'$set': {'votes': 
document['votes'] + 1}}) self.assertEqual([2, 3], sorted(d.get('votes') for d in client.db.collection.find())) if __name__ == '__main__': unittest.main() mongomock-4.1.3/tests/test__readme_doctest.py000066400000000000000000000006521456276026200214210ustar00rootroot00000000000000import doctest import os from unittest import TestCase class ReadMeDocTest(TestCase): def test__readme_doctests(self): readme_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'README.rst')) self.assertTrue(os.path.exists(readme_path)) result = doctest.testfile(readme_path, module_relative=False) self.assertEqual(result.failed, 0, '%s tests failed!' % result.failed) mongomock-4.1.3/tests/test__thread.py000066400000000000000000000011071456276026200177020ustar00rootroot00000000000000import unittest from mongomock.store import RWLock class LockTestCase(unittest.TestCase): def test_rwlock_exception(self): """Asserts exceptions occur between a lock's acquire/release""" lock = RWLock() for method in [lock.reader, lock.writer]: try: with method(): raise ValueError except ValueError: pass # Accessing private attributes but oh well self.assertFalse(lock._no_writers.locked()) self.assertFalse(lock._no_readers.locked()) mongomock-4.1.3/tests/types/000077500000000000000000000000001456276026200160305ustar00rootroot00000000000000mongomock-4.1.3/tests/types/patch.py000066400000000000000000000003041456276026200174760ustar00rootroot00000000000000import mongomock @mongomock.patch(servers=(('server.example.com', 27017),)) class MyTestA(object): ... @mongomock.patch(('mydata.com', 'myprivatedata.com')) class MyTestB(object): ... 
mongomock-4.1.3/tests/utils.py000066400000000000000000000005271456276026200164020ustar00rootroot00000000000000class DBRef(object): def __init__(self, collection, id, database=None): self.collection = collection self.id = id self.database = database def as_doc(self): doc = {'$ref': self.collection, '$id': self.id} if self.database is not None: doc['$db'] = self.database return doc mongomock-4.1.3/tox.ini000066400000000000000000000032161456276026200150370ustar00rootroot00000000000000[tox] envlist = pep8,pylint,{pypy3,py36,py37,py38,py39,py310}-pymongo-pyexecjs,py310,py310-{pymongo,pyexecjs},py39-{pymongo3,pymongo361}-pyexecjs,py310-pymongo-pyexecjs-mongodb4 [testenv] passenv = CI TRAVIS TRAVIS_* TEST_MONGO_HOST MONGODB setenv = EXECJS_RUNTIME = Node basepython = py36: python3.6 py37: python3.7 py38: python3.8 py39: python3.9 py310: python3.10 pypy3: pypy3 deps= codecov coverage pymongo: pymongo pymongo3: pymongo<4.0 pymongo361: pymongo==3.6.1 pyexecjs: pyexecjs pyexecjs: nodeenv commands_pre= pyexecjs: nodeenv --prebuilt -p commands= coverage run --source=mongomock -m unittest {posargs:discover} codecov [testenv:pep8] basepython = python3.10 deps = hacking==4.1.0 commands = flake8 {posargs} [testenv:pylint] basepython = python3.10 deps = pylint pylint-quotes pymongo pyexecjs commands = pylint --rcfile=tox.ini --load-plugins pylint_quotes mongomock tests {posargs} [flake8] ignore = H803,H301,H302,H802,E402,W503,H216 exclude = .venv,.tox,dist,doc,*.egg,build,.ropeproject,.nodeenv max-line-length = 100 [testenv:mypy] basepython = python3.10 deps = mypy pymongo-stubs commands = mypy tests/types [pylint] ; TODO: Reenable all or at least most of those. 
disable = missing-docstring,R,invalid-name,fixme,unused-argument,redefined-builtin,protected-access,wrong-import-order,attribute-defined-outside-init,cell-var-from-loop,broad-except,dangerous-default-value,too-many-lines,ungrouped-imports,no-member,too-many-function-args,consider-using-f-string,deprecated-class,redundant-u-string-prefix,unused-private-member,unspecified-encoding enable = no-else-return string-quote=single-avoid-escape