pax_global_header00006660000000000000000000000064117714317710014523gustar00rootroot0000000000000052 comment=7a1b110f62d1c4f19ea84cef3fdb8af498325b4a MongoEngine-mongoengine-7a1b110/000077500000000000000000000000001177143177100165515ustar00rootroot00000000000000MongoEngine-mongoengine-7a1b110/.gitignore000066400000000000000000000002511177143177100205370ustar00rootroot00000000000000.* !.gitignore *~ *.py[co] .*.sw[po] *.egg docs/.build docs/_build build/ dist/ mongoengine.egg-info/ env/ .settings .project .pydevproject tests/test_bugfix.py htmlcov/MongoEngine-mongoengine-7a1b110/.travis.yml000066400000000000000000000004731177143177100206660ustar00rootroot00000000000000# http://travis-ci.org/#!/MongoEngine/mongoengine language: python python: - 2.6 - 2.7 install: - sudo apt-get install zlib1g zlib1g-dev - sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/ - pip install PIL --use-mirrors ; true - python setup.py install script: - python setup.py testMongoEngine-mongoengine-7a1b110/AUTHORS000066400000000000000000000040151177143177100176210ustar00rootroot00000000000000The PRIMARY AUTHORS are (and/or have been): Ross Lawley Harry Marr Matt Dennewitz Deepak Thukral Florian Schlachter Steve Challis Wilson Júnior Dan Crosta https://github.com/dcrosta CONTRIBUTORS Dervived from the git logs, inevitably incomplete but all of whom and others have submitted patches, reported bugs and generally helped make MongoEngine that much better: * Harry Marr * Ross Lawley * blackbrrr * Florian Schlachter * Vincent Driessen * Steve Challis * flosch * Deepak Thukral * Colin Howe * Wilson Júnior * Alistair Roche * Dan Crosta * Viktor Kerkez * Stephan Jaekel * Rached Ben Mustapha * Greg Turner * Daniel Hasselrot * Mircea Pasoi * Matt Chisholm * James Punteney * TimothéePeignier * Stuart Rackham * Serge Matveenko * Matt Dennewitz * Don Spaulding * Ales Zoulek * sshwsfc * sib * Samuel Clay * Nick Vlku * martin * Flavio Amieiro * Анхбаяр Лхагвадорж * Zak Johnson * Victor Farazdagi * 
vandersonmota * Theo Julienne * sp * Slavi Pantaleev * Richard Henry * Nicolas Perriault * Nick Vlku Jr * Michael Henson * Leo Honkanen * kuno * Josh Ourisman * Jaime * Igor Ivanov * Gregg Lind * Gareth Lloyd * Albert Choi * John Arnfield * grubberr * Paul Aliagas * Paul Cunnane * Julien Rebetez * Marc Tamlyn * Karim Allah * Adam Parrish * jpfarias * jonrscott * Alice Zoë Bevan-McGregor * Stephen Young * tkloc * aid * yamaneko1212 * dave mankoff * Alexander G. Morano * jwilder * Joe Shaw * Adam Flynn * Ankhbayar * Jan Schrewe * David Koblas * Crittercism * Alvin Liang * andrewmlevy * Chris Faulkner * Ashwin Purohit * Shalabh Aggarwal * Chris Williams * Robert Kajic * Jacob Peddicord * Nils Hasenbanck * mostlystatic * Greg Banks * swashbuckler * Adam Reeve * Anthony Nemitz * deignacio * shaunduncan * Meir Kriheli * Andrey Fedoseev * aparajita * Tristan EscaladaMongoEngine-mongoengine-7a1b110/LICENSE000066400000000000000000000020461177143177100175600ustar00rootroot00000000000000Copyright (c) 2009-2010 Harry Marr Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. MongoEngine-mongoengine-7a1b110/MANIFEST.in000066400000000000000000000001621177143177100203060ustar00rootroot00000000000000include MANIFEST.in include README.rst include LICENSE include AUTHORS recursive-include docs * prune docs/_build MongoEngine-mongoengine-7a1b110/README.rst000066400000000000000000000063141177143177100202440ustar00rootroot00000000000000=========== MongoEngine =========== :Info: MongoEngine is an ORM-like layer on top of PyMongo. :Author: Harry Marr (http://github.com/hmarr) :Maintainer: Ross Lawley (http://github.com/rozza) .. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master :target: http://travis-ci.org/MongoEngine/mongoengine About ===== MongoEngine is a Python Object-Document Mapper for working with MongoDB. Documentation available at http://mongoengine-odm.rtfd.org - there is currently a `tutorial `_, a `user guide `_ and an `API reference `_. Installation ============ If you have `setuptools `_ you can use ``easy_install -U mongoengine``. Otherwise, you can download the source from `GitHub `_ and run ``python setup.py install``. 
Dependencies ============ - pymongo 2.1.1+ - sphinx (optional - for documentation generation) Examples ======== Some simple examples of what MongoEngine code looks like:: class BlogPost(Document): title = StringField(required=True, max_length=200) posted = DateTimeField(default=datetime.datetime.now) tags = ListField(StringField(max_length=50)) class TextPost(BlogPost): content = StringField(required=True) class LinkPost(BlogPost): url = StringField(required=True) # Create a text-based post >>> post1 = TextPost(title='Using MongoEngine', content='See the tutorial') >>> post1.tags = ['mongodb', 'mongoengine'] >>> post1.save() # Create a link-based post >>> post2 = LinkPost(title='MongoEngine Docs', url='hmarr.com/mongoengine') >>> post2.tags = ['mongoengine', 'documentation'] >>> post2.save() # Iterate over all posts using the BlogPost superclass >>> for post in BlogPost.objects: ... print '===', post.title, '===' ... if isinstance(post, TextPost): ... print post.content ... elif isinstance(post, LinkPost): ... print 'Link:', post.url ... print ... === Using MongoEngine === See the tutorial === MongoEngine Docs === Link: hmarr.com/mongoengine >>> len(BlogPost.objects) 2 >>> len(HtmlPost.objects) 1 >>> len(LinkPost.objects) 1 # Find tagged posts >>> len(BlogPost.objects(tags='mongoengine')) 2 >>> len(BlogPost.objects(tags='mongodb')) 1 Tests ===== To run the test suite, ensure you are running a local instance of MongoDB on the standard port, and run ``python setup.py test``. Community ========= - `MongoEngine Users mailing list `_ - `MongoEngine Developers mailing list `_ - `#mongoengine IRC channel `_ Contributing ============ The source is available on `GitHub `_ - to contribute to the project, fork it on GitHub and send a pull request, all contributions and suggestions are welcome! 
MongoEngine-mongoengine-7a1b110/benchmark.py000066400000000000000000000126471177143177100210670ustar00rootroot00000000000000#!/usr/bin/env python import timeit def cprofile_main(): from pymongo import Connection connection = Connection() connection.drop_database('timeit_test') connection.disconnect() from mongoengine import Document, DictField, connect connect("timeit_test") class Noddy(Document): fields = DictField() for i in xrange(1): noddy = Noddy() for j in range(20): noddy.fields["key" + str(j)] = "value " + str(j) noddy.save() def main(): """ 0.4 Performance Figures ... ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - Pymongo 1.1141769886 ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - MongoEngine 2.37724113464 ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - MongoEngine, safe=False, validate=False 1.92479610443 0.5.X ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - Pymongo 1.10552310944 ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - MongoEngine 16.5169169903 ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - MongoEngine, safe=False, validate=False 14.9446101189 ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False 14.912801981 ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - MongoEngine, force=True 14.9617750645 Performance 
---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - Pymongo 1.10072994232 ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - MongoEngine 5.27341103554 ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - MongoEngine, safe=False, validate=False 4.49365401268 ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False 4.43459296227 ---------------------------------------------------------------------------------------------------- Creating 10000 dictionaries - MongoEngine, force=True 4.40114378929 """ setup = """ from pymongo import Connection connection = Connection() connection.drop_database('timeit_test') """ stmt = """ from pymongo import Connection connection = Connection() db = connection.timeit_test noddy = db.noddy for i in xrange(10000): example = {'fields': {}} for j in range(20): example['fields']["key"+str(j)] = "value "+str(j) noddy.insert(example) myNoddys = noddy.find() [n for n in myNoddys] # iterate """ print "-" * 100 print """Creating 10000 dictionaries - Pymongo""" t = timeit.Timer(stmt=stmt, setup=setup) print t.timeit(1) setup = """ from pymongo import Connection connection = Connection() connection.drop_database('timeit_test') connection.disconnect() from mongoengine import Document, DictField, connect connect("timeit_test") class Noddy(Document): fields = DictField() """ stmt = """ for i in xrange(10000): noddy = Noddy() for j in range(20): noddy.fields["key"+str(j)] = "value "+str(j) noddy.save() myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ print "-" * 100 print """Creating 10000 dictionaries - MongoEngine""" t = timeit.Timer(stmt=stmt, setup=setup) print t.timeit(1) 
stmt = """ for i in xrange(10000): noddy = Noddy() for j in range(20): noddy.fields["key"+str(j)] = "value "+str(j) noddy.save(safe=False, validate=False) myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ print "-" * 100 print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False""" t = timeit.Timer(stmt=stmt, setup=setup) print t.timeit(1) stmt = """ for i in xrange(10000): noddy = Noddy() for j in range(20): noddy.fields["key"+str(j)] = "value "+str(j) noddy.save(safe=False, validate=False, cascade=False) myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ print "-" * 100 print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False""" t = timeit.Timer(stmt=stmt, setup=setup) print t.timeit(1) stmt = """ for i in xrange(10000): noddy = Noddy() for j in range(20): noddy.fields["key"+str(j)] = "value "+str(j) noddy.save(force_insert=True, safe=False, validate=False, cascade=False) myNoddys = Noddy.objects() [n for n in myNoddys] # iterate """ print "-" * 100 print """Creating 10000 dictionaries - MongoEngine, force=True""" t = timeit.Timer(stmt=stmt, setup=setup) print t.timeit(1) if __name__ == "__main__": main() MongoEngine-mongoengine-7a1b110/docs/000077500000000000000000000000001177143177100175015ustar00rootroot00000000000000MongoEngine-mongoengine-7a1b110/docs/Makefile000066400000000000000000000061001177143177100211360ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." 
qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/MongoEngine.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/MongoEngine.qhc" latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ "run these through (pdf)latex." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." 
MongoEngine-mongoengine-7a1b110/docs/_themes/000077500000000000000000000000001177143177100211255ustar00rootroot00000000000000MongoEngine-mongoengine-7a1b110/docs/_themes/nature/000077500000000000000000000000001177143177100224235ustar00rootroot00000000000000MongoEngine-mongoengine-7a1b110/docs/_themes/nature/static/000077500000000000000000000000001177143177100237125ustar00rootroot00000000000000MongoEngine-mongoengine-7a1b110/docs/_themes/nature/static/nature.css_t000066400000000000000000000074401177143177100262520ustar00rootroot00000000000000/** * Sphinx stylesheet -- default theme * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */ @import url("basic.css"); /* -- page layout ----------------------------------------------------------- */ body { font-family: Arial, sans-serif; font-size: 100%; background-color: #111; color: #555; margin: 0; padding: 0; } div.documentwrapper { float: left; width: 100%; } div.bodywrapper { margin: 0 0 0 230px; } hr{ border: 1px solid #B1B4B6; } div.document { background-color: #eee; } div.body { background-color: #ffffff; color: #3E4349; padding: 0 30px 30px 30px; font-size: 0.8em; } div.footer { color: #555; width: 100%; padding: 13px 0; text-align: center; font-size: 75%; } div.footer a { color: #444; text-decoration: underline; } div.related { background-color: #6BA81E; line-height: 32px; color: #fff; text-shadow: 0px 1px 0 #444; font-size: 0.80em; } div.related a { color: #E2F3CC; } div.sphinxsidebar { font-size: 0.75em; line-height: 1.5em; } div.sphinxsidebarwrapper{ padding: 20px 0; } div.sphinxsidebar h3, div.sphinxsidebar h4 { font-family: Arial, sans-serif; color: #222; font-size: 1.2em; font-weight: normal; margin: 0; padding: 5px 10px; background-color: #ddd; text-shadow: 1px 1px 0 white } div.sphinxsidebar h4{ font-size: 1.1em; } div.sphinxsidebar h3 a { color: #444; } div.sphinxsidebar p { color: #888; padding: 5px 20px; } div.sphinxsidebar p.topless { } div.sphinxsidebar ul { margin: 10px 20px; padding: 0; color: #000; } 
div.sphinxsidebar a { color: #444; } div.sphinxsidebar input { border: 1px solid #ccc; font-family: sans-serif; font-size: 1em; } div.sphinxsidebar input[type=text]{ margin-left: 20px; } /* -- body styles ----------------------------------------------------------- */ a { color: #005B81; text-decoration: none; } a:hover { color: #E32E00; text-decoration: underline; } div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { font-family: Arial, sans-serif; background-color: #BED4EB; font-weight: normal; color: #212224; margin: 30px 0px 10px 0px; padding: 5px 0 5px 10px; text-shadow: 0px 1px 0 white } div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; } div.body h2 { font-size: 150%; background-color: #C8D5E3; } div.body h3 { font-size: 120%; background-color: #D8DEE3; } div.body h4 { font-size: 110%; background-color: #D8DEE3; } div.body h5 { font-size: 100%; background-color: #D8DEE3; } div.body h6 { font-size: 100%; background-color: #D8DEE3; } a.headerlink { color: #c60f0f; font-size: 0.8em; padding: 0 4px 0 4px; text-decoration: none; } a.headerlink:hover { background-color: #c60f0f; color: white; } div.body p, div.body dd, div.body li { line-height: 1.5em; } div.admonition p.admonition-title + p { display: inline; } div.highlight{ background-color: white; } div.note { background-color: #eee; border: 1px solid #ccc; } div.seealso { background-color: #ffc; border: 1px solid #ff6; } div.topic { background-color: #eee; } div.warning { background-color: #ffe4e4; border: 1px solid #f66; } p.admonition-title { display: inline; } p.admonition-title:after { content: ":"; } pre { padding: 10px; background-color: White; color: #222; line-height: 1.2em; border: 1px solid #C6C9CB; font-size: 1.2em; margin: 1.5em 0 1.5em 0; -webkit-box-shadow: 1px 1px 1px #d8d8d8; -moz-box-shadow: 1px 1px 1px #d8d8d8; } tt { background-color: #ecf0f3; color: #222; padding: 1px 2px; font-size: 1.2em; font-family: monospace; } 
MongoEngine-mongoengine-7a1b110/docs/_themes/nature/static/pygments.css000066400000000000000000000052351177143177100262770ustar00rootroot00000000000000.c { color: #999988; font-style: italic } /* Comment */ .k { font-weight: bold } /* Keyword */ .o { font-weight: bold } /* Operator */ .cm { color: #999988; font-style: italic } /* Comment.Multiline */ .cp { color: #999999; font-weight: bold } /* Comment.preproc */ .c1 { color: #999988; font-style: italic } /* Comment.Single */ .gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */ .ge { font-style: italic } /* Generic.Emph */ .gr { color: #aa0000 } /* Generic.Error */ .gh { color: #999999 } /* Generic.Heading */ .gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */ .go { color: #111 } /* Generic.Output */ .gp { color: #555555 } /* Generic.Prompt */ .gs { font-weight: bold } /* Generic.Strong */ .gu { color: #aaaaaa } /* Generic.Subheading */ .gt { color: #aa0000 } /* Generic.Traceback */ .kc { font-weight: bold } /* Keyword.Constant */ .kd { font-weight: bold } /* Keyword.Declaration */ .kp { font-weight: bold } /* Keyword.Pseudo */ .kr { font-weight: bold } /* Keyword.Reserved */ .kt { color: #445588; font-weight: bold } /* Keyword.Type */ .m { color: #009999 } /* Literal.Number */ .s { color: #bb8844 } /* Literal.String */ .na { color: #008080 } /* Name.Attribute */ .nb { color: #999999 } /* Name.Builtin */ .nc { color: #445588; font-weight: bold } /* Name.Class */ .no { color: #ff99ff } /* Name.Constant */ .ni { color: #800080 } /* Name.Entity */ .ne { color: #990000; font-weight: bold } /* Name.Exception */ .nf { color: #990000; font-weight: bold } /* Name.Function */ .nn { color: #555555 } /* Name.Namespace */ .nt { color: #000080 } /* Name.Tag */ .nv { color: purple } /* Name.Variable */ .ow { font-weight: bold } /* Operator.Word */ .mf { color: #009999 } /* Literal.Number.Float */ .mh { color: #009999 } /* Literal.Number.Hex */ .mi { color: #009999 } /* 
Literal.Number.Integer */ .mo { color: #009999 } /* Literal.Number.Oct */ .sb { color: #bb8844 } /* Literal.String.Backtick */ .sc { color: #bb8844 } /* Literal.String.Char */ .sd { color: #bb8844 } /* Literal.String.Doc */ .s2 { color: #bb8844 } /* Literal.String.Double */ .se { color: #bb8844 } /* Literal.String.Escape */ .sh { color: #bb8844 } /* Literal.String.Heredoc */ .si { color: #bb8844 } /* Literal.String.Interpol */ .sx { color: #bb8844 } /* Literal.String.Other */ .sr { color: #808000 } /* Literal.String.Regex */ .s1 { color: #bb8844 } /* Literal.String.Single */ .ss { color: #bb8844 } /* Literal.String.Symbol */ .bp { color: #999999 } /* Name.Builtin.Pseudo */ .vc { color: #ff99ff } /* Name.Variable.Class */ .vg { color: #ff99ff } /* Name.Variable.Global */ .vi { color: #ff99ff } /* Name.Variable.Instance */ .il { color: #009999 } /* Literal.Number.Integer.Long */MongoEngine-mongoengine-7a1b110/docs/_themes/nature/theme.conf000066400000000000000000000001071177143177100243720ustar00rootroot00000000000000[theme] inherit = basic stylesheet = nature.css pygments_style = tango MongoEngine-mongoengine-7a1b110/docs/apireference.rst000066400000000000000000000033451177143177100226700ustar00rootroot00000000000000============= API Reference ============= Connecting ========== .. autofunction:: mongoengine.connect .. autofunction:: mongoengine.register_connection Documents ========= .. autoclass:: mongoengine.Document :members: .. attribute:: objects A :class:`~mongoengine.queryset.QuerySet` object that is created lazily on access. .. autoclass:: mongoengine.EmbeddedDocument :members: .. autoclass:: mongoengine.DynamicDocument :members: .. autoclass:: mongoengine.DynamicEmbeddedDocument :members: .. autoclass:: mongoengine.document.MapReduceDocument :members: .. autoclass:: mongoengine.ValidationError :members: Querying ======== .. autoclass:: mongoengine.queryset.QuerySet :members: .. automethod:: mongoengine.queryset.QuerySet.__call__ .. 
autofunction:: mongoengine.queryset.queryset_manager Fields ====== .. autoclass:: mongoengine.StringField .. autoclass:: mongoengine.URLField .. autoclass:: mongoengine.EmailField .. autoclass:: mongoengine.IntField .. autoclass:: mongoengine.FloatField .. autoclass:: mongoengine.DecimalField .. autoclass:: mongoengine.DateTimeField .. autoclass:: mongoengine.ComplexDateTimeField .. autoclass:: mongoengine.ListField .. autoclass:: mongoengine.SortedListField .. autoclass:: mongoengine.DictField .. autoclass:: mongoengine.MapField .. autoclass:: mongoengine.ObjectIdField .. autoclass:: mongoengine.ReferenceField .. autoclass:: mongoengine.GenericReferenceField .. autoclass:: mongoengine.EmbeddedDocumentField .. autoclass:: mongoengine.GenericEmbeddedDocumentField .. autoclass:: mongoengine.BooleanField .. autoclass:: mongoengine.FileField .. autoclass:: mongoengine.BinaryField .. autoclass:: mongoengine.GeoPointField .. autoclass:: mongoengine.SequenceField MongoEngine-mongoengine-7a1b110/docs/changelog.rst000066400000000000000000000322731177143177100221710ustar00rootroot00000000000000========= Changelog ========= Changes in 0.6.13 ================ - Fixed EmbeddedDocument db_field validation issue - Fixed StringField unicode issue - Fixes __repr__ modifying the cursor Changes in 0.6.12 ================= - Fixes scalar lookups for primary_key - Fixes error with _delta handling DBRefs Changes in 0.6.11 ================== - Fixed inconsistency handling None values field attrs - Fixed map_field embedded db_field issue - Fixed .save() _delta issue with DbRefs - Fixed Django TestCase - Added cmp to Embedded Document - Added PULL reverse_delete_rule - Fixed CASCADE delete bug - Fixed db_field data load error - Fixed recursive save with FileField Changes in 0.6.10 ================= - Fixed basedict / baselist to return super(..) 
- Promoted BaseDynamicField to DynamicField Changes in 0.6.9 ================ - Fixed sparse indexes on inherited docs - Removed FileField auto deletion, needs more work maybe 0.7 Changes in 0.6.8 ================ - Fixed FileField losing reference when no default set - Removed possible race condition from FileField (grid_file) - Added assignment to save, can now do: b = MyDoc(**kwargs).save() - Added support for pull operations on nested EmbeddedDocuments - Added support for choices with GenericReferenceFields - Added support for choices with GenericEmbeddedDocumentFields - Fixed Django 1.4 sessions first save data loss - FileField now automatically delete files on .delete() - Fix for GenericReference to_mongo method - Fixed connection regression - Updated Django User document, now allows inheritance Changes in 0.6.7 ================ - Fixed indexing on '_id' or 'pk' or 'id' - Invalid data from the DB now raises a InvalidDocumentError - Cleaned up the Validation Error - docs and code - Added meta `auto_create_index` so you can disable index creation - Added write concern options to inserts - Fixed typo in meta for index options - Bug fix Read preference now passed correctly - Added support for File like objects for GridFS - Fix for #473 - Dereferencing abstracts Changes in 0.6.6 ================ - Django 1.4 fixed (finally) - Added tests for Django Changes in 0.6.5 ================ - More Django updates Changes in 0.6.4 ================ - Refactored connection / fixed replicasetconnection - Bug fix for unknown connection alias error message - Sessions support Django 1.3 and Django 1.4 - Minor fix for ReferenceField Changes in 0.6.3 ================ - Updated sessions for Django 1.4 - Bug fix for updates where listfields contain embedded documents - Bug fix for collection naming and mixins Changes in 0.6.2 ================ - Updated documentation for ReplicaSet connections - Hack round _types issue with SERVER-5247 - querying other arrays may also cause problems. 
Changes in 0.6.1 ================ - Fix for replicaSet connections Changes in 0.6 ================ - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 - Added support for covered indexes when inheritance is off - No longer always upsert on save for items with a '_id' - Error raised if update doesn't have an operation - DeReferencing is now thread safe - Errors raised if trying to perform a join in a query - Updates can now take __raw__ queries - Added custom 2D index declarations - Added replicaSet connection support - Updated deprecated imports from pymongo (safe for pymongo 2.2) - Added uri support for connections - Added scalar for efficiently returning partial data values (aliased to values_list) - Fixed limit skip bug - Improved Inheritance / Mixin - Added sharding support - Added pymongo 2.1 support - Fixed Abstract documents can now declare indexes - Added db_alias support to individual documents - Fixed GridFS documents can now be pickled - Added Now raises an InvalidDocumentError when declaring multiple fields with the same db_field - Added InvalidQueryError when calling with_id with a filter - Added support for DBRefs in distinct() - Fixed issue saving False booleans - Fixed issue with dynamic documents deltas - Added Reverse Delete Rule support to ListFields - MapFields aren't supported - Added customisable cascade kwarg options - Fixed Handle None values for non-required fields - Removed Document._get_subclasses() - no longer required - Fixed bug requiring subclasses when not actually needed - Fixed deletion of dynamic data - Added support for the $elementMatch operator - Added reverse option to SortedListFields - Fixed dereferencing - multi directional list dereferencing - Fixed issue creating indexes with recursive embedded documents - Fixed recursive lookup in _unique_with_indexes - Fixed passing ComplexField defaults to constructor for ReferenceFields - Fixed validation of DictField Int keys 
- Added optional cascade saving - Fixed dereferencing - max_depth now taken into account - Fixed document mutation saving issue - Fixed positional operator when replacing embedded documents - Added Non-Django Style choices back (you can have either) - Fixed __repr__ of a sliced queryset - Added recursive validation error of documents / complex fields - Fixed breaking during queryset iteration - Added pre and post bulk-insert signals - Added ImageField - requires PIL - Fixed Reference Fields can be None in get_or_create / queries - Fixed accessing pk on an embedded document - Fixed calling a queryset after drop_collection now recreates the collection - Add field name to validation exception messages - Added UUID field - Improved efficiency of .get() - Updated ComplexFields so if required they won't accept empty lists / dicts - Added spec file for rpm-based distributions - Fixed ListField so it doesnt accept strings - Added DynamicDocument and EmbeddedDynamicDocument classes for expando schemas Changes in v0.5.2 ================= - A Robust Circular reference bugfix Changes in v0.5.1 ================= - Fixed simple circular reference bug Changes in v0.5 =============== - Added InvalidDocumentError - so Document core methods can't be overwritten - Added GenericEmbeddedDocument - so you can embed any type of embeddable document - Added within_polygon support - for those with mongodb 1.9 - Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments - Added where() - filter to allowing users to specify query expressions as Javascript - Added SequenceField - for creating sequential counters - Added update() convenience method to a document - Added cascading saves - so changes to Referenced documents are saved on .save() - Added select_related() support - Added support for the positional operator - Updated geo index checking to be recursive and check in embedded documents - Updated default collection naming convention - Added Document Mixin 
support - Fixed queryet __repr__ mid iteration - Added hint() support, so cantell Mongo the proper index to use for the query - Fixed issue with inconsitent setting of _cls breaking inherited referencing - Added help_text and verbose_name to fields to help with some form libs - Updated item_frequencies to handle embedded document lookups - Added delta tracking now only sets / unsets explicitly changed fields - Fixed saving so sets updated values rather than overwrites - Added ComplexDateTimeField - Handles datetimes correctly with microseconds - Added ComplexBaseField - for improved flexibility and performance - Added get_FIELD_display() method for easy choice field displaying - Added queryset.slave_okay(enabled) method - Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable - Added insert method for bulk inserts - Added blinker signal support - Added query_counter context manager for tests - Added map_reduce method item_frequencies and set as default (as db.eval doesn't work in sharded environments) - Added inline_map_reduce option to map_reduce - Updated connection exception so it provides more info on the cause. - Added searching multiple levels deep in ``DictField`` - Added ``DictField`` entries containing strings to use matching operators - Added ``MapField``, similar to ``DictField`` - Added Abstract Base Classes - Added Custom Objects Managers - Added sliced subfields updating - Added ``NotRegistered`` exception if dereferencing ``Document`` not in the registry - Added a write concern for ``save``, ``update``, ``update_one`` and ``get_or_create`` - Added slicing / subarray fetching controls - Fixed various unique index and other index issues - Fixed threaded connection issues - Added spherical geospatial query operators - Updated queryset to handle latest version of pymongo map_reduce now requires an output. 
- Added ``Document`` __hash__, __ne__ for pickling - Added ``FileField`` optional size arg for read method - Fixed ``FileField`` seek and tell methods for reading files - Added ``QuerySet.clone`` to support copying querysets - Fixed item_frequencies when using name thats the same as a native js function - Added reverse delete rules - Fixed issue with unset operation - Fixed Q-object bug - Added ``QuerySet.all_fields`` resets previous .only() and .exclude() - Added ``QuerySet.exclude`` - Added django style choices - Fixed order and filter issue - Added ``QuerySet.only`` subfield support - Added creation_counter to ``BaseField`` allowing fields to be sorted in the way the user has specified them - Fixed various errors - Added many tests Changes in v0.4 =============== - Added ``GridFSStorage`` Django storage backend - Added ``FileField`` for GridFS support - New Q-object implementation, which is no longer based on Javascript - Added ``SortedListField`` - Added ``EmailField`` - Added ``GeoPointField`` - Added ``exact`` and ``iexact`` match operators to ``QuerySet`` - Added ``get_document_or_404`` and ``get_list_or_404`` Django shortcuts - Added new query operators for Geo queries - Added ``not`` query operator - Added new update operators: ``pop`` and ``add_to_set`` - Added ``__raw__`` query parameter - Added support for custom querysets - Fixed document inheritance primary key issue - Added support for querying by array element position - Base class can now be defined for ``DictField`` - Fixed MRO error that occured on document inheritance - Added ``QuerySet.distinct``, ``QuerySet.create``, ``QuerySet.snapshot``, ``QuerySet.timeout`` and ``QuerySet.all`` - Subsequent calls to ``connect()`` now work - Introduced ``min_length`` for ``StringField`` - Fixed multi-process connection issue - Other minor fixes Changes in v0.3 =============== - Added MapReduce support - Added ``contains``, ``startswith`` and ``endswith`` query operators (and case-insensitive versions that 
are prefixed with 'i') - Deprecated fields' ``name`` parameter, replaced with ``db_field`` - Added ``QuerySet.only`` for only retrieving specific fields - Added ``QuerySet.in_bulk()`` for bulk querying using ids - ``QuerySet``\ s now have a ``rewind()`` method, which is called automatically when the iterator is exhausted, allowing ``QuerySet``\ s to be reused - Added ``DictField`` - Added ``URLField`` - Added ``DecimalField`` - Added ``BinaryField`` - Added ``GenericReferenceField`` - Added ``get()`` and ``get_or_create()`` methods to ``QuerySet`` - ``ReferenceField``\ s may now reference the document they are defined on (recursive references) and documents that have not yet been defined - ``Document`` objects may now be compared for equality (equal if _ids are equal and documents are of same type) - ``QuerySet`` update methods now have an ``upsert`` parameter - Added field name substitution for Javascript code (allows the user to use the Python names for fields in JS, which are later substituted for the real field names) - ``Q`` objects now support regex querying - Fixed bug where referenced documents within lists weren't properly dereferenced - ``ReferenceField``\ s may now be queried using their _id - Fixed bug where ``EmbeddedDocuments`` couldn't be non-polymorphic - ``queryset_manager`` functions now accept two arguments -- the document class as the first and the queryset as the second - Fixed bug where ``QuerySet.exec_js`` ignored ``Q`` objects - Other minor fixes Changes in v0.2.2 ================= - Fixed bug that prevented indexes from being used on ``ListField``\ s - ``Document.filter()`` added as an alias to ``Document.__call__()`` - ``validate()`` may now be used on ``EmbeddedDocument``\ s Changes in v0.2.1 ================= - Added a MongoEngine backend for Django sessions - Added ``force_insert`` to ``Document.save()`` - Improved querying syntax for ``ListField`` and ``EmbeddedDocumentField`` - Added support for user-defined primary keys (``_id`` in 
MongoDB) Changes in v0.2 =============== - Added ``Q`` class for building advanced queries - Added ``QuerySet`` methods for atomic updates to documents - Fields may now specify ``unique=True`` to enforce uniqueness across a collection - Added option for default document ordering - Fixed bug in index definitions Changes in v0.1.3 ================= - Added Django authentication backend - Added ``Document.meta`` support for indexes, which are ensured just before querying takes place - A few minor bugfixes Changes in v0.1.2 ================= - Query values may be processed before before being used in queries - Made connections lazy - Fixed bug in Document dictionary-style access - Added ``BooleanField`` - Added ``Document.reload()`` method Changes in v0.1.1 ================= - Documents may now use capped collections MongoEngine-mongoengine-7a1b110/docs/code/000077500000000000000000000000001177143177100204135ustar00rootroot00000000000000MongoEngine-mongoengine-7a1b110/docs/code/tumblelog.py000066400000000000000000000031201177143177100227530ustar00rootroot00000000000000from mongoengine import * connect('tumblelog') class Comment(EmbeddedDocument): content = StringField() name = StringField(max_length=120) class User(Document): email = StringField(required=True) first_name = StringField(max_length=50) last_name = StringField(max_length=50) class Post(Document): title = StringField(max_length=120, required=True) author = ReferenceField(User) tags = ListField(StringField(max_length=30)) comments = ListField(EmbeddedDocumentField(Comment)) class TextPost(Post): content = StringField() class ImagePost(Post): image_path = StringField() class LinkPost(Post): link_url = StringField() Post.drop_collection() john = User(email='jdoe@example.com', first_name='John', last_name='Doe') john.save() post1 = TextPost(title='Fun with MongoEngine', author=john) post1.content = 'Took a look at MongoEngine today, looks pretty cool.' 
post1.tags = ['mongodb', 'mongoengine'] post1.save() post2 = LinkPost(title='MongoEngine Documentation', author=john) post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' post2.tags = ['mongoengine'] post2.save() print 'ALL POSTS' print for post in Post.objects: print post.title print '=' * len(post.title) if isinstance(post, TextPost): print post.content if isinstance(post, LinkPost): print 'Link:', post.link_url print print print 'POSTS TAGGED \'MONGODB\'' print for post in Post.objects(tags='mongodb'): print post.title print num_posts = Post.objects(tags='mongodb').count() print 'Found %d posts with tag "mongodb"' % num_posts MongoEngine-mongoengine-7a1b110/docs/conf.py000066400000000000000000000144251177143177100210060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # MongoEngine documentation build configuration file, created by # sphinx-quickstart on Sun Nov 22 18:14:13 2009. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.append(os.path.abspath('..')) # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. 
#source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'MongoEngine' copyright = u'2009-2012, MongoEngine Authors' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # import mongoengine # The short X.Y version. version = mongoengine.get_version() # The full version, including alpha/beta/rc tags. release = mongoengine.get_version() # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. 
html_theme = 'nature' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = ['_themes'] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". #html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_use_modindex = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. 
#html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'MongoEnginedoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). latex_paper_size = 'a4' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'MongoEngine.tex', u'MongoEngine Documentation', u'Harry Marr', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_use_modindex = True MongoEngine-mongoengine-7a1b110/docs/django.rst000066400000000000000000000065221177143177100215020ustar00rootroot00000000000000============================= Using MongoEngine with Django ============================= .. note :: Updated to support Django 1.4 Connecting ========== In your **settings.py** file, ignore the standard database settings (unless you also plan to use the ORM in your project), and instead call :func:`~mongoengine.connect` somewhere in the settings module. Authentication ============== MongoEngine includes a Django authentication backend, which uses MongoDB. 
The :class:`~mongoengine.django.auth.User` model is a MongoEngine :class:`~mongoengine.Document`, but implements most of the methods and attributes that the standard Django :class:`User` model does - so the two are moderately compatible. Using this backend will allow you to store users in MongoDB but still use many of the Django authentication infrastucture (such as the :func:`login_required` decorator and the :func:`authenticate` function). To enable the MongoEngine auth backend, add the following to you **settings.py** file:: AUTHENTICATION_BACKENDS = ( 'mongoengine.django.auth.MongoEngineBackend', ) The :mod:`~mongoengine.django.auth` module also contains a :func:`~mongoengine.django.auth.get_user` helper function, that takes a user's :attr:`id` and returns a :class:`~mongoengine.django.auth.User` object. .. versionadded:: 0.1.3 Sessions ======== Django allows the use of different backend stores for its sessions. MongoEngine provides a MongoDB-based session backend for Django, which allows you to use sessions in you Django application with just MongoDB. To enable the MongoEngine session backend, ensure that your settings module has ``'django.contrib.sessions.middleware.SessionMiddleware'`` in the ``MIDDLEWARE_CLASSES`` field and ``'django.contrib.sessions'`` in your ``INSTALLED_APPS``. From there, all you need to do is add the following line into you settings module:: SESSION_ENGINE = 'mongoengine.django.sessions' .. versionadded:: 0.2.1 Storage ======= With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`, it is useful to have a Django file storage backend that wraps this. The new storage module is called :class:`~mongoengine.django.storage.GridFSStorage`. Using it is very similar to using the default FileSystemStorage.:: from mongoengine.django.storage import GridFSStorage fs = GridFSStorage() filename = fs.save('hello.txt', 'Hello, World!') All of the `Django Storage API methods `_ have been implemented except :func:`path`. 
If the filename provided already exists, an underscore and a number (before # the file extension, if one exists) will be appended to the filename until the generated filename doesn't exist. The :func:`save` method will return the new filename.:: >>> fs.exists('hello.txt') True >>> fs.open('hello.txt').read() 'Hello, World!' >>> fs.size('hello.txt') 13 >>> fs.url('hello.txt') 'http://your_media_url/hello.txt' >>> fs.open('hello.txt').name 'hello.txt' >>> fs.listdir() ([], [u'hello.txt']) All files will be saved and retrieved in GridFS via the :class::`FileDocument` document, allowing easy access to the files without the GridFSStorage backend.:: >>> from mongoengine.django.storage import FileDocument >>> FileDocument.objects() [] .. versionadded:: 0.4 MongoEngine-mongoengine-7a1b110/docs/guide/000077500000000000000000000000001177143177100205765ustar00rootroot00000000000000MongoEngine-mongoengine-7a1b110/docs/guide/connecting.rst000066400000000000000000000044141177143177100234620ustar00rootroot00000000000000.. _guide-connecting: ===================== Connecting to MongoDB ===================== To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect` function. The first argument is the name of the database to connect to. If the database does not exist, it will be created. If the database requires authentication, :attr:`username` and :attr:`password` arguments may be provided:: from mongoengine import connect connect('project1', username='webapp', password='pwd123') By default, MongoEngine assumes that the :program:`mongod` instance is running on **localhost** on port **27017**. 
If MongoDB is running elsewhere, you may provide :attr:`host` and :attr:`port` arguments to :func:`~mongoengine.connect`:: connect('project1', host='192.168.1.35', port=12345) Uri style connections are also supported as long as you include the database name - just supply the uri as the :attr:`host` to :func:`~mongoengine.connect`:: connect('project1', host='mongodb://localhost/database_name') ReplicaSets =========== MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection` to use them please use a URI style connection and provide the `replicaSet` name in the connection kwargs. Multiple Databases ================== Multiple database support was added in MongoEngine 0.6. To use multiple databases you can use :func:`~mongoengine.connect` and provide an `alias` name for the connection - if no `alias` is provided then "default" is used. In the background this uses :func:`~mongoengine.register_connection` to store the data and you can register all aliases up front if required. Individual documents can also support multiple databases by providing a `db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` objects to point across databases and collections. Below is an example schema, using 3 different databases to store data:: class User(Document): name = StringField() meta = {"db_alias": "user-db"} class Book(Document): name = StringField() meta = {"db_alias": "book-db"} class AuthorBooks(Document): author = ReferenceField(User) book = ReferenceField(Book) meta = {"db_alias": "users-books-db"} MongoEngine-mongoengine-7a1b110/docs/guide/defining-documents.rst000066400000000000000000000515011177143177100251140ustar00rootroot00000000000000================== Defining documents ================== In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When working with relational databases, rows are stored in **tables**, which have a strict **schema** that the rows follow. 
MongoDB stores documents in **collections** rather than tables - the principle difference is that no schema is enforced at a database level. Defining a document's schema ============================ MongoEngine allows you to define schemata for documents as this helps to reduce coding errors, and allows for utility methods to be defined on fields which may be present. To define a schema for a document, create a class that inherits from :class:`~mongoengine.Document`. Fields are specified by adding **field objects** as class attributes to the document class:: from mongoengine import * import datetime class Page(Document): title = StringField(max_length=200, required=True) date_modified = DateTimeField(default=datetime.datetime.now) Dynamic document schemas ======================== One of the benefits of MongoDb is dynamic schemas for a collection, whilst data should be planned and organised (after all explicit is better than implicit!) there are scenarios where having dynamic / expando style documents is desirable. :class:`~mongoengine.DynamicDocument` documents work in the same way as :class:`~mongoengine.Document` but any data / attributes set to them will also be saved :: from mongoengine import * class Page(DynamicDocument): title = StringField(max_length=200, required=True) # Create a new page and add tags >>> page = Page(title='Using MongoEngine') >>> page.tags = ['mongodb', 'mongoengine'] >>> page.save() >>> Page.objects(tags='mongoengine').count() >>> 1 ..note:: There is one caveat on Dynamic Documents: fields cannot start with `_` Fields ====== By default, fields are not required. To make a field mandatory, set the :attr:`required` keyword argument of a field to ``True``. Fields also may have validation constraints available (such as :attr:`max_length` in the example above). Fields may also take default values, which will be used if a value is not provided. 
Default values may optionally be a callable, which will be called to retrieve the value (such as in the above example). The field types available are as follows: * :class:`~mongoengine.StringField` * :class:`~mongoengine.URLField` * :class:`~mongoengine.EmailField` * :class:`~mongoengine.IntField` * :class:`~mongoengine.FloatField` * :class:`~mongoengine.DecimalField` * :class:`~mongoengine.DateTimeField` * :class:`~mongoengine.ComplexDateTimeField` * :class:`~mongoengine.ListField` * :class:`~mongoengine.SortedListField` * :class:`~mongoengine.DictField` * :class:`~mongoengine.MapField` * :class:`~mongoengine.ObjectIdField` * :class:`~mongoengine.ReferenceField` * :class:`~mongoengine.GenericReferenceField` * :class:`~mongoengine.EmbeddedDocumentField` * :class:`~mongoengine.GenericEmbeddedDocumentField` * :class:`~mongoengine.BooleanField` * :class:`~mongoengine.FileField` * :class:`~mongoengine.BinaryField` * :class:`~mongoengine.GeoPointField` * :class:`~mongoengine.SequenceField` Field arguments --------------- Each field type can be customized by keyword arguments. The following keyword arguments can be set on all fields: :attr:`db_field` (Default: None) The MongoDB field name. :attr:`name` (Default: None) The mongoengine field name. :attr:`required` (Default: False) If set to True and the field is not set on the document instance, a :class:`~mongoengine.ValidationError` will be raised when the document is validated. :attr:`default` (Default: None) A value to use when no value is set for this field. 
The definion of default parameters follow `the general rules on Python `__, which means that some care should be taken when dealing with default mutable objects (like in :class:`~mongoengine.ListField` or :class:`~mongoengine.DictField`):: class ExampleFirst(Document): # Default an empty list values = ListField(IntField(), default=list) class ExampleSecond(Document): # Default a set of values values = ListField(IntField(), default=lambda: [1,2,3]) class ExampleDangerous(Document): # This can make an .append call to add values to the default (and all the following objects), # instead to just an object values = ListField(IntField(), default=[1,2,3]) :attr:`unique` (Default: False) When True, no documents in the collection will have the same value for this field. :attr:`unique_with` (Default: None) A field name (or list of field names) that when taken together with this field, will not have two documents in the collection with the same value. :attr:`primary_key` (Default: False) When True, use this field as a primary key for the collection. :attr:`choices` (Default: None) An iterable (e.g. a list or tuple) of choices to which the value of this field should be limited. Can be either be a nested tuples of value (stored in mongo) and a human readable key :: SIZE = (('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large')) class Shirt(Document): size = StringField(max_length=3, choices=SIZE) Or a flat iterable just containing values :: SIZE = ('S', 'M', 'L', 'XL', 'XXL') class Shirt(Document): size = StringField(max_length=3, choices=SIZE) :attr:`help_text` (Default: None) Optional help text to output with the field - used by form libraries :attr:`verbose_name` (Default: None) Optional human-readable name for the field - used by form libraries List fields ----------- MongoDB allows the storage of lists of items. To add a list of items to a :class:`~mongoengine.Document`, use the :class:`~mongoengine.ListField` field type. 
:class:`~mongoengine.ListField` takes another field object as its first argument, which specifies which type elements may be stored within the list:: class Page(Document): tags = ListField(StringField(max_length=50)) Embedded documents ------------------ MongoDB has the ability to embed documents within other documents. Schemata may be defined for these embedded documents, just as they may be for regular documents. To create an embedded document, just define a document as usual, but inherit from :class:`~mongoengine.EmbeddedDocument` rather than :class:`~mongoengine.Document`:: class Comment(EmbeddedDocument): content = StringField() To embed the document within another document, use the :class:`~mongoengine.EmbeddedDocumentField` field type, providing the embedded document class as the first argument:: class Page(Document): comments = ListField(EmbeddedDocumentField(Comment)) comment1 = Comment(content='Good work!') comment2 = Comment(content='Nice article!') page = Page(comments=[comment1, comment2]) Dictionary Fields ----------------- Often, an embedded document may be used instead of a dictionary -- generally this is recommended as dictionaries don't support validation or custom field types. However, sometimes you will not know the structure of what you want to store; in this situation a :class:`~mongoengine.DictField` is appropriate:: class SurveyResponse(Document): date = DateTimeField() user = ReferenceField(User) answers = DictField() survey_response = SurveyResponse(date=datetime.now(), user=request.user) response_form = ResponseForm(request.POST) survey_response.answers = response_form.cleaned_data() survey_response.save() Dictionaries can store complex data, other dictionaries, lists, references to other objects, so are the most flexible field type available. Reference fields ---------------- References may be stored to other documents in the database using the :class:`~mongoengine.ReferenceField`. 
Pass in another document class as the first argument to the constructor, then simply assign document objects to the field:: class User(Document): name = StringField() class Page(Document): content = StringField() author = ReferenceField(User) john = User(name="John Smith") john.save() post = Page(content="Test Page") post.author = john post.save() The :class:`User` object is automatically turned into a reference behind the scenes, and dereferenced when the :class:`Page` object is retrieved. To add a :class:`~mongoengine.ReferenceField` that references the document being defined, use the string ``'self'`` in place of the document class as the argument to :class:`~mongoengine.ReferenceField`'s constructor. To reference a document that has not yet been defined, use the name of the undefined document as the constructor's argument:: class Employee(Document): name = StringField() boss = ReferenceField('self') profile_page = ReferenceField('ProfilePage') class ProfilePage(Document): content = StringField() Dealing with deletion of referred documents ''''''''''''''''''''''''''''''''''''''''''' By default, MongoDB doesn't check the integrity of your data, so deleting documents that other documents still hold references to will lead to consistency issues. Mongoengine's :class:`ReferenceField` adds some functionality to safeguard against these kinds of database integrity problems, providing each reference with a delete rule specification. A delete rule is specified by supplying the :attr:`reverse_delete_rule` attributes on the :class:`ReferenceField` definition, like this:: class Employee(Document): ... profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY) The declaration in this example means that when an :class:`Employee` object is removed, the :class:`ProfilePage` that belongs to that employee is removed as well. If a whole batch of employees is removed, all profile pages that are linked are removed as well. 
Its value can take any of the following constants: :const:`mongoengine.DO_NOTHING` This is the default and won't do anything. Deletes are fast, but may cause database inconsistency or dangling references. :const:`mongoengine.DENY` Deletion is denied if there still exist references to the object being deleted. :const:`mongoengine.NULLIFY` Any object's fields still referring to the object being deleted are removed (using MongoDB's "unset" operation), effectively nullifying the relationship. :const:`mongoengine.CASCADE` Any object containing fields that are refererring to the object being deleted are deleted first. :const:`mongoengine.PULL` Removes the reference to the object (using MongoDB's "pull" operation) from any object's fields of :class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`). .. warning:: A safety note on setting up these delete rules! Since the delete rules are not recorded on the database level by MongoDB itself, but instead at runtime, in-memory, by the MongoEngine module, it is of the upmost importance that the module that declares the relationship is loaded **BEFORE** the delete is invoked. If, for example, the :class:`Employee` object lives in the :mod:`payroll` app, and the :class:`ProfilePage` in the :mod:`people` app, it is extremely important that the :mod:`people` app is loaded before any employee is removed, because otherwise, MongoEngine could never know this relationship exists. In Django, be sure to put all apps that have such delete rule declarations in their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. Generic reference fields '''''''''''''''''''''''' A second kind of reference field also exists, :class:`~mongoengine.GenericReferenceField`. 
This allows you to reference any kind of :class:`~mongoengine.Document`, and hence doesn't take a :class:`~mongoengine.Document` subclass as a constructor argument:: class Link(Document): url = StringField() class Post(Document): title = StringField() class Bookmark(Document): bookmark_object = GenericReferenceField() link = Link(url='http://hmarr.com/mongoengine/') link.save() post = Post(title='Using MongoEngine') post.save() Bookmark(bookmark_object=link).save() Bookmark(bookmark_object=post).save() .. note:: Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if you will only be referencing one document type, prefer the standard :class:`~mongoengine.ReferenceField`. Uniqueness constraints ---------------------- MongoEngine allows you to specify that a field should be unique across a collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's constructor. If you try to save a document that has the same value for a unique field as a document that is already in the database, a :class:`~mongoengine.OperationError` will be raised. 
You may also specify multi-field uniqueness constraints by using :attr:`unique_with`, which may be either a single field name, or a list or tuple of field names:: class User(Document): username = StringField(unique=True) first_name = StringField() last_name = StringField(unique_with='first_name') Skipping Document validation on save ------------------------------------ You can also skip the whole document validation process by setting ``validate=False`` when caling the :meth:`~mongoengine.document.Document.save` method:: class Recipient(Document): name = StringField() email = EmailField() recipient = Recipient(name='admin', email='root@localhost') recipient.save() # will raise a ValidationError while recipient.save(validate=False) # won't Document collections ==================== Document classes that inherit **directly** from :class:`~mongoengine.Document` will have their own **collection** in the database. The name of the collection is by default the name of the class, coverted to lowercase (so in the example above, the collection would be called `page`). If you need to change the name of the collection (e.g. to use MongoEngine with an existing database), then create a class dictionary attribute called :attr:`meta` on your document, and set :attr:`collection` to the name of the collection that you want your document class to use:: class Page(Document): title = StringField(max_length=200, required=True) meta = {'collection': 'cmsPage'} Capped collections ------------------ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary. :attr:`max_documents` is the maximum number of documents that is allowed to be stored in the collection, and :attr:`max_size` is the maximum size of the collection in bytes. If :attr:`max_size` is not specified and :attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB). 
:attr:`unique` (Default: False) Whether the index should be unique.
:attr:`~mongoengine.Document.meta`. This ensures that the shard key is sent with the query
As this new class is not a direct subclass of
To disable inheritance on a document class, set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` dictionary:: # Will work with data in an existing collection named 'cmsPage' class Page(Document): title = StringField(max_length=200, required=True) meta = { 'collection': 'cmsPage', 'allow_inheritance': False, } MongoEngine-mongoengine-7a1b110/docs/guide/document-instances.rst000066400000000000000000000065261177143177100251440ustar00rootroot00000000000000=================== Documents instances =================== To create a new document object, create an instance of the relevant document class, providing values for its fields as its constructor keyword arguments. You may provide values for any of the fields on the document:: >>> page = Page(title="Test Page") >>> page.title 'Test Page' You may also assign values to the document's fields using standard object attribute syntax:: >>> page.title = "Example Page" >>> page.title 'Example Page' Saving and deleting documents ============================= MongoEngine tracks changes to documents to provide efficient saving. To save the document to the database, call the :meth:`~mongoengine.Document.save` method. If the document does not exist in the database, it will be created. If it does already exist, then any changes will be updated atomically. For example:: >>> page = Page(title="Test Page") >>> page.save() # Performs an insert >>> page.title = "My Page" >>> page.save() # Performs an atomic set on the title field. .. note:: Changes to documents are tracked and on the whole perform `set` operations. * ``list_field.pop(0)`` - *sets* the resulting list * ``del(list_field)`` - *unsets* whole list .. seealso:: :ref:`guide-atomic-updates` Cascading Saves --------------- If your document contains :class:`~mongoengine.ReferenceField` or :class:`~mongoengine.GenericReferenceField` objects, then by default the :meth:`~mongoengine.Document.save` method will automatically save any changes to those objects as well. 
You can also access the document's "primary key" using the :attr:`pk` field; it is an alias to :attr:`id`::
    marmot = Animal(genus='Marmota', family='Sciuridae')
You must be careful to delete any files in a Document as above before deleting the Document itself. Replacing files --------------- Files can be replaced with the :func:`replace` method. This works just like the :func:`put` method so even metadata can (and should) be replaced:: another_marmot = open('another_marmot.png', 'r') marmot.photo.replace(another_marmot, content_type='image/png') MongoEngine-mongoengine-7a1b110/docs/guide/index.rst000066400000000000000000000002511177143177100224350ustar00rootroot00000000000000========== User Guide ========== .. toctree:: :maxdepth: 2 installing connecting defining-documents document-instances querying gridfs signals MongoEngine-mongoengine-7a1b110/docs/guide/installing.rst000066400000000000000000000017231177143177100234770ustar00rootroot00000000000000====================== Installing MongoEngine ====================== To use MongoEngine, you will need to download `MongoDB `_ and ensure it is running in an accessible location. You will also need `PyMongo `_ to use MongoEngine, but if you install MongoEngine using setuptools, then the dependencies will be handled for you. MongoEngine is available on PyPI, so to use it you can use :program:`pip`: .. code-block:: console $ pip install mongoengine Alternatively, if you don't have setuptools installed, `download it from PyPi `_ and run .. code-block:: console $ python setup.py install To use the bleeding-edge version of MongoEngine, you can get the source from `GitHub `_ and install it as above: .. code-block:: console $ git clone git://github.com/hmarr/mongoengine $ cd mongoengine $ python setup.py install MongoEngine-mongoengine-7a1b110/docs/guide/querying.rst000066400000000000000000000537451177143177100232110ustar00rootroot00000000000000===================== Querying the database ===================== :class:`~mongoengine.Document` classes have an :attr:`objects` attribute, which is used for accessing the objects in the database associated with the class. 
The :attr:`objects` attribute is actually a :class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new :class:`~mongoengine.queryset.QuerySet` object on access. The :class:`~mongoengine.queryset.QuerySet` object may be iterated over to fetch documents from the database:: # Prints out the names of all the users in the database for user in User.objects: print user.name .. note:: Once the iteration finishes (when :class:`StopIteration` is raised), :meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the :class:`~mongoengine.queryset.QuerySet` may be iterated over again. The results of the first iteration are *not* cached, so the database will be hit each time the :class:`~mongoengine.queryset.QuerySet` is iterated over. Filtering queries ================= The query may be filtered by calling the :class:`~mongoengine.queryset.QuerySet` object with field lookup keyword arguments. The keys in the keyword arguments correspond to fields on the :class:`~mongoengine.Document` you are querying:: # This will return a QuerySet that will only iterate over users whose # 'country' field is set to 'uk' uk_users = User.objects(country='uk') Fields on embedded documents may also be referred to using field lookup syntax by using a double-underscore in place of the dot in object attribute access syntax:: # This will return a QuerySet that will only iterate over pages that have # been written by a user whose 'country' field is set to 'uk' uk_pages = Page.objects(author__country='uk') Query operators =============== Operators other than equality may also be used in queries; just attach the operator name to a key with a double-underscore:: # Only find users whose age is 18 or less young_users = Users.objects(age__lte=18) Available operators are as follows: * ``ne`` -- not equal to * ``lt`` -- less than * ``lte`` -- less than or equal to * ``gt`` -- greater than * ``gte`` -- greater than or equal to * ``not`` -- negate a standard check, may be used 
* ``size`` -- the size of the array is the same as the value provided
or skip a number of results in your query.
:meth:`~mongoengine.queryset.QuerySet.limit` and
:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on
:class:`~mongoengine.queryset.QuerySet` objects, but the preferred syntax for
An additional keyword argument, :attr:`defaults` may be provided, which will be used as default values for the new document, in the case that it should need to be created:: >>> a, created = User.objects.get_or_create(name='User A', defaults={'age': 30}) >>> b, created = User.objects.get_or_create(name='User A', defaults={'age': 40}) >>> a.name == b.name and a.age == b.age True Default Document queries ======================== By default, the objects :attr:`~mongoengine.Document.objects` attribute on a document returns a :class:`~mongoengine.queryset.QuerySet` that doesn't filter the collection -- it returns all objects. This may be changed by defining a method on a document that modifies a queryset. The method should accept two arguments -- :attr:`doc_cls` and :attr:`queryset`. The first argument is the :class:`~mongoengine.Document` class that the method is defined on (in this sense, the method is more like a :func:`classmethod` than a regular method), and the second argument is the initial queryset. The method needs to be decorated with :func:`~mongoengine.queryset.queryset_manager` in order for it to be recognised. 
:: class BlogPost(Document): title = StringField() date = DateTimeField() @queryset_manager def objects(doc_cls, queryset): # This may actually also be done by defining a default ordering for # the document, but this illustrates the use of manager methods return queryset.order_by('-date') You don't need to call your method :attr:`objects` -- you may define as many custom manager methods as you like:: class BlogPost(Document): title = StringField() published = BooleanField() @queryset_manager def live_posts(doc_cls, queryset): return queryset.filter(published=True) BlogPost(title='test1', published=False).save() BlogPost(title='test2', published=True).save() assert len(BlogPost.objects) == 2 assert len(BlogPost.live_posts) == 1 Custom QuerySets ================ Should you want to add custom methods for interacting with or filtering documents, extending the :class:`~mongoengine.queryset.QuerySet` class may be the way to go. To use a custom :class:`~mongoengine.queryset.QuerySet` class on a document, set ``queryset_class`` to the custom class in a :class:`~mongoengine.Document`\ s ``meta`` dictionary:: class AwesomerQuerySet(QuerySet): pass class Page(Document): meta = {'queryset_class': AwesomerQuerySet} .. versionadded:: 0.4 Aggregation =========== MongoDB provides some aggregation methods out of the box, but there are not as many as you typically get with an RDBMS. MongoEngine provides a wrapper around the built-in methods and provides some of its own, which are implemented as Javascript code that is executed on the database server. 
:class:`~mongoengine.EmbeddedDocument`\ s, which represent the comments on a blog post).
:class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the
number of queries to mongo.
Advanced queries ================ Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword arguments can't fully express the query you want to use -- for example if you need to combine a number of constraints using *and* and *or*. This is made possible in MongoEngine through the :class:`~mongoengine.queryset.Q` class. A :class:`~mongoengine.queryset.Q` object represents part of a query, and can be initialised using the same keyword-argument syntax you use to query documents. To build a complex query, you may combine :class:`~mongoengine.queryset.Q` objects using the ``&`` (and) and ``|`` (or) operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the first positional argument to :attr:`Document.objects` when you filter it by calling it with keyword arguments:: # Get published posts Post.objects(Q(published=True) | Q(publish_date__lte=datetime.now())) # Get top posts Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000)) .. _guide-atomic-updates: Atomic updates ============== Documents may be updated atomically by using the :meth:`~mongoengine.queryset.QuerySet.update_one` and :meth:`~mongoengine.queryset.QuerySet.update` methods on a :meth:`~mongoengine.queryset.QuerySet`. 
* ``push`` -- append a value to a list
(set using the :attr:`db_field` keyword argument to a field constructor).
(this functionality is already available through
The following example shows how the substitutions are made:: class Comment(EmbeddedDocument): content = StringField(db_field='body') class BlogPost(Document): title = StringField(db_field='doctitle') comments = ListField(EmbeddedDocumentField(Comment), name='cs') # Returns a list of dictionaries. Each dictionary contains a value named # "document", which corresponds to the "title" field on a BlogPost, and # "comment", which corresponds to an individual comment. The substitutions # made are shown in the comments. BlogPost.objects.exec_js(""" function() { var comments = []; db[collection].find(query).forEach(function(doc) { // doc[~comments] -> doc["cs"] var docComments = doc[~comments]; for (var i = 0; i < docComments.length; i++) { // doc[~comments][i] -> doc["cs"][i] var comment = doc[~comments][i]; comments.push({ // doc[~title] -> doc["doctitle"] 'document': doc[~title], // comment[~comments.content] -> comment["body"] 'comment': comment[~comments.content] }); } }); return comments; } """) MongoEngine-mongoengine-7a1b110/docs/guide/signals.rst000066400000000000000000000026051177143177100227730ustar00rootroot00000000000000.. _signals: Signals ======= .. versionadded:: 0.5 .. note:: Signal support is provided by the excellent `blinker`_ library and will gracefully fall back if it is not available. 
The following document signals exist in MongoEngine and are pretty self-explanatory: * `mongoengine.signals.pre_init` * `mongoengine.signals.post_init` * `mongoengine.signals.pre_save` * `mongoengine.signals.post_save` * `mongoengine.signals.pre_delete` * `mongoengine.signals.post_delete` * `mongoengine.signals.pre_bulk_insert` * `mongoengine.signals.post_bulk_insert` Example usage:: from mongoengine import * from mongoengine import signals class Author(Document): name = StringField() def __unicode__(self): return self.name @classmethod def pre_save(cls, sender, document, **kwargs): logging.debug("Pre Save: %s" % document.name) @classmethod def post_save(cls, sender, document, **kwargs): logging.debug("Post Save: %s" % document.name) if 'created' in kwargs: if kwargs['created']: logging.debug("Created") else: logging.debug("Updated") signals.pre_save.connect(Author.pre_save, sender=Author) signals.post_save.connect(Author.post_save, sender=Author) .. _blinker: http://pypi.python.org/pypi/blinker MongoEngine-mongoengine-7a1b110/docs/index.rst000066400000000000000000000027421177143177100213470ustar00rootroot00000000000000============================== MongoEngine User Documentation ============================== **MongoEngine** is an Object-Document Mapper, written in Python for working with MongoDB. To install it, simply run .. code-block:: console # pip install -U mongoengine :doc:`tutorial` Start here for a quick overview. :doc:`guide/index` The Full guide to MongoEngine :doc:`apireference` The complete API documentation. :doc:`upgrade` How to upgrade MongoEngine. :doc:`django` Using MongoEngine and Django Community --------- To get help with using MongoEngine, use the `MongoEngine Users mailing list `_ or come chat on the `#mongoengine IRC channel `_. Contributing ------------ The source is available on `GitHub `_ and contributions are always encouraged. Contributions can be as simple as minor tweaks to this documentation. 
To contribute, fork the project on `GitHub `_ and send a pull request. Also, you can join the developers' `mailing list `_. Changes ------- See the :doc:`changelog` for a full list of changes to MongoEngine and :doc:`upgrade` for upgrade information. .. toctree:: :hidden: tutorial guide/index apireference django changelog upgrade Indices and tables ------------------ * :ref:`genindex` * :ref:`modindex` * :ref:`search` MongoEngine-mongoengine-7a1b110/docs/tutorial.rst000066400000000000000000000275321177143177100221070ustar00rootroot00000000000000======== Tutorial ======== This tutorial introduces **MongoEngine** by means of example --- we will walk through how to create a simple **Tumblelog** application. A Tumblelog is a type of blog where posts are not constrained to being conventional text-based posts. As well as text-based entries, users may post images, links, videos, etc. For simplicity's sake, we'll stick to text, image and link entries in our application. As the purpose of this tutorial is to introduce MongoEngine, we'll focus on the data-modelling side of the application, leaving out a user interface. Getting started =============== Before we start, make sure that a copy of MongoDB is running in an accessible location --- running it locally will be easier, but if that is not an option then it may be run on a remote server. Before we can start using MongoEngine, we need to tell it how to connect to our instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` function. The only argument we need to provide is the name of the MongoDB database to use:: from mongoengine import * connect('tumblelog') For more information about connecting to MongoDB see :ref:`guide-connecting`. Defining our documents ====================== MongoDB is *schemaless*, which means that no schema is enforced by the database --- we may add and remove fields however we want and MongoDB won't complain. 
This makes life a lot easier in many regards, especially when there is a change to the data model. However, defining schemata for our documents can help to iron out bugs involving incorrect types or missing fields, and also allow us to define utility methods on our documents in the same way that traditional :abbr:`ORMs (Object-Relational Mappers)` do. In our Tumblelog application we need to store several different types of information. We will need to have a collection of **users**, so that we may link posts to an individual. We also need to store our different types **posts** (text, image and link) in the database. To aid navigation of our Tumblelog, posts may have **tags** associated with them, so that the list of posts shown to the user may be limited to posts that have been assigned a specified tag. Finally, it would be nice if **comments** could be added to posts. We'll start with **users**, as the others are slightly more involved. Users ----- Just as if we were using a relational database with an ORM, we need to define which fields a :class:`User` may have, and what their types will be:: class User(Document): email = StringField(required=True) first_name = StringField(max_length=50) last_name = StringField(max_length=50) This looks similar to how a the structure of a table would be defined in a regular ORM. The key difference is that this schema will never be passed on to MongoDB --- this will only be enforced at the application level. Also, the User documents will be stored in a MongoDB *collection* rather than a table. Posts, Comments and Tags ------------------------ Now we'll think about how to store the rest of the information. If we were using a relational database, we would most likely have a table of **posts**, a table of **comments** and a table of **tags**. To associate the comments with individual posts, we would put a column in the comments table that contained a foreign key to the posts table. 
We'd also need a link table to provide the many-to-many relationship between posts and tags. Then we'd need to address the problem of storing the specialised post-types (text, image and link). There are several ways we can achieve this, but each of them have their problems --- none of them stand out as particularly intuitive solutions. Posts ^^^^^ But MongoDB *isn't* a relational database, so we're not going to do it that way. As it turns out, we can use MongoDB's schemaless nature to provide us with a much nicer solution. We will store all of the posts in *one collection* --- each post type will just have the fields it needs. If we later want to add video posts, we don't have to modify the collection at all, we just *start using* the new fields we need to support video posts. This fits with the Object-Oriented principle of *inheritance* nicely. We can think of :class:`Post` as a base class, and :class:`TextPost`, :class:`ImagePost` and :class:`LinkPost` as subclasses of :class:`Post`. In fact, MongoEngine supports this kind of modelling out of the box:: class Post(Document): title = StringField(max_length=120, required=True) author = ReferenceField(User) class TextPost(Post): content = StringField() class ImagePost(Post): image_path = StringField() class LinkPost(Post): link_url = StringField() We are storing a reference to the author of the posts using a :class:`~mongoengine.ReferenceField` object. These are similar to foreign key fields in traditional ORMs, and are automatically translated into references when they are saved, and dereferenced when they are loaded. Tags ^^^^ Now that we have our Post models figured out, how will we attach tags to them? MongoDB allows us to store lists of items natively, so rather than having a link table, we can just store a list of tags in each post. So, for both efficiency and simplicity's sake, we'll store the tags as strings directly within the post, rather than storing references to tags in a separate collection. 
Especially as tags are generally very short (often even shorter than a document's id), this denormalisation won't impact very strongly on the size of our database. So let's take a look that the code our modified :class:`Post` class:: class Post(Document): title = StringField(max_length=120, required=True) author = ReferenceField(User) tags = ListField(StringField(max_length=30)) The :class:`~mongoengine.ListField` object that is used to define a Post's tags takes a field object as its first argument --- this means that you can have lists of any type of field (including lists). Note that we don't need to modify the specialised post types as they all inherit from :class:`Post`. Comments ^^^^^^^^ A comment is typically associated with *one* post. In a relational database, to display a post with its comments, we would have to retrieve the post from the database, then query the database again for the comments associated with the post. This works, but there is no real reason to be storing the comments separately from their associated posts, other than to work around the relational model. Using MongoDB we can store the comments as a list of *embedded documents* directly on a post document. An embedded document should be treated no differently that a regular document; it just doesn't have its own collection in the database. 
Using MongoEngine, we can define the structure of embedded documents, along with utility methods, in exactly the same way we do with regular documents:: class Comment(EmbeddedDocument): content = StringField() name = StringField(max_length=120) We can then store a list of comment documents in our post document:: class Post(Document): title = StringField(max_length=120, required=True) author = ReferenceField(User) tags = ListField(StringField(max_length=30)) comments = ListField(EmbeddedDocumentField(Comment)) Handling deletions of references ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The :class:`~mongoengine.ReferenceField` object takes a keyword `reverse_delete_rule` for handling deletion rules if the reference is deleted. To delete all the posts if a user is deleted set the rule:: class Post(Document): title = StringField(max_length=120, required=True) author = ReferenceField(User, reverse_delete_rule=CASCADE) tags = ListField(StringField(max_length=30)) comments = ListField(EmbeddedDocumentField(Comment)) See :class:`~mongoengine.ReferenceField` for more information. ..note:: MapFields and DictFields currently don't support automatic handling of deleted references Adding data to our Tumblelog ============================ Now that we've defined how our documents will be structured, let's start adding some documents to the database. Firstly, we'll need to create a :class:`User` object:: john = User(email='jdoe@example.com', first_name='John', last_name='Doe') john.save() Note that we could have also defined our user using attribute syntax:: john = User(email='jdoe@example.com') john.first_name = 'John' john.last_name = 'Doe' john.save() Now that we've got our user in the database, let's add a couple of posts:: post1 = TextPost(title='Fun with MongoEngine', author=john) post1.content = 'Took a look at MongoEngine today, looks pretty cool.' 
post1.tags = ['mongodb', 'mongoengine'] post1.save() post2 = LinkPost(title='MongoEngine Documentation', author=john) post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' post2.tags = ['mongoengine'] post2.save() Note that if you change a field on a object that has already been saved, then call :meth:`save` again, the document will be updated. Accessing our data ================== So now we've got a couple of posts in our database, how do we display them? Each document class (i.e. any class that inherits either directly or indirectly from :class:`~mongoengine.Document`) has an :attr:`objects` attribute, which is used to access the documents in the database collection associated with that class. So let's see how we can get our posts' titles:: for post in Post.objects: print post.title Retrieving type-specific information ------------------------------------ This will print the titles of our posts, one on each line. But What if we want to access the type-specific data (link_url, content, etc.)? One way is simply to use the :attr:`objects` attribute of a subclass of :class:`Post`:: for post in TextPost.objects: print post.content Using TextPost's :attr:`objects` attribute only returns documents that were created using :class:`TextPost`. Actually, there is a more general rule here: the :attr:`objects` attribute of any subclass of :class:`~mongoengine.Document` only looks for documents that were created using that subclass or one of its subclasses. So how would we display all of our posts, showing only the information that corresponds to each post's specific type? There is a better way than just using each of the subclasses individually. When we used :class:`Post`'s :attr:`objects` attribute earlier, the objects being returned weren't actually instances of :class:`Post` --- they were instances of the subclass of :class:`Post` that matches the post's type. 
Let's look at how this works in practice:: for post in Post.objects: print post.title print '=' * len(post.title) if isinstance(post, TextPost): print post.content if isinstance(post, LinkPost): print 'Link:', post.link_url print This would print the title of each post, followed by the content if it was a text post, and "Link: " if it was a link post. Searching our posts by tag -------------------------- The :attr:`objects` attribute of a :class:`~mongoengine.Document` is actually a :class:`~mongoengine.queryset.QuerySet` object. This lazily queries the database only when you need the data. It may also be filtered to narrow down your query. Let's adjust our query so that only posts with the tag "mongodb" are returned:: for post in Post.objects(tags='mongodb'): print post.title There are also methods available on :class:`~mongoengine.queryset.QuerySet` objects that allow different results to be returned, for example, calling :meth:`first` on the :attr:`objects` attribute will return a single document, the first matched by the query you provide. Aggregation functions may also be used on :class:`~mongoengine.queryset.QuerySet` objects:: num_posts = Post.objects(tags='mongodb').count() print 'Found %d posts with tag "mongodb"' % num_posts MongoEngine-mongoengine-7a1b110/docs/upgrade.rst000066400000000000000000000066301177143177100216670ustar00rootroot00000000000000========= Upgrading ========= 0.5 to 0.6 ========== Embedded Documents - if you had a `pk` field you will have to rename it from `_id` to `pk` as pk is no longer a property of Embedded Documents. Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw an InvalidDocument error as they aren't currently supported. Document._get_subclasses - Is no longer used and the class method has been removed. Document.objects.with_id - now raises an InvalidQueryError if used with a filter. 
FutureWarning - A future warning has been added to all inherited classes that don't define `allow_inheritance` in their meta. You may need to update pyMongo to 2.0 for use with Sharding. 0.4 to 0.5 =========== There have been the following backwards incompatibilities from 0.4 to 0.5. The main areas of changed are: choices in fields, map_reduce and collection names. Choice options: --------------- Are now expected to be an iterable of tuples, with the first element in each tuple being the actual value to be stored. The second element is the human-readable name for the option. PyMongo / MongoDB ----------------- map reduce now requires pymongo 1.11+- The pymongo merge_output and reduce_output parameters, have been depreciated. More methods now use map_reduce as db.eval is not supported for sharding as such the following have been changed: * :meth:`~mongoengine.queryset.QuerySet.sum` * :meth:`~mongoengine.queryset.QuerySet.average` * :meth:`~mongoengine.queryset.QuerySet.item_frequencies` Default collection naming ------------------------- Previously it was just lowercase, its now much more pythonic and readable as its lowercase and underscores, previously :: class MyAceDocument(Document): pass MyAceDocument._meta['collection'] == myacedocument In 0.5 this will change to :: class MyAceDocument(Document): pass MyAceDocument._get_collection_name() == my_ace_document To upgrade use a Mixin class to set meta like so :: class BaseMixin(object): meta = { 'collection': lambda c: c.__name__.lower() } class MyAceDocument(Document, BaseMixin): pass MyAceDocument._get_collection_name() == "myacedocument" Alternatively, you can rename your collections eg :: from mongoengine.connection import _get_db from mongoengine.base import _document_registry def rename_collections(): db = _get_db() failure = False collection_names = [d._get_collection_name() for d in _document_registry.values()] for new_style_name in collection_names: if not new_style_name: # embedded documents don't have 
collections continue old_style_name = new_style_name.replace('_', '') if old_style_name == new_style_name: continue # Nothing to do existing = db.collection_names() if old_style_name in existing: if new_style_name in existing: failure = True print "FAILED to rename: %s to %s (already exists)" % ( old_style_name, new_style_name) else: db[old_style_name].rename(new_style_name) print "Renamed: %s to %s" % (old_style_name, new_style_name) if failure: print "Upgrading collection names failed" else: print "Upgraded collection names" MongoEngine-mongoengine-7a1b110/mongoengine/000077500000000000000000000000001177143177100210565ustar00rootroot00000000000000MongoEngine-mongoengine-7a1b110/mongoengine/__init__.py000066400000000000000000000010061177143177100231640ustar00rootroot00000000000000import document from document import * import fields from fields import * import connection from connection import * import queryset from queryset import * import signals from signals import * __all__ = (document.__all__ + fields.__all__ + connection.__all__ + queryset.__all__ + signals.__all__) VERSION = (0, 6, 13) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) return version __version__ = get_version() MongoEngine-mongoengine-7a1b110/mongoengine/base.py000066400000000000000000001417761177143177100223620ustar00rootroot00000000000000import warnings from queryset import QuerySet, QuerySetManager from queryset import DoesNotExist, MultipleObjectsReturned from queryset import DO_NOTHING from mongoengine import signals import sys import pymongo from bson import ObjectId import operator from functools import partial from bson.dbref import DBRef class NotRegistered(Exception): pass class InvalidDocumentError(Exception): pass class ValidationError(AssertionError): """Validation exception. May represent an error validating a field or a document containing fields with validation errors. 
:ivar errors: A dictionary of errors for fields within this document or list, or None if the error is for an individual field. """ errors = {} field_name = None _message = None def __init__(self, message="", **kwargs): self.errors = kwargs.get('errors', {}) self.field_name = kwargs.get('field_name') self.message = message def __str__(self): return self.message def __repr__(self): return '%s(%s,)' % (self.__class__.__name__, self.message) def __getattribute__(self, name): message = super(ValidationError, self).__getattribute__(name) if name == 'message': if self.field_name: message = '%s ("%s")' % (message, self.field_name) if self.errors: message = '%s:\n%s' % (message, self._format_errors()) return message def _get_message(self): return self._message def _set_message(self, message): self._message = message message = property(_get_message, _set_message) def to_dict(self): """Returns a dictionary of all errors within a document Keys are field names or list indices and values are the validation error messages, or a nested dictionary of errors for an embedded document or list. 
""" def build_dict(source): errors_dict = {} if not source: return errors_dict if isinstance(source, dict): for field_name, error in source.iteritems(): errors_dict[field_name] = build_dict(error) elif isinstance(source, ValidationError) and source.errors: return build_dict(source.errors) else: return unicode(source) return errors_dict if not self.errors: return {} return build_dict(self.errors) def _format_errors(self): """Returns a string listing all errors within a document""" def format_error(field, value, prefix=''): prefix = "%s.%s" % (prefix, field) if prefix else "%s" % field if isinstance(value, dict): return '\n'.join( [format_error(k, value[k], prefix) for k in value]) else: return "%s: %s" % (prefix, value) return '\n'.join( [format_error(k, v) for k, v in self.to_dict().items()]) _document_registry = {} def get_document(name): doc = _document_registry.get(name, None) if not doc: # Possible old style names end = ".%s" % name possible_match = [k for k in _document_registry.keys() if k.endswith(end)] if len(possible_match) == 1: doc = _document_registry.get(possible_match.pop(), None) if not doc: raise NotRegistered(""" `%s` has not been registered in the document registry. Importing the document class automatically registers it, has it been imported? """.strip() % name) return doc class BaseField(object): """A base class for fields in a MongoDB document. Instances of this class may be added to subclasses of `Document` to define a document's schema. .. versionchanged:: 0.5 - added verbose and help text """ name = None # Fields may have _types inserted into indexes by default _index_with_types = True _geo_index = False # These track each time a Field instance is created. Used to retain order. # The auto_creation_counter is used for fields that MongoEngine implicitly # creates, creation_counter is used for all user-specified fields. 
creation_counter = 0 auto_creation_counter = -1 def __init__(self, db_field=None, name=None, required=False, default=None, unique=False, unique_with=None, primary_key=False, validation=None, choices=None, verbose_name=None, help_text=None): self.db_field = (db_field or name) if not primary_key else '_id' if name: msg = "Fields' 'name' attribute deprecated in favour of 'db_field'" warnings.warn(msg, DeprecationWarning) self.name = None self.required = required or primary_key self.default = default self.unique = bool(unique or unique_with) self.unique_with = unique_with self.primary_key = primary_key self.validation = validation self.choices = choices self.verbose_name = verbose_name self.help_text = help_text # Adjust the appropriate creation counter, and save our local copy. if self.db_field == '_id': self.creation_counter = BaseField.auto_creation_counter BaseField.auto_creation_counter -= 1 else: self.creation_counter = BaseField.creation_counter BaseField.creation_counter += 1 def __get__(self, instance, owner): """Descriptor for retrieving a value from a field in a document. Do any necessary conversion between Python and MongoDB types. """ if instance is None: # Document class being used rather than a document object return self # Get value from document instance if available, if not use default value = instance._data.get(self.name) if value is None: value = self.default # Allow callable default values if callable(value): value = value() return value def __set__(self, instance, value): """Descriptor for assigning a value to a field in a document. """ instance._data[self.name] = value instance._mark_as_changed(self.name) def error(self, message="", errors=None, field_name=None): """Raises a ValidationError. """ field_name = field_name if field_name else self.name raise ValidationError(message, errors=errors, field_name=field_name) def to_python(self, value): """Convert a MongoDB-compatible type to a Python type. 
""" return value def to_mongo(self, value): """Convert a Python type to a MongoDB-compatible type. """ return self.to_python(value) def prepare_query_value(self, op, value): """Prepare a value that is being used in a query for PyMongo. """ return value def validate(self, value): """Perform validation on a value. """ pass def _validate(self, value): from mongoengine import Document, EmbeddedDocument # check choices if self.choices: is_cls = isinstance(value, (Document, EmbeddedDocument)) value_to_check = value.__class__ if is_cls else value err_msg = 'an instance' if is_cls else 'one' if isinstance(self.choices[0], (list, tuple)): option_keys = [option_key for option_key, option_value in self.choices] if value_to_check not in option_keys: self.error('Value must be %s of %s' % (err_msg, unicode(option_keys))) elif value_to_check not in self.choices: self.error('Value must be %s of %s' % (err_msg, unicode(self.choices))) # check validation argument if self.validation is not None: if callable(self.validation): if not self.validation(value): self.error('Value does not match custom validation method') else: raise ValueError('validation argument for "%s" must be a ' 'callable.' % self.name) self.validate(value) class ComplexBaseField(BaseField): """Handles complex fields, such as lists / dictionaries. Allows for nesting of embedded documents inside complex types. Handles the lazy dereferencing of a queryset by lazily dereferencing all items in a list / dict rather than one at a time. .. versionadded:: 0.5 """ field = None _dereference = False def __get__(self, instance, owner): """Descriptor to automatically dereference references. 
""" if instance is None: # Document class being used rather than a document object return self if not self._dereference and instance._initialised: from dereference import DeReference self._dereference = DeReference() # Cached instance._data[self.name] = self._dereference( instance._data.get(self.name), max_depth=1, instance=instance, name=self.name ) value = super(ComplexBaseField, self).__get__(instance, owner) # Convert lists / values so we can watch for any changes on them if isinstance(value, (list, tuple)) and not isinstance(value, BaseList): value = BaseList(value, instance, self.name) instance._data[self.name] = value elif isinstance(value, dict) and not isinstance(value, BaseDict): value = BaseDict(value, instance, self.name) instance._data[self.name] = value if self._dereference and instance._initialised and \ isinstance(value, (BaseList, BaseDict)) and not value._dereferenced: value = self._dereference( value, max_depth=1, instance=instance, name=self.name ) value._dereferenced = True instance._data[self.name] = value return value def __set__(self, instance, value): """Descriptor for assigning a value to a field in a document. """ instance._data[self.name] = value instance._mark_as_changed(self.name) def to_python(self, value): """Convert a MongoDB-compatible type to a Python type. 
""" from mongoengine import Document if isinstance(value, basestring): return value if hasattr(value, 'to_python'): return value.to_python() is_list = False if not hasattr(value, 'items'): try: is_list = True value = dict([(k, v) for k, v in enumerate(value)]) except TypeError: # Not iterable return the value return value if self.field: value_dict = dict([(key, self.field.to_python(item)) for key, item in value.items()]) else: value_dict = {} for k, v in value.items(): if isinstance(v, Document): # We need the id from the saved object to create the DBRef if v.pk is None: self.error('You can only reference documents once they' ' have been saved to the database') collection = v._get_collection_name() value_dict[k] = DBRef(collection, v.pk) elif hasattr(v, 'to_python'): value_dict[k] = v.to_python() else: value_dict[k] = self.to_python(v) if is_list: # Convert back to a list return [v for k, v in sorted(value_dict.items(), key=operator.itemgetter(0))] return value_dict def to_mongo(self, value): """Convert a Python type to a MongoDB-compatible type. 
""" from mongoengine import Document if isinstance(value, basestring): return value if hasattr(value, 'to_mongo'): return value.to_mongo() is_list = False if not hasattr(value, 'items'): try: is_list = True value = dict([(k, v) for k, v in enumerate(value)]) except TypeError: # Not iterable return the value return value if self.field: value_dict = dict([(key, self.field.to_mongo(item)) for key, item in value.items()]) else: value_dict = {} for k, v in value.items(): if isinstance(v, Document): # We need the id from the saved object to create the DBRef if v.pk is None: self.error('You can only reference documents once they' ' have been saved to the database') # If its a document that is not inheritable it won't have # _types / _cls data so make it a generic reference allows # us to dereference meta = getattr(v, 'meta', getattr(v, '_meta', {})) if meta and not meta.get('allow_inheritance', True) and not self.field: from fields import GenericReferenceField value_dict[k] = GenericReferenceField().to_mongo(v) else: collection = v._get_collection_name() value_dict[k] = DBRef(collection, v.pk) elif hasattr(v, 'to_mongo'): value_dict[k] = v.to_mongo() else: value_dict[k] = self.to_mongo(v) if is_list: # Convert back to a list return [v for k, v in sorted(value_dict.items(), key=operator.itemgetter(0))] return value_dict def validate(self, value): """If field is provided ensure the value is valid. 
""" errors = {} if self.field: if hasattr(value, 'iteritems'): sequence = value.iteritems() else: sequence = enumerate(value) for k, v in sequence: try: self.field._validate(v) except (ValidationError, AssertionError), error: if hasattr(error, 'errors'): errors[k] = error.errors else: errors[k] = error if errors: field_class = self.field.__class__.__name__ self.error('Invalid %s item (%s)' % (field_class, value), errors=errors) # Don't allow empty values if required if self.required and not value: self.error('Field is required and cannot be empty') def prepare_query_value(self, op, value): return self.to_mongo(value) def lookup_member(self, member_name): if self.field: return self.field.lookup_member(member_name) return None def _set_owner_document(self, owner_document): if self.field: self.field.owner_document = owner_document self._owner_document = owner_document def _get_owner_document(self, owner_document): self._owner_document = owner_document owner_document = property(_get_owner_document, _set_owner_document) class ObjectIdField(BaseField): """An field wrapper around MongoDB's ObjectIds. """ def to_python(self, value): return value def to_mongo(self, value): if not isinstance(value, ObjectId): try: return ObjectId(unicode(value)) except Exception, e: # e.message attribute has been deprecated since Python 2.6 self.error(unicode(e)) return value def prepare_query_value(self, op, value): return self.to_mongo(value) def validate(self, value): try: ObjectId(unicode(value)) except: self.error('Invalid Object ID') class DocumentMetaclass(type): """Metaclass for all documents. 
""" def __new__(cls, name, bases, attrs): def _get_mixin_fields(base): attrs = {} attrs.update(dict([(k, v) for k, v in base.__dict__.items() if issubclass(v.__class__, BaseField)])) # Handle simple mixin's with meta if hasattr(base, 'meta') and not isinstance(base, DocumentMetaclass): meta = attrs.get('meta', {}) meta.update(base.meta) attrs['meta'] = meta for p_base in base.__bases__: #optimize :-) if p_base in (object, BaseDocument): continue attrs.update(_get_mixin_fields(p_base)) return attrs metaclass = attrs.get('__metaclass__') super_new = super(DocumentMetaclass, cls).__new__ if metaclass and issubclass(metaclass, DocumentMetaclass): return super_new(cls, name, bases, attrs) doc_fields = {} class_name = [name] superclasses = {} simple_class = True for base in bases: # Include all fields present in superclasses if hasattr(base, '_fields'): doc_fields.update(base._fields) # Get superclasses from superclass superclasses[base._class_name] = base superclasses.update(base._superclasses) else: # Add any mixin fields attrs.update(_get_mixin_fields(base)) if hasattr(base, '_meta') and not base._meta.get('abstract'): # Ensure that the Document class may be subclassed - # inheritance may be disabled to remove dependency on # additional fields _cls and _types class_name.append(base._class_name) if not base._meta.get('allow_inheritance_defined', True): warnings.warn( "%s uses inheritance, the default for allow_inheritance " "is changing to off by default. Please add it to the " "document meta." 
% name, FutureWarning ) if base._meta.get('allow_inheritance', True) == False: raise ValueError('Document %s may not be subclassed' % base.__name__) else: simple_class = False doc_class_name = '.'.join(reversed(class_name)) meta = attrs.get('_meta', {}) meta.update(attrs.get('meta', {})) if 'allow_inheritance' not in meta: meta['allow_inheritance'] = True # Only simple classes - direct subclasses of Document - may set # allow_inheritance to False if not simple_class and not meta['allow_inheritance'] and not meta['abstract']: raise ValueError('Only direct subclasses of Document may set ' '"allow_inheritance" to False') attrs['_meta'] = meta attrs['_class_name'] = doc_class_name attrs['_superclasses'] = superclasses # Add the document's fields to the _fields attribute field_names = {} for attr_name, attr_value in attrs.items(): if hasattr(attr_value, "__class__") and \ issubclass(attr_value.__class__, BaseField): attr_value.name = attr_name if not attr_value.db_field: attr_value.db_field = attr_name doc_fields[attr_name] = attr_value field_names[attr_value.db_field] = field_names.get(attr_value.db_field, 0) + 1 duplicate_db_fields = [k for k, v in field_names.items() if v > 1] if duplicate_db_fields: raise InvalidDocumentError("Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields)) attrs['_fields'] = doc_fields attrs['_db_field_map'] = dict([(k, v.db_field) for k, v in doc_fields.items() if k != v.db_field]) attrs['_reverse_db_field_map'] = dict([(v, k) for k, v in attrs['_db_field_map'].items()]) from mongoengine import Document, EmbeddedDocument, DictField new_class = super_new(cls, name, bases, attrs) for field in new_class._fields.values(): field.owner_document = new_class delete_rule = getattr(field, 'reverse_delete_rule', DO_NOTHING) f = field if isinstance(f, ComplexBaseField) and hasattr(f, 'field'): delete_rule = getattr(f.field, 'reverse_delete_rule', DO_NOTHING) if isinstance(f, DictField) and delete_rule != DO_NOTHING: raise 
InvalidDocumentError("Reverse delete rules are not supported for %s (field: %s)" % (field.__class__.__name__, field.name)) f = field.field if delete_rule != DO_NOTHING: if issubclass(new_class, EmbeddedDocument): raise InvalidDocumentError("Reverse delete rules are not supported for EmbeddedDocuments (field: %s)" % field.name) f.document_type.register_delete_rule(new_class, field.name, delete_rule) if field.name and hasattr(Document, field.name) and EmbeddedDocument not in new_class.mro(): raise InvalidDocumentError("%s is a document method and not a valid field name" % field.name) module = attrs.get('__module__') base_excs = tuple(base.DoesNotExist for base in bases if hasattr(base, 'DoesNotExist')) or (DoesNotExist,) exc = subclass_exception('DoesNotExist', base_excs, module) new_class.add_to_class('DoesNotExist', exc) base_excs = tuple(base.MultipleObjectsReturned for base in bases if hasattr(base, 'MultipleObjectsReturned')) base_excs = base_excs or (MultipleObjectsReturned,) exc = subclass_exception('MultipleObjectsReturned', base_excs, module) new_class.add_to_class('MultipleObjectsReturned', exc) global _document_registry _document_registry[doc_class_name] = new_class return new_class def add_to_class(self, name, value): setattr(self, name, value) class TopLevelDocumentMetaclass(DocumentMetaclass): """Metaclass for top-level documents (i.e. documents that have their own collection in the database. """ def __new__(cls, name, bases, attrs): super_new = super(TopLevelDocumentMetaclass, cls).__new__ # Classes defined in this package are abstract and should not have # their own metadata with DB collection, etc. # __metaclass__ is only set on the class with the __metaclass__ # attribute (i.e. it is not set on subclasses). This differentiates # 'real' documents from the 'Document' class # # Also assume a class is abstract if it has abstract set to True in # its meta dictionary. This allows custom Document superclasses. 
        if (attrs.get('__metaclass__') == TopLevelDocumentMetaclass or
            ('meta' in attrs and attrs['meta'].get('abstract', False))):
            # Make sure no base class was non-abstract
            non_abstract_bases = [b for b in bases
                                  if hasattr(b, '_meta') and not b._meta.get('abstract', False)]
            if non_abstract_bases:
                raise ValueError("Abstract document cannot have non-abstract base")
            return super_new(cls, name, bases, attrs)

        # Default collection name: CamelCase -> snake_case of the class name.
        collection = ''.join('_%s' % c if c.isupper() else c for c in name).strip('_').lower()

        id_field = None
        abstract_base_indexes = []
        base_indexes = []
        base_meta = {}

        # Subclassed documents inherit collection from superclass
        for base in bases:
            if hasattr(base, '_meta'):
                # Subclasses must share the base's collection; warn and drop
                # any attempt to override it.
                if 'collection' in attrs.get('meta', {}) and not base._meta.get('abstract', False):
                    import warnings
                    msg = "Trying to set a collection on a subclass (%s)" % name
                    warnings.warn(msg, SyntaxWarning)
                    del(attrs['meta']['collection'])
                if base._get_collection_name():
                    collection = base._get_collection_name()

                # Propagate index options.
                for key in ('index_background', 'index_drop_dups', 'index_opts'):
                    if key in base._meta:
                        base_meta[key] = base._meta[key]

                id_field = id_field or base._meta.get('id_field')
                if base._meta.get('abstract', False):
                    abstract_base_indexes += base._meta.get('indexes', [])
                else:
                    base_indexes += base._meta.get('indexes', [])
                # Propagate 'allow_inheritance'
                if 'allow_inheritance' in base._meta:
                    base_meta['allow_inheritance'] = base._meta['allow_inheritance']
                if 'queryset_class' in base._meta:
                    base_meta['queryset_class'] = base._meta['queryset_class']
            # NOTE(review): this try sits at loop level, so it also probes
            # bases without _meta; TypeError covers non-descriptor access.
            try:
                base_meta['objects'] = base.__getattribute__(base, 'objects')
            except TypeError:
                pass
            except AttributeError:
                pass

        meta = {
            'abstract': False,
            'collection': collection,
            'max_documents': None,
            'max_size': None,
            'ordering': [],  # default ordering applied at runtime
            'indexes': [],  # indexes to be ensured at runtime
            'id_field': id_field,
            'index_background': False,
            'index_drop_dups': False,
            'index_opts': {},
            'queryset_class': QuerySet,
            'delete_rules': {},
            'allow_inheritance': True
        }

        # Remember whether the user set allow_inheritance explicitly so the
        # FutureWarning in DocumentMetaclass only fires when they did not.
        allow_inheritance_defined = ('allow_inheritance' in base_meta or
                                     'allow_inheritance' in attrs.get('meta', {}))
        meta['allow_inheritance_defined'] = allow_inheritance_defined

        meta.update(base_meta)

        # Apply document-defined meta options
        meta.update(attrs.get('meta', {}))
        attrs['_meta'] = meta

        # Set up collection manager, needs the class to have fields so use
        # DocumentMetaclass before instantiating CollectionManager object
        new_class = super_new(cls, name, bases, attrs)

        collection = attrs['_meta'].get('collection', None)
        if callable(collection):
            # collection may be a factory taking the new class.
            new_class._meta['collection'] = collection(new_class)

        # Provide a default queryset unless one has been manually provided
        manager = attrs.get('objects', meta.get('objects', QuerySetManager()))
        if hasattr(manager, 'queryset_class'):
            meta['queryset_class'] = manager.queryset_class
        new_class.objects = manager

        indicies = meta['indexes'] + abstract_base_indexes
        user_indexes = [QuerySet._build_index_spec(new_class, spec)
                        for spec in indicies] + base_indexes
        new_class._meta['indexes'] = user_indexes

        unique_indexes = cls._unique_with_indexes(new_class)
        new_class._meta['unique_indexes'] = unique_indexes

        for field_name, field in new_class._fields.items():
            # Check for custom primary key
            if field.primary_key:
                current_pk = new_class._meta['id_field']
                if current_pk and current_pk != field_name:
                    raise ValueError('Cannot override primary key field')

                if not current_pk:
                    new_class._meta['id_field'] = field_name

                    # Make 'Document.id' an alias to the real primary key field
                    new_class.id = field

        if not new_class._meta['id_field']:
            # No explicit primary key: synthesise the standard ObjectId _id.
            new_class._meta['id_field'] = 'id'
            new_class._fields['id'] = ObjectIdField(db_field='_id')
            new_class.id = new_class._fields['id']

        return new_class

    @classmethod
    def _unique_with_indexes(cls, new_class, namespace=""):
        # Build the list of index specs implied by unique / unique_with
        # declarations, recursing into embedded documents with a dotted
        # namespace prefix.
        unique_indexes = []
        for field_name, field in new_class._fields.items():
            # Generate a list of indexes needed by uniqueness constraints
            if field.unique:
                field.required = True
                unique_fields = [field.db_field]

                # Add any unique_with fields to the back of the index spec
                if field.unique_with:
                    if isinstance(field.unique_with, basestring):
                        field.unique_with = [field.unique_with]

                    # Convert unique_with field names to real field names
                    unique_with = []
                    for other_name in field.unique_with:
                        parts = other_name.split('.')
                        # Lookup real name
                        parts = QuerySet._lookup_field(new_class, parts)
                        name_parts = [part.db_field for part in parts]
                        unique_with.append('.'.join(name_parts))
                        # Unique field should be required
                        parts[-1].required = True
                    unique_fields += unique_with

                # Add the new index to the list
                index = [("%s%s" % (namespace, f), pymongo.ASCENDING)
                         for f in unique_fields]
                unique_indexes.append(index)

            # Grab any embedded document field unique indexes
            if field.__class__.__name__ == "EmbeddedDocumentField" and field.document_type != new_class:
                field_namespace = "%s." % field_name
                unique_indexes += cls._unique_with_indexes(field.document_type,
                                                           field_namespace)

        return unique_indexes


class BaseDocument(object):
    # Class-level flags; _dynamic documents accept arbitrary attributes,
    # _dynamic_lock suppresses dynamic handling during __init__.
    _dynamic = False
    _created = True
    _dynamic_lock = True
    _initialised = False

    def __init__(self, **values):
        signals.pre_init.send(self.__class__, document=self, values=values)

        self._data = {}

        # Assign default values to instance
        for attr_name, field in self._fields.items():
            value = getattr(self, attr_name, None)
            setattr(self, attr_name, value)

        # Set passed values after initialisation
        if self._dynamic:
            self._dynamic_fields = {}
            dynamic_data = {}
            for key, value in values.items():
                if key in self._fields or key == '_id':
                    setattr(self, key, value)
                elif self._dynamic:
                    # Defer unknown keys until the dynamic lock is released.
                    dynamic_data[key] = value
        else:
            for key, value in values.items():
                key = self._reverse_db_field_map.get(key, key)
                setattr(self, key, value)

        # Set any get_fieldname_display methods
        self.__set_field_display()

        if self._dynamic:
            self._dynamic_lock = False
            for key, value in dynamic_data.items():
                setattr(self, key, value)

        # Flag initialised
        self._initialised = True
        signals.post_init.send(self.__class__, document=self)
    def __setattr__(self, name, value):
        # Handle dynamic data only if an initialised dynamic document
        if self._dynamic and not self._dynamic_lock:

            field = None
            # Unknown, non-private attribute on a dynamic document: create a
            # DynamicField on the fly to hold it.
            if not hasattr(self, name) and not name.startswith('_'):
                from fields import DynamicField
                field = DynamicField(db_field=name)
                field.name = name
                self._dynamic_fields[name] = field

            if not name.startswith('_'):
                value = self.__expand_dynamic_values(name, value)

            # Handle marking data as changed
            if name in self._dynamic_fields:
                self._data[name] = value
                if hasattr(self, '_changed_fields'):
                    self._mark_as_changed(name)

        # Shard key fields may never change once the document exists in the DB.
        if not self._created and name in self._meta.get('shard_key', tuple()):
            from queryset import OperationError
            raise OperationError("Shard Keys are immutable. Tried to update %s" % name)

        super(BaseDocument, self).__setattr__(name, value)

    def __expand_dynamic_values(self, name, value):
        """expand any dynamic values to their correct types / values"""
        if not isinstance(value, (dict, list, tuple)):
            return value

        # Normalise sequences to {index: item} so one recursion handles both
        # dicts and lists; converted back below.
        is_list = False
        if not hasattr(value, 'items'):
            is_list = True
            value = dict([(k, v) for k, v in enumerate(value)])

        if not is_list and '_cls' in value:
            # Embedded document data: rebuild the proper document instance.
            cls = get_document(value['_cls'])
            value = cls(**value)
            value._dynamic = True
            value._changed_fields = []
            return value

        data = {}
        for k, v in value.items():
            key = name if is_list else k
            data[k] = self.__expand_dynamic_values(key, v)

        if is_list:  # Convert back to a list
            data_items = sorted(data.items(), key=operator.itemgetter(0))
            value = [v for k, v in data_items]
        else:
            value = data

        # Convert lists / values so we can watch for any changes on them
        if isinstance(value, (list, tuple)) and not isinstance(value, BaseList):
            value = BaseList(value, self, name)
        elif isinstance(value, dict) and not isinstance(value, BaseDict):
            value = BaseDict(value, self, name)

        return value

    def validate(self):
        """Ensure that all fields' values are valid and that required fields
        are present.
""" # Get a list of tuples of field names and their current values fields = [(field, getattr(self, name)) for name, field in self._fields.items()] # Ensure that each field is matched to a valid value errors = {} for field, value in fields: if value is not None: try: field._validate(value) except ValidationError, error: errors[field.name] = error.errors or error except (ValueError, AttributeError, AssertionError), error: errors[field.name] = error elif field.required: errors[field.name] = ValidationError('Field is required', field_name=field.name) if errors: raise ValidationError('Errors encountered validating document', errors=errors) def to_mongo(self): """Return data dictionary ready for use with MongoDB. """ data = {} for field_name, field in self._fields.items(): value = getattr(self, field_name, None) if value is not None: data[field.db_field] = field.to_mongo(value) # Only add _cls and _types if allow_inheritance is not False if not (hasattr(self, '_meta') and self._meta.get('allow_inheritance', True) == False): data['_cls'] = self._class_name data['_types'] = self._superclasses.keys() + [self._class_name] if '_id' in data and data['_id'] is None: del data['_id'] if not self._dynamic: return data for name, field in self._dynamic_fields.items(): data[name] = field.to_mongo(self._data.get(name, None)) return data @classmethod def _get_collection_name(cls): """Returns the collection name for this class. """ return cls._meta.get('collection', None) @classmethod def _from_son(cls, son): """Create an instance of a Document (subclass) from a PyMongo SON. 
""" # get the class name from the document, falling back to the given # class if unavailable class_name = son.get('_cls', cls._class_name) data = dict(("%s" % key, value) for key, value in son.items()) if '_types' in data: del data['_types'] if '_cls' in data: del data['_cls'] # Return correct subclass for document type if class_name != cls._class_name: cls = get_document(class_name) changed_fields = [] errors_dict = {} for field_name, field in cls._fields.items(): if field.db_field in data: value = data[field.db_field] try: data[field_name] = (value if value is None else field.to_python(value)) if field_name != field.db_field: del data[field.db_field] except (AttributeError, ValueError), e: errors_dict[field_name] = e elif field.default: default = field.default if callable(default): default = default() if isinstance(default, BaseDocument): changed_fields.append(field_name) if errors_dict: errors = "\n".join(["%s - %s" % (k, v) for k, v in errors_dict.items()]) raise InvalidDocumentError(""" Invalid data to create a `%s` instance.\n%s""".strip() % (cls._class_name, errors)) obj = cls(**data) obj._changed_fields = changed_fields obj._created = False return obj def _mark_as_changed(self, key): """Marks a key as explicitly changed by the user """ if not key: return key = self._db_field_map.get(key, key) if hasattr(self, '_changed_fields') and key not in self._changed_fields: self._changed_fields.append(key) def _get_changed_fields(self, key='', inspected=None): """Returns a list of all fields that have explicitly been changed. 
""" from mongoengine import EmbeddedDocument, DynamicEmbeddedDocument _changed_fields = [] _changed_fields += getattr(self, '_changed_fields', []) inspected = inspected or set() if hasattr(self, 'id'): if self.id in inspected: return _changed_fields inspected.add(self.id) field_list = self._fields.copy() if self._dynamic: field_list.update(self._dynamic_fields) for field_name in field_list: db_field_name = self._db_field_map.get(field_name, field_name) key = '%s.' % db_field_name field = getattr(self, field_name, None) if hasattr(field, 'id'): if field.id in inspected: continue inspected.add(field.id) if isinstance(field, (EmbeddedDocument, DynamicEmbeddedDocument)) and db_field_name not in _changed_fields: # Grab all embedded fields that have been changed _changed_fields += ["%s%s" % (key, k) for k in field._get_changed_fields(key, inspected) if k] elif isinstance(field, (list, tuple, dict)) and db_field_name not in _changed_fields: # Loop list / dict fields as they contain documents # Determine the iterator to use if not hasattr(field, 'items'): iterator = enumerate(field) else: iterator = field.iteritems() for index, value in iterator: if not hasattr(value, '_get_changed_fields'): continue list_key = "%s%s." % (key, index) _changed_fields += ["%s%s" % (list_key, k) for k in value._get_changed_fields(list_key, inspected) if k] return _changed_fields def _delta(self): """Returns the delta (set, unset) of the changes for a document. Gets any values that have been explicitly changed. 
""" # Handles cases where not loaded from_son but has _id doc = self.to_mongo() set_fields = self._get_changed_fields() set_data = {} unset_data = {} parts = [] if hasattr(self, '_changed_fields'): set_data = {} # Fetch each set item from its path for path in set_fields: parts = path.split('.') d = doc new_path = [] for p in parts: if isinstance(d, DBRef): break elif p.isdigit(): d = d[int(p)] elif hasattr(d, 'get'): d = d.get(p) new_path.append(p) path = '.'.join(new_path) set_data[path] = d else: set_data = doc if '_id' in set_data: del(set_data['_id']) # Determine if any changed items were actually unset. for path, value in set_data.items(): if value or isinstance(value, bool): continue # If we've set a value that ain't the default value dont unset it. default = None if self._dynamic and len(parts) and parts[0] in self._dynamic_fields: del(set_data[path]) unset_data[path] = 1 continue elif path in self._fields: default = self._fields[path].default else: # Perform a full lookup for lists / embedded lookups d = self parts = path.split('.') db_field_name = parts.pop() for p in parts: if p.isdigit(): d = d[int(p)] elif hasattr(d, '__getattribute__') and not isinstance(d, dict): real_path = d._reverse_db_field_map.get(p, p) d = getattr(d, real_path) else: d = d.get(p) if hasattr(d, '_fields'): field_name = d._reverse_db_field_map.get(db_field_name, db_field_name) if field_name in d._fields: default = d._fields.get(field_name).default else: default = None if default is not None: if callable(default): default = default() if default != value: continue del(set_data[path]) unset_data[path] = 1 return set_data, unset_data @classmethod def _geo_indices(cls, inspected=None): inspected = inspected or [] geo_indices = [] inspected.append(cls) for field in cls._fields.values(): if hasattr(field, 'document_type'): field_cls = field.document_type if field_cls in inspected: continue if hasattr(field_cls, '_geo_indices'): geo_indices += field_cls._geo_indices(inspected) elif 
field._geo_index: geo_indices.append(field) return geo_indices def __getstate__(self): removals = ["get_%s_display" % k for k, v in self._fields.items() if v.choices] for k in removals: if hasattr(self, k): delattr(self, k) return self.__dict__ def __setstate__(self, __dict__): self.__dict__ = __dict__ self.__set_field_display() def __set_field_display(self): for attr_name, field in self._fields.items(): if field.choices: # dynamically adds a way to get the display value for a field with choices setattr(self, 'get_%s_display' % attr_name, partial(self.__get_field_display, field=field)) def __get_field_display(self, field): """Returns the display value for a choice field""" value = getattr(self, field.name) if field.choices and isinstance(field.choices[0], (list, tuple)): return dict(field.choices).get(value, value) return value def __iter__(self): return iter(self._fields) def __getitem__(self, name): """Dictionary-style field access, return a field's value if present. """ try: if name in self._fields: return getattr(self, name) except AttributeError: pass raise KeyError(name) def __setitem__(self, name, value): """Dictionary-style field access, set a field's value. 
""" # Ensure that the field exists before settings its value if name not in self._fields: raise KeyError(name) return setattr(self, name, value) def __contains__(self, name): try: val = getattr(self, name) return val is not None except AttributeError: return False def __len__(self): return len(self._data) def __repr__(self): try: u = unicode(self).encode('utf-8') except (UnicodeEncodeError, UnicodeDecodeError): u = '[Bad Unicode data]' return '<%s: %s>' % (self.__class__.__name__, u) def __str__(self): if hasattr(self, '__unicode__'): return unicode(self).encode('utf-8') return '%s object' % self.__class__.__name__ def __eq__(self, other): if isinstance(other, self.__class__) and hasattr(other, 'id'): if self.id == other.id: return True return False def __ne__(self, other): return not self.__eq__(other) def __hash__(self): if self.pk is None: # For new object return super(BaseDocument, self).__hash__() else: return hash(self.pk) class BaseList(list): """A special list so we can watch any changes """ _dereferenced = False _instance = None _name = None def __init__(self, list_items, instance, name): self._instance = instance self._name = name return super(BaseList, self).__init__(list_items) def __setitem__(self, *args, **kwargs): self._mark_as_changed() return super(BaseList, self).__setitem__(*args, **kwargs) def __delitem__(self, *args, **kwargs): self._mark_as_changed() return super(BaseList, self).__delitem__(*args, **kwargs) def __getstate__(self): self.observer = None return self def __setstate__(self, state): self = state return self def append(self, *args, **kwargs): self._mark_as_changed() return super(BaseList, self).append(*args, **kwargs) def extend(self, *args, **kwargs): self._mark_as_changed() return super(BaseList, self).extend(*args, **kwargs) def insert(self, *args, **kwargs): self._mark_as_changed() return super(BaseList, self).insert(*args, **kwargs) def pop(self, *args, **kwargs): self._mark_as_changed() return super(BaseList, self).pop(*args, 
**kwargs) def remove(self, *args, **kwargs): self._mark_as_changed() return super(BaseList, self).remove(*args, **kwargs) def reverse(self, *args, **kwargs): self._mark_as_changed() return super(BaseList, self).reverse(*args, **kwargs) def sort(self, *args, **kwargs): self._mark_as_changed() return super(BaseList, self).sort(*args, **kwargs) def _mark_as_changed(self): if hasattr(self._instance, '_mark_as_changed'): self._instance._mark_as_changed(self._name) class BaseDict(dict): """A special dict so we can watch any changes """ _dereferenced = False _instance = None _name = None def __init__(self, dict_items, instance, name): self._instance = instance self._name = name return super(BaseDict, self).__init__(dict_items) def __setitem__(self, *args, **kwargs): self._mark_as_changed() return super(BaseDict, self).__setitem__(*args, **kwargs) def __delete__(self, *args, **kwargs): self._mark_as_changed() return super(BaseDict, self).__delete__(*args, **kwargs) def __delitem__(self, *args, **kwargs): self._mark_as_changed() return super(BaseDict, self).__delitem__(*args, **kwargs) def __delattr__(self, *args, **kwargs): self._mark_as_changed() return super(BaseDict, self).__delattr__(*args, **kwargs) def __getstate__(self): self.instance = None self._dereferenced = False return self def __setstate__(self, state): self = state return self def clear(self, *args, **kwargs): self._mark_as_changed() return super(BaseDict, self).clear(*args, **kwargs) def pop(self, *args, **kwargs): self._mark_as_changed() return super(BaseDict, self).pop(*args, **kwargs) def popitem(self, *args, **kwargs): self._mark_as_changed() return super(BaseDict, self).popitem(*args, **kwargs) def update(self, *args, **kwargs): self._mark_as_changed() return super(BaseDict, self).update(*args, **kwargs) def _mark_as_changed(self): if hasattr(self._instance, '_mark_as_changed'): self._instance._mark_as_changed(self._name) if sys.version_info < (2, 5): # Prior to Python 2.5, Exception was an old-style 
    import types

    def subclass_exception(name, parents, unused):
        # Old-style classes must be created with types.ClassType.
        import types
        return types.ClassType(name, parents, {})
else:
    def subclass_exception(name, parents, module):
        return type(name, parents, {'__module__': module})
MongoEngine-mongoengine-7a1b110/mongoengine/connection.py000066400000000000000000000134401177143177100235710ustar00rootroot00000000000000import pymongo
from pymongo import Connection, ReplicaSetConnection, uri_parser


__all__ = ['ConnectionError', 'connect', 'register_connection',
           'DEFAULT_CONNECTION_NAME']


DEFAULT_CONNECTION_NAME = 'default'


class ConnectionError(Exception):
    pass


# Module-level registries: settings, live connections and database handles,
# all keyed by alias.
_connection_settings = {}
_connections = {}
_dbs = {}


def register_connection(alias, name, host='localhost', port=27017,
                        is_slave=False, read_preference=False, slaves=None,
                        username=None, password=None, **kwargs):
    """Add a connection.

    :param alias: the name that will be used to refer to this connection
        throughout MongoEngine
    :param name: the name of the specific database to use
    :param host: the host name of the :program:`mongod` instance to connect to
    :param port: the port that the :program:`mongod` instance is running on
    :param is_slave: whether the connection can act as a slave
      ** Depreciated pymongo 2.0.1+
    :param read_preference: The read preference for the collection
       ** Added pymongo 2.1
    :param slaves: a list of aliases of slave connections; each of these must
        be a registered connection that has :attr:`is_slave` set to ``True``
    :param username: username to authenticate with
    :param password: password to authenticate with
    :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
    """
    global _connection_settings

    conn_settings = {
        'name': name,
        'host': host,
        'port': port,
        'is_slave': is_slave,
        'slaves': slaves or [],
        'username': username,
        'password': password,
        'read_preference': read_preference
    }

    # Handle uri style connections
    if "://" in host:
        uri_dict = uri_parser.parse_uri(host)
        if uri_dict.get('database') is None:
            raise ConnectionError("If using URI style connection include "\
                                  "database name in string")
        conn_settings.update({
            'host': host,
            'name': uri_dict.get('database'),
            'username': uri_dict.get('username'),
            'password': uri_dict.get('password'),
            'read_preference': read_preference,
        })
        if "replicaSet" in host:
            conn_settings['replicaSet'] = True

    conn_settings.update(kwargs)
    _connection_settings[alias] = conn_settings


def disconnect(alias=DEFAULT_CONNECTION_NAME):
    # Drop both the live connection and the cached db handle for the alias.
    global _connections
    global _dbs

    if alias in _connections:
        get_connection(alias=alias).disconnect()
        del _connections[alias]
    if alias in _dbs:
        del _dbs[alias]


def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    global _connections
    # Connect to the database if not already connected
    if reconnect:
        disconnect(alias)

    if alias not in _connections:
        if alias not in _connection_settings:
            msg = 'Connection with alias "%s" has not been defined' % alias
            if alias == DEFAULT_CONNECTION_NAME:
                msg = 'You have not defined a default connection'
            raise ConnectionError(msg)
        conn_settings = _connection_settings[alias].copy()

        if hasattr(pymongo, 'version_tuple'):  # Support for 2.1+
            # These keys are not accepted by pymongo 2.1+ constructors.
            conn_settings.pop('name', None)
            conn_settings.pop('slaves', None)
            conn_settings.pop('is_slave', None)
            conn_settings.pop('username', None)
            conn_settings.pop('password', None)
        else:
            # Get all the slave connections
            if 'slaves' in conn_settings:
                slaves = []
                for slave_alias in conn_settings['slaves']:
                    slaves.append(get_connection(slave_alias))
                conn_settings['slaves'] = slaves
                conn_settings.pop('read_preference', None)

        connection_class = Connection
        if 'replicaSet' in conn_settings:
            conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
            # Discard port since it can't be used on ReplicaSetConnection
            conn_settings.pop('port', None)
            # Discard replicaSet if not base string
            if not isinstance(conn_settings['replicaSet'], basestring):
                conn_settings.pop('replicaSet', None)
            connection_class = ReplicaSetConnection

        try:
            _connections[alias] = connection_class(**conn_settings)
        except Exception, e:
            raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
    return _connections[alias]


def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
    global _dbs
    if reconnect:
        disconnect(alias)

    if alias not in _dbs:
        conn = get_connection(alias)
        conn_settings = _connection_settings[alias]
        _dbs[alias] = conn[conn_settings['name']]

        # Authenticate if necessary
        if conn_settings['username'] and conn_settings['password']:
            _dbs[alias].authenticate(conn_settings['username'],
                                     conn_settings['password'])
    return _dbs[alias]


def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
    """Connect to the database specified by the 'db' argument.

    Connection settings may be provided here as well if the database is not
    running on the default port on localhost. If authentication is needed,
    provide username and password arguments as well.

    Multiple databases are supported by using aliases.  Provide a separate
    `alias` to connect to a different instance of :program:`mongod`.

    .. versionchanged:: 0.6 - added multiple database support.
    """
    global _connections
    if alias not in _connections:
        register_connection(alias, db, **kwargs)

    return get_connection(alias)

# Support old naming convention
_get_connection = get_connection
_get_db = get_db
MongoEngine-mongoengine-7a1b110/mongoengine/dereference.py000066400000000000000000000206701177143177100237040ustar00rootroot00000000000000from bson import DBRef, SON

from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document)
from fields import (ReferenceField, ListField, DictField, MapField)
from connection import get_db
from queryset import QuerySet
from document import Document


class DeReference(object):

    def __call__(self, items, max_depth=1, instance=None, name=None):
        """
        Cheaply dereferences the items to a set depth.
        Also handles the conversion of complex data types.

        :param items: The iterable (dict, list, queryset) to be dereferenced.
        :param max_depth: The maximum depth to recurse to
        :param instance: The owning instance used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param name: The name of the field, used for tracking changes by
            :class:`~mongoengine.base.ComplexBaseField`
        :param get: A boolean determining if being called by __get__
        """
        if items is None or isinstance(items, basestring):
            return items

        # cheapest way to convert a queryset to a list
        # list(queryset) uses a count() query to determine length
        if isinstance(items, QuerySet):
            items = [i for i in items]

        self.max_depth = max_depth

        doc_type = None
        if instance and instance._fields:
            doc_type = instance._fields[name].field

            if isinstance(doc_type, ReferenceField):
                doc_type = doc_type.document_type
                # Already fully dereferenced - nothing to do.
                if all([i.__class__ == doc_type for i in items]):
                    return items

        # Three phases: find DBRefs, bulk-fetch them, then graft the fetched
        # documents back into the original structure.
        self.reference_map = self._find_references(items)
        self.object_map = self._fetch_objects(doc_type=doc_type)
        return self._attach_objects(items, 0, instance, name)

    def _find_references(self, items, depth=0):
        """
        Recursively finds all db references to be dereferenced

        :param items: The iterable (dict, list, queryset)
        :param depth: The current depth of recursion
        """
        reference_map = {}
        if not items or depth >= self.max_depth:
            return reference_map

        # Determine the iterator to use
        if not hasattr(items, 'items'):
            iterator = enumerate(items)
        else:
            iterator = items.iteritems()

        # Recursively find dbreferences
        depth += 1
        for k, item in iterator:
            if hasattr(item, '_fields'):
                # A document instance: inspect its raw field data.
                for field_name, field in item._fields.iteritems():
                    v = item._data.get(field_name, None)
                    if isinstance(v, (DBRef)):
                        reference_map.setdefault(field.document_type, []).append(v.id)
                    elif isinstance(v, (dict, SON)) and '_ref' in v:
                        # Generic reference stored as {_cls, _ref}.
                        reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
                    elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
                        field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
                        references = self._find_references(v, depth)
                        for key, refs in references.iteritems():
                            if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
                                key = field_cls
                            reference_map.setdefault(key, []).extend(refs)
            elif isinstance(item, (DBRef)):
                reference_map.setdefault(item.collection, []).append(item.id)
            elif isinstance(item, (dict, SON)) and '_ref' in item:
                reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
            elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
                references = self._find_references(item, depth - 1)
                for key, refs in references.iteritems():
                    reference_map.setdefault(key, []).extend(refs)

        return reference_map

    def _fetch_objects(self, doc_type=None):
        """Fetch all references and convert to their document objects
        """
        object_map = {}
        for col, dbrefs in self.reference_map.iteritems():
            keys = object_map.keys()
            # Skip refs already fetched in a previous iteration.
            refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys]))
            if hasattr(col, 'objects'):  # We have a document class for the refs
                references = col.objects.in_bulk(refs)
                for key, doc in references.iteritems():
                    object_map[key] = doc
            else:  # Generic reference: use the refs data to convert to document
                if doc_type and not isinstance(doc_type, (ListField,
                                                          DictField,
                                                          MapField,)):
                    references = doc_type._get_db()[col].find({'_id': {'$in': refs}})
                    for ref in references:
                        doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
                else:
                    references = get_db()[col].find({'_id': {'$in': refs}})
                    for ref in references:
                        if '_cls' in ref:
                            doc = get_document(ref["_cls"])._from_son(ref)
                        elif doc_type is None:
                            # Fall back to guessing the class from the
                            # collection name (snake_case -> CamelCase).
                            doc = get_document(
                                ''.join(x.capitalize()
                                        for x in col.split('_')))._from_son(ref)
                        else:
                            doc = doc_type._from_son(ref)
                        object_map[doc.id] = doc
        return object_map

    def _attach_objects(self, items, depth=0, instance=None, name=None):
        """
        Recursively finds all db references to be dereferenced

        :param items: The iterable (dict, list, queryset)
        :param depth: The current depth of recursion
        :param instance: The owning instance used for tracking changes by
:class:`~mongoengine.base.ComplexBaseField` :param name: The name of the field, used for tracking changes by :class:`~mongoengine.base.ComplexBaseField` """ if not items: if isinstance(items, (BaseDict, BaseList)): return items if instance: if isinstance(items, dict): return BaseDict(items, instance, name) else: return BaseList(items, instance, name) if isinstance(items, (dict, SON)): if '_ref' in items: return self.object_map.get(items['_ref'].id, items) elif '_types' in items and '_cls' in items: doc = get_document(items['_cls'])._from_son(items) doc._data = self._attach_objects(doc._data, depth, doc, name) return doc if not hasattr(items, 'items'): is_list = True iterator = enumerate(items) data = [] else: is_list = False iterator = items.iteritems() data = {} depth += 1 for k, v in iterator: if is_list: data.append(v) else: data[k] = v if k in self.object_map: data[k] = self.object_map[k] elif hasattr(v, '_fields'): for field_name, field in v._fields.iteritems(): v = data[k]._data.get(field_name, None) if isinstance(v, (DBRef)): data[k]._data[field_name] = self.object_map.get(v.id, v) elif isinstance(v, (dict, SON)) and '_ref' in v: data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v) elif isinstance(v, dict) and depth <= self.max_depth: data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) elif isinstance(v, (list, tuple)) and depth <= self.max_depth: data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name) elif hasattr(v, 'id'): data[k] = self.object_map.get(v.id, v) if instance and name: if is_list: return BaseList(data, instance, name) return BaseDict(data, instance, name) depth += 1 return data 
MongoEngine-mongoengine-7a1b110/mongoengine/django/000077500000000000000000000000001177143177100223205ustar00rootroot00000000000000MongoEngine-mongoengine-7a1b110/mongoengine/django/__init__.py000066400000000000000000000000001177143177100244170ustar00rootroot00000000000000MongoEngine-mongoengine-7a1b110/mongoengine/django/auth.py000066400000000000000000000136131177143177100236370ustar00rootroot00000000000000import datetime from mongoengine import * from django.utils.encoding import smart_str from django.contrib.auth.models import AnonymousUser from django.utils.translation import ugettext_lazy as _ try: from django.contrib.auth.hashers import check_password, make_password except ImportError: """Handle older versions of Django""" from django.utils.hashcompat import md5_constructor, sha_constructor def get_hexdigest(algorithm, salt, raw_password): raw_password, salt = smart_str(raw_password), smart_str(salt) if algorithm == 'md5': return md5_constructor(salt + raw_password).hexdigest() elif algorithm == 'sha1': return sha_constructor(salt + raw_password).hexdigest() raise ValueError('Got unknown password algorithm type in password') def check_password(raw_password, password): algo, salt, hash = password.split('$') return hash == get_hexdigest(algo, salt, raw_password) def make_password(raw_password): from random import random algo = 'sha1' salt = get_hexdigest(algo, str(random()), str(random()))[:5] hash = get_hexdigest(algo, salt, raw_password) return '%s$%s$%s' % (algo, salt, hash) REDIRECT_FIELD_NAME = 'next' class User(Document): """A User document that aims to mirror most of the API specified by Django at http://docs.djangoproject.com/en/dev/topics/auth/#users """ username = StringField(max_length=30, required=True, verbose_name=_('username'), help_text=_("Required. 30 characters or fewer. 
Letters, numbers and @/./+/-/_ characters")) first_name = StringField(max_length=30, verbose_name=_('first name')) last_name = StringField(max_length=30, verbose_name=_('last name')) email = EmailField(verbose_name=_('e-mail address')) password = StringField(max_length=128, verbose_name=_('password'), help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the change password form.")) is_staff = BooleanField(default=False, verbose_name=_('staff status'), help_text=_("Designates whether the user can log into this admin site.")) is_active = BooleanField(default=True, verbose_name=_('active'), help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts.")) is_superuser = BooleanField(default=False, verbose_name=_('superuser status'), help_text=_("Designates that this user has all permissions without explicitly assigning them.")) last_login = DateTimeField(default=datetime.datetime.now, verbose_name=_('last login')) date_joined = DateTimeField(default=datetime.datetime.now, verbose_name=_('date joined')) meta = { 'allow_inheritance': True, 'indexes': [ {'fields': ['username'], 'unique': True} ] } def __unicode__(self): return self.username def get_full_name(self): """Returns the users first and last names, separated by a space. """ full_name = u'%s %s' % (self.first_name or '', self.last_name or '') return full_name.strip() def is_anonymous(self): return False def is_authenticated(self): return True def set_password(self, raw_password): """Sets the user's password - always use this rather than directly assigning to :attr:`~mongoengine.django.auth.User.password` as the password is hashed before storage. """ self.password = make_password(raw_password) self.save() return self def check_password(self, raw_password): """Checks the user's password against a provided password - always use this rather than directly comparing to :attr:`~mongoengine.django.auth.User.password` as the password is hashed before storage. 
""" return check_password(raw_password, self.password) @classmethod def create_user(cls, username, password, email=None): """Create (and save) a new user with the given username, password and email address. """ now = datetime.datetime.now() # Normalize the address by lowercasing the domain part of the email # address. if email is not None: try: email_name, domain_part = email.strip().split('@', 1) except ValueError: pass else: email = '@'.join([email_name, domain_part.lower()]) user = cls(username=username, email=email, date_joined=now) user.set_password(password) user.save() return user def get_and_delete_messages(self): return [] class MongoEngineBackend(object): """Authenticate using MongoEngine and mongoengine.django.auth.User. """ supports_object_permissions = False supports_anonymous_user = False supports_inactive_user = False def authenticate(self, username=None, password=None): user = User.objects(username=username).first() if user: if password and user.check_password(password): return user return None def get_user(self, user_id): return User.objects.with_id(user_id) def get_user(userid): """Returns a User object from an id (User.id). Django's equivalent takes request, but taking an id instead leaves it up to the developer to store the id in any way they want (session, signed cookie, etc.) 
""" if not userid: return AnonymousUser() return MongoEngineBackend().get_user(userid) or AnonymousUser() MongoEngine-mongoengine-7a1b110/mongoengine/django/sessions.py000066400000000000000000000046231177143177100245450ustar00rootroot00000000000000from datetime import datetime from django.conf import settings from django.contrib.sessions.backends.base import SessionBase, CreateError from django.core.exceptions import SuspiciousOperation from django.utils.encoding import force_unicode from mongoengine.document import Document from mongoengine import fields from mongoengine.queryset import OperationError from mongoengine.connection import DEFAULT_CONNECTION_NAME MONGOENGINE_SESSION_DB_ALIAS = getattr( settings, 'MONGOENGINE_SESSION_DB_ALIAS', DEFAULT_CONNECTION_NAME) class MongoSession(Document): session_key = fields.StringField(primary_key=True, max_length=40) session_data = fields.StringField() expire_date = fields.DateTimeField() meta = {'collection': 'django_session', 'db_alias': MONGOENGINE_SESSION_DB_ALIAS, 'allow_inheritance': False} class SessionStore(SessionBase): """A MongoEngine-based session store for Django. 
""" def load(self): try: s = MongoSession.objects(session_key=self.session_key, expire_date__gt=datetime.now())[0] return self.decode(force_unicode(s.session_data)) except (IndexError, SuspiciousOperation): self.create() return {} def exists(self, session_key): return bool(MongoSession.objects(session_key=session_key).first()) def create(self): while True: self._session_key = self._get_new_session_key() try: self.save(must_create=True) except CreateError: continue self.modified = True self._session_cache = {} return def save(self, must_create=False): if self.session_key is None: self._session_key = self._get_new_session_key() s = MongoSession(session_key=self.session_key) s.session_data = self.encode(self._get_session(no_load=must_create)) s.expire_date = self.get_expiry_date() try: s.save(force_insert=must_create, safe=True) except OperationError: if must_create: raise CreateError raise def delete(self, session_key=None): if session_key is None: if self.session_key is None: return session_key = self.session_key MongoSession.objects(session_key=session_key).delete() MongoEngine-mongoengine-7a1b110/mongoengine/django/shortcuts.py000066400000000000000000000030141177143177100247260ustar00rootroot00000000000000from django.http import Http404 from mongoengine.queryset import QuerySet from mongoengine.base import BaseDocument from mongoengine.base import ValidationError def _get_queryset(cls): """Inspired by django.shortcuts.*""" if isinstance(cls, QuerySet): return cls else: return cls.objects def get_document_or_404(cls, *args, **kwargs): """ Uses get() to return an document, or raises a Http404 exception if the document does not exist. cls may be a Document or QuerySet object. All other passed arguments and keyword arguments are used in the get() query. Note: Like with get(), an MultipleObjectsReturned will be raised if more than one object is found. 
Inspired by django.shortcuts.* """ queryset = _get_queryset(cls) try: return queryset.get(*args, **kwargs) except (queryset._document.DoesNotExist, ValidationError): raise Http404('No %s matches the given query.' % queryset._document._class_name) def get_list_or_404(cls, *args, **kwargs): """ Uses filter() to return a list of documents, or raise a Http404 exception if the list is empty. cls may be a Document or QuerySet object. All other passed arguments and keyword arguments are used in the filter() query. Inspired by django.shortcuts.* """ queryset = _get_queryset(cls) obj_list = list(queryset.filter(*args, **kwargs)) if not obj_list: raise Http404('No %s matches the given query.' % queryset._document._class_name) return obj_list MongoEngine-mongoengine-7a1b110/mongoengine/django/storage.py000066400000000000000000000072301177143177100243400ustar00rootroot00000000000000import os import itertools import urlparse from mongoengine import * from django.conf import settings from django.core.files.storage import Storage from django.core.exceptions import ImproperlyConfigured class FileDocument(Document): """A document used to store a single file in GridFS. """ file = FileField() class GridFSStorage(Storage): """A custom storage backend to store files in GridFS """ def __init__(self, base_url=None): if base_url is None: base_url = settings.MEDIA_URL self.base_url = base_url self.document = FileDocument self.field = 'file' def delete(self, name): """Deletes the specified file from the storage system. """ if self.exists(name): doc = self.document.objects.first() field = getattr(doc, self.field) self._get_doc_with_name(name).delete() # Delete the FileField field.delete() # Delete the FileDocument def exists(self, name): """Returns True if a file referened by the given name already exists in the storage system, or False if the name is available for a new file. 
""" doc = self._get_doc_with_name(name) if doc: field = getattr(doc, self.field) return bool(field.name) else: return False def listdir(self, path=None): """Lists the contents of the specified path, returning a 2-tuple of lists; the first item being directories, the second item being files. """ def name(doc): return getattr(doc, self.field).name docs = self.document.objects return [], [name(d) for d in docs if name(d)] def size(self, name): """Returns the total size, in bytes, of the file specified by name. """ doc = self._get_doc_with_name(name) if doc: return getattr(doc, self.field).length else: raise ValueError("No such file or directory: '%s'" % name) def url(self, name): """Returns an absolute URL where the file's contents can be accessed directly by a web browser. """ if self.base_url is None: raise ValueError("This file is not accessible via a URL.") return urlparse.urljoin(self.base_url, name).replace('\\', '/') def _get_doc_with_name(self, name): """Find the documents in the store with the given name """ docs = self.document.objects doc = [d for d in docs if getattr(d, self.field).name == name] if doc: return doc[0] else: return None def _open(self, name, mode='rb'): doc = self._get_doc_with_name(name) if doc: return getattr(doc, self.field) else: raise ValueError("No file found with the name '%s'." % name) def get_available_name(self, name): """Returns a filename that's free on the target storage system, and available for new content to be written to. """ file_root, file_ext = os.path.splitext(name) # If the filename already exists, add an underscore and a number (before # the file extension, if one exists) to the filename until the generated # filename doesn't exist. count = itertools.count(1) while self.exists(name): # file_ext includes the dot. 
name = os.path.join("%s_%s%s" % (file_root, count.next(), file_ext)) return name def _save(self, name, content): doc = self.document() getattr(doc, self.field).put(content, filename=name) doc.save() return name MongoEngine-mongoengine-7a1b110/mongoengine/django/tests.py000066400000000000000000000012601177143177100240330ustar00rootroot00000000000000#coding: utf-8 from django.test import TestCase from django.conf import settings from mongoengine import connect class MongoTestCase(TestCase): """ TestCase class that clear the collection between the tests """ db_name = 'test_%s' % settings.MONGO_DATABASE_NAME def __init__(self, methodName='runtest'): self.db = connect(self.db_name).get_db() super(MongoTestCase, self).__init__(methodName) def _post_teardown(self): super(MongoTestCase, self)._post_teardown() for collection in self.db.collection_names(): if collection == 'system.indexes': continue self.db.drop_collection(collection) MongoEngine-mongoengine-7a1b110/mongoengine/document.py000066400000000000000000000441211177143177100232500ustar00rootroot00000000000000import pymongo from bson.dbref import DBRef from mongoengine import signals from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, BaseDict, BaseList) from queryset import OperationError from connection import get_db, DEFAULT_CONNECTION_NAME __all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument', 'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError'] class InvalidCollectionError(Exception): pass class EmbeddedDocument(BaseDocument): """A :class:`~mongoengine.Document` that isn't stored in its own collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as fields on :class:`~mongoengine.Document`\ s through the :class:`~mongoengine.EmbeddedDocumentField` field type. 
""" __metaclass__ = DocumentMetaclass def __init__(self, *args, **kwargs): super(EmbeddedDocument, self).__init__(*args, **kwargs) self._changed_fields = [] def __delattr__(self, *args, **kwargs): """Handle deletions of fields""" field_name = args[0] if field_name in self._fields: default = self._fields[field_name].default if callable(default): default = default() setattr(self, field_name, default) else: super(EmbeddedDocument, self).__delattr__(*args, **kwargs) def __eq__(self, other): if isinstance(other, self.__class__): return self._data == other._data return False class Document(BaseDocument): """The base class used for defining the structure and properties of collections of documents stored in MongoDB. Inherit from this class, and add fields as class attributes to define a document's structure. Individual documents may then be created by making instances of the :class:`~mongoengine.Document` subclass. By default, the MongoDB collection used to store documents created using a :class:`~mongoengine.Document` subclass will be the name of the subclass converted to lowercase. A different collection may be specified by providing :attr:`collection` to the :attr:`meta` dictionary in the class definition. A :class:`~mongoengine.Document` subclass may be itself subclassed, to create a specialised version of the document that will be stored in the same collection. To facilitate this behaviour, `_cls` and `_types` fields are added to documents (hidden though the MongoEngine interface though). To disable this behaviour and remove the dependence on the presence of `_cls` and `_types`, set :attr:`allow_inheritance` to ``False`` in the :attr:`meta` dictionary. A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta` dictionary. 
:attr:`max_documents` is the maximum number of documents that is allowed to be stored in the collection, and :attr:`max_size` is the maximum size of the collection in bytes. If :attr:`max_size` is not specified and :attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB). Indexes may be created by specifying :attr:`indexes` in the :attr:`meta` dictionary. The value should be a list of field names or tuples of field names. Index direction may be specified by prefixing the field names with a **+** or **-** sign. Automatic index creation can be disabled by specifying attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to False then indexes will not be created by MongoEngine. This is useful in production systems where index creation is performed as part of a deployment system. By default, _types will be added to the start of every index (that doesn't contain a list) if allow_inheritance is True. This can be disabled by either setting types to False on the specific index or by setting index_types to False on the meta dictionary for the document. 
""" __metaclass__ = TopLevelDocumentMetaclass @apply def pk(): """Primary key alias """ def fget(self): return getattr(self, self._meta['id_field']) def fset(self, value): return setattr(self, self._meta['id_field'], value) return property(fget, fset) @classmethod def _get_db(cls): """Some Model using other db_alias""" return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME )) @classmethod def _get_collection(cls): """Returns the collection for the document.""" if not hasattr(cls, '_collection') or cls._collection is None: db = cls._get_db() collection_name = cls._get_collection_name() # Create collection as a capped collection if specified if cls._meta['max_size'] or cls._meta['max_documents']: # Get max document limit and max byte size from meta max_size = cls._meta['max_size'] or 10000000 # 10MB default max_documents = cls._meta['max_documents'] if collection_name in db.collection_names(): cls._collection = db[collection_name] # The collection already exists, check if its capped # options match the specified capped options options = cls._collection.options() if options.get('max') != max_documents or \ options.get('size') != max_size: msg = ('Cannot create collection "%s" as a capped ' 'collection as it already exists') % cls._collection raise InvalidCollectionError(msg) else: # Create the collection as a capped collection opts = {'capped': True, 'size': max_size} if max_documents: opts['max'] = max_documents cls._collection = db.create_collection( collection_name, **opts ) else: cls._collection = db[collection_name] return cls._collection def save(self, safe=True, force_insert=False, validate=True, write_options=None, cascade=None, cascade_kwargs=None, _refs=None): """Save the :class:`~mongoengine.Document` to the database. If the document already exists, it will be updated, otherwise it will be created. If ``safe=True`` and the operation is unsuccessful, an :class:`~mongoengine.OperationError` will be raised. 
:param safe: check if the operation succeeded before returning :param force_insert: only try to create a new document, don't allow updates of existing documents :param validate: validates the document; set to ``False`` to skip. :param write_options: Extra keyword arguments are passed down to :meth:`~pymongo.collection.Collection.save` OR :meth:`~pymongo.collection.Collection.insert` which will be used as options for the resultant ``getLastError`` command. For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will wait until at least two servers have recorded the write and will force an fsync on each server being written to. :param cascade: Sets the flag for cascading saves. You can set a default by setting "cascade" in the document __meta__ :param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves :param _refs: A list of processed references used in cascading saves .. versionchanged:: 0.5 In existing documents it only saves changed fields using set / unset Saves are cascaded and any :class:`~bson.dbref.DBRef` objects that have changes are saved as well. .. 
versionchanged:: 0.6 Cascade saves are optional = defaults to True, if you want fine grain control then you can turn off using document meta['cascade'] = False Also you can pass different kwargs to the cascade save using cascade_kwargs which overwrites the existing kwargs with custom values """ signals.pre_save.send(self.__class__, document=self) if validate: self.validate() if not write_options: write_options = {} doc = self.to_mongo() created = force_insert or '_id' not in doc try: collection = self.__class__.objects._collection if created: if force_insert: object_id = collection.insert(doc, safe=safe, **write_options) else: object_id = collection.save(doc, safe=safe, **write_options) else: object_id = doc['_id'] updates, removals = self._delta() # Need to add shard key to query, or you get an error select_dict = {'_id': object_id} shard_key = self.__class__._meta.get('shard_key', tuple()) for k in shard_key: actual_key = self._db_field_map.get(k, k) select_dict[actual_key] = doc[actual_key] upsert = self._created if updates: collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options) if removals: collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options) cascade = self._meta.get('cascade', True) if cascade is None else cascade if cascade: kwargs = { "safe": safe, "force_insert": force_insert, "validate": validate, "write_options": write_options, "cascade": cascade } if cascade_kwargs: # Allow granular control over cascades kwargs.update(cascade_kwargs) kwargs['_refs'] = _refs #self._changed_fields = [] self.cascade_save(**kwargs) except pymongo.errors.OperationFailure, err: message = 'Could not save document (%s)' if u'duplicate key' in unicode(err): message = u'Tried to save duplicate unique keys (%s)' raise OperationError(message % unicode(err)) id_field = self._meta['id_field'] self[id_field] = self._fields[id_field].to_python(object_id) self._changed_fields = [] self._created = False 
signals.post_save.send(self.__class__, document=self, created=created) return self def cascade_save(self, *args, **kwargs): """Recursively saves any references / generic references on an object""" from fields import ReferenceField, GenericReferenceField _refs = kwargs.get('_refs', []) or [] for name, cls in self._fields.items(): if not isinstance(cls, (ReferenceField, GenericReferenceField)): continue ref = getattr(self, name) if not ref: continue ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) if ref and ref_id not in _refs: _refs.append(ref_id) kwargs["_refs"] = _refs ref.save(**kwargs) ref._changed_fields = [] def update(self, **kwargs): """Performs an update on the :class:`~mongoengine.Document` A convenience wrapper to :meth:`~mongoengine.QuerySet.update`. Raises :class:`OperationError` if called on an object that has not yet been saved. """ if not self.pk: raise OperationError('attempt to update a document not yet saved') # Need to add shard key to query, or you get an error select_dict = {'pk': self.pk} shard_key = self.__class__._meta.get('shard_key', tuple()) for k in shard_key: select_dict[k] = getattr(self, k) return self.__class__.objects(**select_dict).update_one(**kwargs) def delete(self, safe=False): """Delete the :class:`~mongoengine.Document` from the database. This will only take effect if the document has been previously saved. :param safe: check if the operation succeeded before returning """ signals.pre_delete.send(self.__class__, document=self) try: self.__class__.objects(pk=self.pk).delete(safe=safe) except pymongo.errors.OperationFailure, err: message = u'Could not delete document (%s)' % err.message raise OperationError(message) signals.post_delete.send(self.__class__, document=self) def select_related(self, max_depth=1): """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to a maximum depth in order to cut down the number queries to mongodb. .. 
versionadded:: 0.5 """ from dereference import DeReference self._data = DeReference()(self._data, max_depth) return self def reload(self, max_depth=1): """Reloads all attributes from the database. .. versionadded:: 0.1.2 .. versionchanged:: 0.6 Now chainable """ id_field = self._meta['id_field'] obj = self.__class__.objects( **{id_field: self[id_field]} ).first().select_related(max_depth=max_depth) for field in self._fields: setattr(self, field, self._reload(field, obj[field])) if self._dynamic: for name in self._dynamic_fields.keys(): setattr(self, name, self._reload(name, obj._data[name])) self._changed_fields = obj._changed_fields return obj def _reload(self, key, value): """Used by :meth:`~mongoengine.Document.reload` to ensure the correct instance is linked to self. """ if isinstance(value, BaseDict): value = [(k, self._reload(k, v)) for k, v in value.items()] value = BaseDict(value, self, key) elif isinstance(value, BaseList): value = [self._reload(key, v) for v in value] value = BaseList(value, self, key) elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)): value._changed_fields = [] return value def to_dbref(self): """Returns an instance of :class:`~bson.dbref.DBRef` useful in `__raw__` queries.""" if not self.pk: msg = "Only saved documents can have a valid dbref" raise OperationError(msg) return DBRef(self.__class__._get_collection_name(), self.pk) @classmethod def register_delete_rule(cls, document_cls, field_name, rule): """This method registers the delete rules to apply when removing this object. """ cls._meta['delete_rules'][(document_cls, field_name)] = rule @classmethod def drop_collection(cls): """Drops the entire collection associated with this :class:`~mongoengine.Document` type from the database. 
""" from mongoengine.queryset import QuerySet db = cls._get_db() db.drop_collection(cls._get_collection_name()) QuerySet._reset_already_indexed(cls) class DynamicDocument(Document): """A Dynamic Document class allowing flexible, expandable and uncontrolled schemas. As a :class:`~mongoengine.Document` subclass, acts in the same way as an ordinary document but has expando style properties. Any data passed or set against the :class:`~mongoengine.DynamicDocument` that is not a field is automatically converted into a :class:`~mongoengine.DynamicField` and data can be attributed to that field. ..note:: There is one caveat on Dynamic Documents: fields cannot start with `_` """ __metaclass__ = TopLevelDocumentMetaclass _dynamic = True def __delattr__(self, *args, **kwargs): """Deletes the attribute by setting to None and allowing _delta to unset it""" field_name = args[0] if field_name in self._dynamic_fields: setattr(self, field_name, None) else: super(DynamicDocument, self).__delattr__(*args, **kwargs) class DynamicEmbeddedDocument(EmbeddedDocument): """A Dynamic Embedded Document class allowing flexible, expandable and uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more information about dynamic documents. """ __metaclass__ = DocumentMetaclass _dynamic = True def __delattr__(self, *args, **kwargs): """Deletes the attribute by setting to None and allowing _delta to unset it""" field_name = args[0] setattr(self, field_name, None) class MapReduceDocument(object): """A document returned from a map/reduce query. :param collection: An instance of :class:`~pymongo.Collection` :param key: Document/result key, often an instance of :class:`~bson.objectid.ObjectId`. If supplied as an ``ObjectId`` found in the given ``collection``, the object can be accessed via the ``object`` property. :param value: The result(s) for this key. .. 
versionadded:: 0.3 """ def __init__(self, document, collection, key, value): self._document = document self._collection = collection self.key = key self.value = value @property def object(self): """Lazy-load the object referenced by ``self.key``. ``self.key`` should be the ``primary_key``. """ id_field = self._document()._meta['id_field'] id_field_type = type(id_field) if not isinstance(self.key, id_field_type): try: self.key = id_field_type(self.key) except: raise Exception("Could not cast key as %s" % \ id_field_type.__name__) if not hasattr(self, "_key_object"): self._key_object = self._document.objects.with_id(self.key) return self._key_object return self._key_object MongoEngine-mongoengine-7a1b110/mongoengine/fields.py000066400000000000000000001260551177143177100227070ustar00rootroot00000000000000import datetime import time import decimal import gridfs import re import uuid from bson import Binary, DBRef, SON, ObjectId from base import (BaseField, ComplexBaseField, ObjectIdField, ValidationError, get_document, BaseDocument) from queryset import DO_NOTHING, QuerySet from document import Document, EmbeddedDocument from connection import get_db, DEFAULT_CONNECTION_NAME from operator import itemgetter try: from PIL import Image, ImageOps except ImportError: Image = None ImageOps = None try: from cStringIO import StringIO except ImportError: from StringIO import StringIO __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', 'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', 'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', 'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField', 'GenericReferenceField', 'FileField', 'BinaryField', 'SortedListField', 'EmailField', 'GeoPointField', 'ImageField', 'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField'] RECURSIVE_REFERENCE_CONSTANT = 'self' class StringField(BaseField): """A unicode string field. 
""" def __init__(self, regex=None, max_length=None, min_length=None, **kwargs): self.regex = re.compile(regex) if regex else None self.max_length = max_length self.min_length = min_length super(StringField, self).__init__(**kwargs) def to_python(self, value): if isinstance(value, unicode): return value else: return value.decode('utf-8') def validate(self, value): if not isinstance(value, basestring): self.error('StringField only accepts string values') if self.max_length is not None and len(value) > self.max_length: self.error('String value is too long') if self.min_length is not None and len(value) < self.min_length: self.error('String value is too short') if self.regex is not None and self.regex.match(value) is None: self.error('String value did not match validation regex') def lookup_member(self, member_name): return None def prepare_query_value(self, op, value): if not isinstance(op, basestring): return value if op.lstrip('i') in ('startswith', 'endswith', 'contains', 'exact'): flags = 0 if op.startswith('i'): flags = re.IGNORECASE op = op.lstrip('i') regex = r'%s' if op == 'startswith': regex = r'^%s' elif op == 'endswith': regex = r'%s$' elif op == 'exact': regex = r'^%s$' # escape unsafe characters which could lead to a re.error value = re.escape(value) value = re.compile(regex % value, flags) return value class URLField(StringField): """A field that validates input as an URL. .. versionadded:: 0.3 """ URL_REGEX = re.compile( r'^https?://' r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|' r'localhost|' r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' r'(?::\d+)?' 
r'(?:/?|[/?]\S+)$', re.IGNORECASE ) def __init__(self, verify_exists=False, **kwargs): self.verify_exists = verify_exists super(URLField, self).__init__(**kwargs) def validate(self, value): if not URLField.URL_REGEX.match(value): self.error('Invalid URL: %s' % value) if self.verify_exists: import urllib2 try: request = urllib2.Request(value) urllib2.urlopen(request) except Exception, e: self.error('This URL appears to be a broken link: %s' % e) class EmailField(StringField): """A field that validates input as an E-Mail-Address. .. versionadded:: 0.4 """ EMAIL_REGEX = re.compile( r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"' # quoted-string r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE # domain ) def validate(self, value): if not EmailField.EMAIL_REGEX.match(value): self.error('Invalid Mail-address: %s' % value) class IntField(BaseField): """An integer field. """ def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value super(IntField, self).__init__(**kwargs) def to_python(self, value): return int(value) def validate(self, value): try: value = int(value) except: self.error('%s could not be converted to int' % value) if self.min_value is not None and value < self.min_value: self.error('Integer value is too small') if self.max_value is not None and value > self.max_value: self.error('Integer value is too large') def prepare_query_value(self, op, value): return int(value) class FloatField(BaseField): """An floating point number field. 
""" def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value super(FloatField, self).__init__(**kwargs) def to_python(self, value): return float(value) def validate(self, value): if isinstance(value, int): value = float(value) if not isinstance(value, float): self.error('FloatField only accepts float values') if self.min_value is not None and value < self.min_value: self.error('Float value is too small') if self.max_value is not None and value > self.max_value: self.error('Float value is too large') def prepare_query_value(self, op, value): return float(value) class DecimalField(BaseField): """A fixed-point decimal number field. .. versionadded:: 0.3 """ def __init__(self, min_value=None, max_value=None, **kwargs): self.min_value, self.max_value = min_value, max_value super(DecimalField, self).__init__(**kwargs) def to_python(self, value): if not isinstance(value, basestring): value = unicode(value) return decimal.Decimal(value) def to_mongo(self, value): return unicode(value) def validate(self, value): if not isinstance(value, decimal.Decimal): if not isinstance(value, basestring): value = str(value) try: value = decimal.Decimal(value) except Exception, exc: self.error('Could not convert value to decimal: %s' % exc) if self.min_value is not None and value < self.min_value: self.error('Decimal value is too small') if self.max_value is not None and value > self.max_value: self.error('Decimal value is too large') class BooleanField(BaseField): """A boolean field type. .. versionadded:: 0.1.2 """ def to_python(self, value): return bool(value) def validate(self, value): if not isinstance(value, bool): self.error('BooleanField only accepts boolean values') class DateTimeField(BaseField): """A datetime field. Note: Microseconds are rounded to the nearest millisecond. Pre UTC microsecond support is effecively broken. Use :class:`~mongoengine.fields.ComplexDateTimeField` if you need accurate microsecond support. 
""" def validate(self, value): if not isinstance(value, (datetime.datetime, datetime.date)): self.error(u'cannot parse date "%s"' % value) def to_mongo(self, value): return self.prepare_query_value(None, value) def prepare_query_value(self, op, value): if value is None: return value if isinstance(value, datetime.datetime): return value if isinstance(value, datetime.date): return datetime.datetime(value.year, value.month, value.day) # Attempt to parse a datetime: # value = smart_str(value) # split usecs, because they are not recognized by strptime. if '.' in value: try: value, usecs = value.split('.') usecs = int(usecs) except ValueError: return None else: usecs = 0 kwargs = {'microsecond': usecs} try: # Seconds are optional, so try converting seconds first. return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M:%S')[:6], **kwargs) except ValueError: try: # Try without seconds. return datetime.datetime(*time.strptime(value, '%Y-%m-%d %H:%M')[:5], **kwargs) except ValueError: # Try without hour/minutes/seconds. try: return datetime.datetime(*time.strptime(value, '%Y-%m-%d')[:3], **kwargs) except ValueError: return None class ComplexDateTimeField(StringField): """ ComplexDateTimeField handles microseconds exactly instead of rounding like DateTimeField does. Derives from a StringField so you can do `gte` and `lte` filtering by using lexicographical comparison when filtering / sorting strings. The stored string has the following format: YYYY,MM,DD,HH,MM,SS,NNNNNN Where NNNNNN is the number of microseconds of the represented `datetime`. The `,` as the separator can be easily modified by passing the `separator` keyword when initializing the field. .. versionadded:: 0.5 """ def __init__(self, separator=',', **kwargs): self.names = ['year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond'] self.separtor = separator super(ComplexDateTimeField, self).__init__(**kwargs) def _leading_zero(self, number): """ Converts the given number to a string. 
If it has only one digit, a leading zero so as it has always at least two digits. """ if int(number) < 10: return "0%s" % number else: return str(number) def _convert_from_datetime(self, val): """ Convert a `datetime` object to a string representation (which will be stored in MongoDB). This is the reverse function of `_convert_from_string`. >>> a = datetime(2011, 6, 8, 20, 26, 24, 192284) >>> RealDateTimeField()._convert_from_datetime(a) '2011,06,08,20,26,24,192284' """ data = [] for name in self.names: data.append(self._leading_zero(getattr(val, name))) return ','.join(data) def _convert_from_string(self, data): """ Convert a string representation to a `datetime` object (the object you will manipulate). This is the reverse function of `_convert_from_datetime`. >>> a = '2011,06,08,20,26,24,192284' >>> ComplexDateTimeField()._convert_from_string(a) datetime.datetime(2011, 6, 8, 20, 26, 24, 192284) """ data = data.split(',') data = map(int, data) values = {} for i in range(7): values[self.names[i]] = data[i] return datetime.datetime(**values) def __get__(self, instance, owner): data = super(ComplexDateTimeField, self).__get__(instance, owner) if data == None: return datetime.datetime.now() return self._convert_from_string(data) def __set__(self, instance, value): value = self._convert_from_datetime(value) if value else value return super(ComplexDateTimeField, self).__set__(instance, value) def validate(self, value): if not isinstance(value, datetime.datetime): self.error('Only datetime objects may used in a ' 'ComplexDateTimeField') def to_python(self, value): return self._convert_from_string(value) def to_mongo(self, value): return self._convert_from_datetime(value) def prepare_query_value(self, op, value): return self._convert_from_datetime(value) class EmbeddedDocumentField(BaseField): """An embedded document field - with a declared document_type. Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. 
""" def __init__(self, document_type, **kwargs): if not isinstance(document_type, basestring): if not issubclass(document_type, EmbeddedDocument): self.error('Invalid embedded document class provided to an ' 'EmbeddedDocumentField') self.document_type_obj = document_type super(EmbeddedDocumentField, self).__init__(**kwargs) @property def document_type(self): if isinstance(self.document_type_obj, basestring): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: self.document_type_obj = get_document(self.document_type_obj) return self.document_type_obj def to_python(self, value): if not isinstance(value, self.document_type): return self.document_type._from_son(value) return value def to_mongo(self, value): if not isinstance(value, self.document_type): return value return self.document_type.to_mongo(value) def validate(self, value): """Make sure that the document instance is an instance of the EmbeddedDocument subclass provided when the document was defined. """ # Using isinstance also works for subclasses of self.document if not isinstance(value, self.document_type): self.error('Invalid embedded document instance provided to an ' 'EmbeddedDocumentField') self.document_type.validate(value) def lookup_member(self, member_name): return self.document_type._fields.get(member_name) def prepare_query_value(self, op, value): return self.to_mongo(value) class GenericEmbeddedDocumentField(BaseField): """A generic embedded document field - allows any :class:`~mongoengine.EmbeddedDocument` to be stored. Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. 
..note :: You can use the choices param to limit the acceptable EmbeddedDocument types """ def prepare_query_value(self, op, value): return self.to_mongo(value) def to_python(self, value): if isinstance(value, dict): doc_cls = get_document(value['_cls']) value = doc_cls._from_son(value) return value def validate(self, value): if not isinstance(value, EmbeddedDocument): self.error('Invalid embedded document instance provided to an ' 'GenericEmbeddedDocumentField') value.validate() def to_mongo(self, document): if document is None: return None data = document.to_mongo() if not '_cls' in data: data['_cls'] = document._class_name return data class DynamicField(BaseField): """Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data""" def to_mongo(self, value): """Convert a Python type to a MongoDBcompatible type. """ if isinstance(value, basestring): return value if hasattr(value, 'to_mongo'): return value.to_mongo() if not isinstance(value, (dict, list, tuple)): return value is_list = False if not hasattr(value, 'items'): is_list = True value = dict([(k, v) for k, v in enumerate(value)]) data = {} for k, v in value.items(): data[k] = self.to_mongo(v) if is_list: # Convert back to a list value = [v for k, v in sorted(data.items(), key=itemgetter(0))] else: value = data return value def lookup_member(self, member_name): return member_name def prepare_query_value(self, op, value): if isinstance(value, basestring): from mongoengine.fields import StringField return StringField().prepare_query_value(op, value) return self.to_mongo(value) class ListField(ComplexBaseField): """A list field that wraps a standard field, allowing multiple instances of the field to be used as a list in the database. .. 
note:: Required means it cannot be empty - as the default for ListFields is [] """ # ListFields cannot be indexed with _types - MongoDB doesn't support this _index_with_types = False def __init__(self, field=None, **kwargs): self.field = field kwargs.setdefault('default', lambda: []) super(ListField, self).__init__(**kwargs) def validate(self, value): """Make sure that a list of valid fields is being used. """ if (not isinstance(value, (list, tuple, QuerySet)) or isinstance(value, basestring)): self.error('Only lists and tuples may be used in a list field') super(ListField, self).validate(value) def prepare_query_value(self, op, value): if self.field: if op in ('set', 'unset') and (not isinstance(value, basestring) and not isinstance(value, BaseDocument) and hasattr(value, '__iter__')): return [self.field.prepare_query_value(op, v) for v in value] return self.field.prepare_query_value(op, value) return super(ListField, self).prepare_query_value(op, value) class SortedListField(ListField): """A ListField that sorts the contents of its list before writing to the database in order to ensure that a sorted list is always retrieved. .. warning:: There is a potential race condition when handling lists. If you set / save the whole list then other processes trying to save the whole list as well could overwrite changes. The safest way to append to a list is to perform a push operation. .. versionadded:: 0.4 .. 
versionchanged:: 0.6 - added reverse keyword """ _ordering = None _order_reverse = False def __init__(self, field, **kwargs): if 'ordering' in kwargs.keys(): self._ordering = kwargs.pop('ordering') if 'reverse' in kwargs.keys(): self._order_reverse = kwargs.pop('reverse') super(SortedListField, self).__init__(field, **kwargs) def to_mongo(self, value): value = super(SortedListField, self).to_mongo(value) if self._ordering is not None: return sorted(value, key=itemgetter(self._ordering), reverse=self._order_reverse) return sorted(value, reverse=self._order_reverse) class DictField(ComplexBaseField): """A dictionary field that wraps a standard Python dictionary. This is similar to an embedded document, but the structure is not defined. .. note:: Required means it cannot be empty - as the default for ListFields is [] .. versionadded:: 0.3 .. versionchanged:: 0.5 - Can now handle complex / varying types of data """ def __init__(self, basecls=None, field=None, *args, **kwargs): self.field = field self.basecls = basecls or BaseField if not issubclass(self.basecls, BaseField): self.error('DictField only accepts dict values') kwargs.setdefault('default', lambda: {}) super(DictField, self).__init__(*args, **kwargs) def validate(self, value): """Make sure that a list of valid fields is being used. """ if not isinstance(value, dict): self.error('Only dictionaries may be used in a DictField') if any(k for k in value.keys() if not isinstance(k, basestring)): self.error('Invalid dictionary key - documents must have only string keys') if any(('.' 
in k or '$' in k) for k in value.keys()): self.error('Invalid dictionary key name - keys may not contain "."' ' or "$" characters') super(DictField, self).validate(value) def lookup_member(self, member_name): return DictField(basecls=self.basecls, db_field=member_name) def prepare_query_value(self, op, value): match_operators = ['contains', 'icontains', 'startswith', 'istartswith', 'endswith', 'iendswith', 'exact', 'iexact'] if op in match_operators and isinstance(value, basestring): return StringField().prepare_query_value(op, value) return super(DictField, self).prepare_query_value(op, value) class MapField(DictField): """A field that maps a name to a specified field type. Similar to a DictField, except the 'value' of each item must match the specified field type. .. versionadded:: 0.5 """ def __init__(self, field=None, *args, **kwargs): if not isinstance(field, BaseField): self.error('Argument to MapField constructor must be a valid ' 'field') super(MapField, self).__init__(field=field, *args, **kwargs) class ReferenceField(BaseField): """A reference to a document that will be automatically dereferenced on access (lazily). Use the `reverse_delete_rule` to handle what should happen if the document the field is referencing is deleted. EmbeddedDocuments, DictFields and MapFields do not support reverse_delete_rules and an `InvalidDocumentError` will be raised if trying to set on one of these Document / Field types. The options are: * DO_NOTHING - don't do anything (default). * NULLIFY - Updates the reference to null. * CASCADE - Deletes the documents associated with the reference. * DENY - Prevent the deletion of the reference object. * PULL - Pull the reference from a :class:`~mongoengine.ListField` of references Alternative syntax for registering delete rules (useful when implementing bi-directional delete rules) .. code-block:: python class Bar(Document): content = StringField() foo = ReferenceField('Foo') Bar.register_delete_rule(Foo, 'bar', NULLIFY) .. 
versionchanged:: 0.5 added `reverse_delete_rule` """ def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs): """Initialises the Reference Field. :param reverse_delete_rule: Determines what to do when the referring object is deleted """ if not isinstance(document_type, basestring): if not issubclass(document_type, (Document, basestring)): self.error('Argument to ReferenceField constructor must be a ' 'document class or a string') self.document_type_obj = document_type self.reverse_delete_rule = reverse_delete_rule super(ReferenceField, self).__init__(**kwargs) @property def document_type(self): if isinstance(self.document_type_obj, basestring): if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: self.document_type_obj = self.owner_document else: self.document_type_obj = get_document(self.document_type_obj) return self.document_type_obj def __get__(self, instance, owner): """Descriptor to allow lazy dereferencing. """ if instance is None: # Document class being used rather than a document object return self # Get value from document instance if available value = instance._data.get(self.name) # Dereference DBRefs if isinstance(value, (DBRef)): value = self.document_type._get_db().dereference(value) if value is not None: instance._data[self.name] = self.document_type._from_son(value) return super(ReferenceField, self).__get__(instance, owner) def to_mongo(self, document): if isinstance(document, DBRef): return document id_field_name = self.document_type._meta['id_field'] id_field = self.document_type._fields[id_field_name] if isinstance(document, Document): # We need the id from the saved object to create the DBRef id_ = document.id if id_ is None: self.error('You can only reference documents once they have' ' been saved to the database') else: id_ = document id_ = id_field.to_mongo(id_) collection = self.document_type._get_collection_name() return DBRef(collection, id_) def prepare_query_value(self, op, value): if value is None: return None 
return self.to_mongo(value) def validate(self, value): if not isinstance(value, (self.document_type, DBRef)): self.error('A ReferenceField only accepts DBRef') if isinstance(value, Document) and value.id is None: self.error('You can only reference documents once they have been ' 'saved to the database') def lookup_member(self, member_name): return self.document_type._fields.get(member_name) class GenericReferenceField(BaseField): """A reference to *any* :class:`~mongoengine.document.Document` subclass that will be automatically dereferenced on access (lazily). ..note :: Any documents used as a generic reference must be registered in the document registry. Importing the model will automatically register it. ..note :: You can use the choices param to limit the acceptable Document types .. versionadded:: 0.3 """ def __get__(self, instance, owner): if instance is None: return self value = instance._data.get(self.name) if isinstance(value, (dict, SON)): instance._data[self.name] = self.dereference(value) return super(GenericReferenceField, self).__get__(instance, owner) def validate(self, value): if not isinstance(value, (Document, DBRef)): self.error('GenericReferences can only contain documents') # We need the id from the saved object to create the DBRef if isinstance(value, Document) and value.id is None: self.error('You can only reference documents once they have been' ' saved to the database') def dereference(self, value): doc_cls = get_document(value['_cls']) reference = value['_ref'] doc = doc_cls._get_db().dereference(reference) if doc is not None: doc = doc_cls._from_son(doc) return doc def to_mongo(self, document): if document is None: return None if isinstance(document, (dict, SON)): return document id_field_name = document.__class__._meta['id_field'] id_field = document.__class__._fields[id_field_name] if isinstance(document, Document): # We need the id from the saved object to create the DBRef id_ = document.id if id_ is None: self.error('You can only 
reference documents once they have' ' been saved to the database') else: id_ = document id_ = id_field.to_mongo(id_) collection = document._get_collection_name() ref = DBRef(collection, id_) return {'_cls': document._class_name, '_ref': ref} def prepare_query_value(self, op, value): if value is None: return None return self.to_mongo(value) class BinaryField(BaseField): """A binary data field. """ def __init__(self, max_bytes=None, **kwargs): self.max_bytes = max_bytes super(BinaryField, self).__init__(**kwargs) def to_mongo(self, value): return Binary(value) def to_python(self, value): # Returns str not unicode as this is binary data return str(value) def validate(self, value): if not isinstance(value, str): self.error('BinaryField only accepts string values') if self.max_bytes is not None and len(value) > self.max_bytes: self.error('Binary value is too long') class GridFSError(Exception): pass class GridFSProxy(object): """Proxy object to handle writing and reading of files to and from GridFS .. versionadded:: 0.4 .. versionchanged:: 0.5 - added optional size param to read .. 
versionchanged:: 0.6 - added collection name param """ _fs = None def __init__(self, grid_id=None, key=None, instance=None, db_alias=DEFAULT_CONNECTION_NAME, collection_name='fs'): self.grid_id = grid_id # Store GridFS id for file self.key = key self.instance = instance self.db_alias = db_alias self.collection_name = collection_name self.newfile = None # Used for partial writes self.gridout = None def __getattr__(self, name): attrs = ('_fs', 'grid_id', 'key', 'instance', 'db_alias', 'collection_name', 'newfile', 'gridout') if name in attrs: return self.__getattribute__(name) obj = self.get() if name in dir(obj): return getattr(obj, name) raise AttributeError def __get__(self, instance, value): return self def __nonzero__(self): return bool(self.grid_id) def __getstate__(self): self_dict = self.__dict__ self_dict['_fs'] = None return self_dict def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self.grid_id) def __cmp__(self, other): return cmp((self.grid_id, self.collection_name, self.db_alias), (other.grid_id, other.collection_name, other.db_alias)) @property def fs(self): if not self._fs: self._fs = gridfs.GridFS(get_db(self.db_alias), self.collection_name) return self._fs def get(self, id=None): if id: self.grid_id = id if self.grid_id is None: return None try: if self.gridout is None: self.gridout = self.fs.get(self.grid_id) return self.gridout except: # File has been deleted return None def new_file(self, **kwargs): self.newfile = self.fs.new_file(**kwargs) self.grid_id = self.newfile._id def put(self, file_obj, **kwargs): if self.grid_id: raise GridFSError('This document already has a file. Either delete ' 'it or call replace to overwrite it') self.grid_id = self.fs.put(file_obj, **kwargs) self._mark_as_changed() def write(self, string): if self.grid_id: if not self.newfile: raise GridFSError('This document already has a file. 
Either ' 'delete it or call replace to overwrite it') else: self.new_file() self.newfile.write(string) def writelines(self, lines): if not self.newfile: self.new_file() self.grid_id = self.newfile._id self.newfile.writelines(lines) def read(self, size=-1): gridout = self.get() if gridout is None: return None else: try: return gridout.read(size) except: return "" def delete(self): # Delete file from GridFS, FileField still remains self.fs.delete(self.grid_id) self.grid_id = None self.gridout = None self._mark_as_changed() def replace(self, file_obj, **kwargs): self.delete() self.put(file_obj, **kwargs) def close(self): if self.newfile: self.newfile.close() def _mark_as_changed(self): """Inform the instance that `self.key` has been changed""" if self.instance: self.instance._mark_as_changed(self.key) class FileField(BaseField): """A GridFS storage field. .. versionadded:: 0.4 .. versionchanged:: 0.5 added optional size param for read .. versionchanged:: 0.6 added db_alias for multidb support """ proxy_class = GridFSProxy def __init__(self, db_alias=DEFAULT_CONNECTION_NAME, collection_name="fs", **kwargs): super(FileField, self).__init__(**kwargs) self.collection_name = collection_name self.db_alias = db_alias def __get__(self, instance, owner): if instance is None: return self # Check if a file already exists for this model grid_file = instance._data.get(self.name) if not isinstance(grid_file, self.proxy_class): grid_file = self.proxy_class(key=self.name, instance=instance, db_alias=self.db_alias, collection_name=self.collection_name) instance._data[self.name] = grid_file if not grid_file.key: grid_file.key = self.name grid_file.instance = instance return grid_file def __set__(self, instance, value): key = self.name if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, str): # using "FileField() = file/string" notation grid_file = instance._data.get(self.name) # If a file already exists, delete it if grid_file: try: 
grid_file.delete() except: pass # Create a new file with the new data grid_file.put(value) else: # Create a new proxy object as we don't already have one instance._data[key] = self.proxy_class(key=key, instance=instance, collection_name=self.collection_name) instance._data[key].put(value) else: instance._data[key] = value instance._mark_as_changed(key) def to_mongo(self, value): # Store the GridFS file id in MongoDB if isinstance(value, self.proxy_class) and value.grid_id is not None: return value.grid_id return None def to_python(self, value): if value is not None: return self.proxy_class(value, collection_name=self.collection_name, db_alias=self.db_alias) def validate(self, value): if value.grid_id is not None: if not isinstance(value, self.proxy_class): self.error('FileField only accepts GridFSProxy values') if not isinstance(value.grid_id, ObjectId): self.error('Invalid GridFSProxy value') class ImageGridFsProxy(GridFSProxy): """ Proxy for ImageField versionadded: 0.6 """ def put(self, file_obj, **kwargs): """ Insert a image in database applying field properties (size, thumbnail_size) """ field = self.instance._fields[self.key] try: img = Image.open(file_obj) except: raise ValidationError('Invalid image') if (field.size and (img.size[0] > field.size['width'] or img.size[1] > field.size['height'])): size = field.size if size['force']: img = ImageOps.fit(img, (size['width'], size['height']), Image.ANTIALIAS) else: img.thumbnail((size['width'], size['height']), Image.ANTIALIAS) thumbnail = None if field.thumbnail_size: size = field.thumbnail_size if size['force']: thumbnail = ImageOps.fit(img, (size['width'], size['height']), Image.ANTIALIAS) else: thumbnail = img.copy() thumbnail.thumbnail((size['width'], size['height']), Image.ANTIALIAS) if thumbnail: thumb_id = self._put_thumbnail(thumbnail, img.format) else: thumb_id = None w, h = img.size io = StringIO() img.save(io, img.format) io.seek(0) return super(ImageGridFsProxy, self).put(io, width=w, height=h, 
format=img.format, thumbnail_id=thumb_id, **kwargs) def delete(self, *args, **kwargs): #deletes thumbnail out = self.get() if out and out.thumbnail_id: self.fs.delete(out.thumbnail_id) return super(ImageGridFsProxy, self).delete(*args, **kwargs) def _put_thumbnail(self, thumbnail, format, **kwargs): w, h = thumbnail.size io = StringIO() thumbnail.save(io, format) io.seek(0) return self.fs.put(io, width=w, height=h, format=format, **kwargs) @property def size(self): """ return a width, height of image """ out = self.get() if out: return out.width, out.height @property def format(self): """ return format of image ex: PNG, JPEG, GIF, etc """ out = self.get() if out: return out.format @property def thumbnail(self): """ return a gridfs.grid_file.GridOut representing a thumbnail of Image """ out = self.get() if out and out.thumbnail_id: return self.fs.get(out.thumbnail_id) def write(self, *args, **kwargs): raise RuntimeError("Please use \"put\" method instead") def writelines(self, *args, **kwargs): raise RuntimeError("Please use \"put\" method instead") class ImproperlyConfigured(Exception): pass class ImageField(FileField): """ A Image File storage field. @size (width, height, force): max size to store images, if larger will be automatically resized ex: size=(800, 600, True) @thumbnail (width, height, force): size to generate a thumbnail .. 
versionadded:: 0.6 """ proxy_class = ImageGridFsProxy def __init__(self, size=None, thumbnail_size=None, collection_name='images', **kwargs): if not Image: raise ImproperlyConfigured("PIL library was not found") params_size = ('width', 'height', 'force') extra_args = dict(size=size, thumbnail_size=thumbnail_size) for att_name, att in extra_args.items(): if att and (isinstance(att, tuple) or isinstance(att, list)): setattr(self, att_name, dict( map(None, params_size, att))) else: setattr(self, att_name, None) super(ImageField, self).__init__( collection_name=collection_name, **kwargs) class GeoPointField(BaseField): """A list storing a latitude and longitude. .. versionadded:: 0.4 """ _geo_index = True def validate(self, value): """Make sure that a geo-value is of type (x, y) """ if not isinstance(value, (list, tuple)): self.error('GeoPointField can only accept tuples or lists ' 'of (x, y)') if not len(value) == 2: self.error('Value must be a two-dimensional point') if (not isinstance(value[0], (float, int)) and not isinstance(value[1], (float, int))): self.error('Both values in point must be float or int') class SequenceField(IntField): """Provides a sequental counter (see http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers) .. note:: Although traditional databases often use increasing sequence numbers for primary keys. In MongoDB, the preferred approach is to use Object IDs instead. The concept is that in a very large cluster of machines, it is easier to create an object ID than have global, uniformly increasing sequence numbers. .. 
versionadded:: 0.5 """ def __init__(self, collection_name=None, db_alias = None, *args, **kwargs): self.collection_name = collection_name or 'mongoengine.counters' self.db_alias = db_alias or DEFAULT_CONNECTION_NAME return super(SequenceField, self).__init__(*args, **kwargs) def generate_new_value(self): """ Generate and Increment the counter """ sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(), self.name) collection = get_db(alias = self.db_alias )[self.collection_name] counter = collection.find_and_modify(query={"_id": sequence_id}, update={"$inc": {"next": 1}}, new=True, upsert=True) return counter['next'] def __get__(self, instance, owner): if instance is None: return self if not instance._data: return value = instance._data.get(self.name) if not value and instance._initialised: value = self.generate_new_value() instance._data[self.name] = value instance._mark_as_changed(self.name) return value def __set__(self, instance, value): if value is None and instance._initialised: value = self.generate_new_value() return super(SequenceField, self).__set__(instance, value) def to_python(self, value): if value is None: value = self.generate_new_value() return value class UUIDField(BaseField): """A UUID field. .. 
versionadded:: 0.6 """ def __init__(self, **kwargs): super(UUIDField, self).__init__(**kwargs) def to_python(self, value): if not isinstance(value, basestring): value = unicode(value) return uuid.UUID(value) def to_mongo(self, value): return unicode(value) def validate(self, value): if not isinstance(value, uuid.UUID): if not isinstance(value, basestring): value = str(value) try: value = uuid.UUID(value) except Exception, exc: self.error('Could not convert to UUID: %s' % exc) MongoEngine-mongoengine-7a1b110/mongoengine/queryset.py000066400000000000000000002114741177143177100233220ustar00rootroot00000000000000import pprint import re import copy import itertools import operator import pymongo from bson.code import Code from mongoengine import signals __all__ = ['queryset_manager', 'Q', 'InvalidQueryError', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL'] # The maximum number of items to display in a QuerySet.__repr__ REPR_OUTPUT_SIZE = 20 # Delete rules DO_NOTHING = 0 NULLIFY = 1 CASCADE = 2 DENY = 3 PULL = 4 class DoesNotExist(Exception): pass class MultipleObjectsReturned(Exception): pass class InvalidQueryError(Exception): pass class OperationError(Exception): pass RE_TYPE = type(re.compile('')) class QNodeVisitor(object): """Base visitor class for visiting Q-object nodes in a query tree. """ def visit_combination(self, combination): """Called by QCombination objects. """ return combination def visit_query(self, query): """Called by (New)Q objects. """ return query class SimplificationVisitor(QNodeVisitor): """Simplifies query trees by combinging unnecessary 'and' connection nodes into a single Q-object. 
""" def visit_combination(self, combination): if combination.operation == combination.AND: # The simplification only applies to 'simple' queries if all(isinstance(node, Q) for node in combination.children): queries = [node.query for node in combination.children] return Q(**self._query_conjunction(queries)) return combination def _query_conjunction(self, queries): """Merges query dicts - effectively &ing them together. """ query_ops = set() combined_query = {} for query in queries: ops = set(query.keys()) # Make sure that the same operation isn't applied more than once # to a single field intersection = ops.intersection(query_ops) if intersection: msg = 'Duplicate query conditions: ' raise InvalidQueryError(msg + ', '.join(intersection)) query_ops.update(ops) combined_query.update(copy.deepcopy(query)) return combined_query class QueryTreeTransformerVisitor(QNodeVisitor): """Transforms the query tree in to a form that may be used with MongoDB. """ def visit_combination(self, combination): if combination.operation == combination.AND: # MongoDB doesn't allow us to have too many $or operations in our # queries, so the aim is to move the ORs up the tree to one # 'master' $or. Firstly, we must find all the necessary parts (part # of an AND combination or just standard Q object), and store them # separately from the OR parts. or_groups = [] and_parts = [] for node in combination.children: if isinstance(node, QCombination): if node.operation == node.OR: # Any of the children in an $or component may cause # the query to succeed or_groups.append(node.children) elif node.operation == node.AND: and_parts.append(node) elif isinstance(node, Q): and_parts.append(node) # Now we combine the parts into a usable query. AND together all of # the necessary parts. Then for each $or part, create a new query # that ANDs the necessary part with the $or part. 
clauses = [] for or_group in itertools.product(*or_groups): q_object = reduce(lambda a, b: a & b, and_parts, Q()) q_object = reduce(lambda a, b: a & b, or_group, q_object) clauses.append(q_object) # Finally, $or the generated clauses in to one query. Each of the # clauses is sufficient for the query to succeed. return reduce(lambda a, b: a | b, clauses, Q()) if combination.operation == combination.OR: children = [] # Crush any nested ORs in to this combination as MongoDB doesn't # support nested $or operations for node in combination.children: if (isinstance(node, QCombination) and node.operation == combination.OR): children += node.children else: children.append(node) combination.children = children return combination class QueryCompilerVisitor(QNodeVisitor): """Compiles the nodes in a query tree to a PyMongo-compatible query dictionary. """ def __init__(self, document): self.document = document def visit_combination(self, combination): if combination.operation == combination.OR: return {'$or': combination.children} elif combination.operation == combination.AND: return self._mongo_query_conjunction(combination.children) return combination def visit_query(self, query): return QuerySet._transform_query(self.document, **query.query) def _mongo_query_conjunction(self, queries): """Merges Mongo query dicts - effectively &ing them together. 
""" combined_query = {} for query in queries: for field, ops in query.items(): if field not in combined_query: combined_query[field] = ops else: # The field is already present in the query the only way # we can merge is if both the existing value and the new # value are operation dicts, reject anything else if (not isinstance(combined_query[field], dict) or not isinstance(ops, dict)): message = 'Conflicting values for ' + field raise InvalidQueryError(message) current_ops = set(combined_query[field].keys()) new_ops = set(ops.keys()) # Make sure that the same operation isn't applied more than # once to a single field intersection = current_ops.intersection(new_ops) if intersection: msg = 'Duplicate query conditions: ' raise InvalidQueryError(msg + ', '.join(intersection)) # Right! We've got two non-overlapping dicts of operations! combined_query[field].update(copy.deepcopy(ops)) return combined_query class QNode(object): """Base class for nodes in query trees. """ AND = 0 OR = 1 def to_query(self, document): query = self.accept(SimplificationVisitor()) query = query.accept(QueryTreeTransformerVisitor()) query = query.accept(QueryCompilerVisitor(document)) return query def accept(self, visitor): raise NotImplementedError def _combine(self, other, operation): """Combine this node with another node into a QCombination object. """ if other.empty: return self if self.empty: return other return QCombination(operation, [self, other]) @property def empty(self): return False def __or__(self, other): return self._combine(other, self.OR) def __and__(self, other): return self._combine(other, self.AND) class QCombination(QNode): """Represents the combination of several conditions by a given logical operator. 
""" def __init__(self, operation, children): self.operation = operation self.children = [] for node in children: # If the child is a combination of the same type, we can merge its # children directly into this combinations children if isinstance(node, QCombination) and node.operation == operation: self.children += node.children else: self.children.append(node) def accept(self, visitor): for i in range(len(self.children)): if isinstance(self.children[i], QNode): self.children[i] = self.children[i].accept(visitor) return visitor.visit_combination(self) @property def empty(self): return not bool(self.children) class Q(QNode): """A simple query object, used in a query tree to build up more complex query structures. """ def __init__(self, **query): self.query = query def accept(self, visitor): return visitor.visit_query(self) @property def empty(self): return not bool(self.query) class QueryFieldList(object): """Object that handles combinations of .only() and .exclude() calls""" ONLY = 1 EXCLUDE = 0 def __init__(self, fields=[], value=ONLY, always_include=[]): self.value = value self.fields = set(fields) self.always_include = set(always_include) self._id = None def as_dict(self): field_list = dict((field, self.value) for field in self.fields) if self._id is not None: field_list['_id'] = self._id return field_list def __add__(self, f): if not self.fields: self.fields = f.fields self.value = f.value elif self.value is self.ONLY and f.value is self.ONLY: self.fields = self.fields.intersection(f.fields) elif self.value is self.EXCLUDE and f.value is self.EXCLUDE: self.fields = self.fields.union(f.fields) elif self.value is self.ONLY and f.value is self.EXCLUDE: self.fields -= f.fields elif self.value is self.EXCLUDE and f.value is self.ONLY: self.value = self.ONLY self.fields = f.fields - self.fields if '_id' in f.fields: self._id = f.value if self.always_include: if self.value is self.ONLY and self.fields: self.fields = self.fields.union(self.always_include) else: 
self.fields -= self.always_include return self def reset(self): self.fields = set([]) self.value = self.ONLY def __nonzero__(self): return bool(self.fields) class QuerySet(object): """A set of results returned from a query. Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as the results. """ __already_indexed = set() def __init__(self, document, collection): self._document = document self._collection_obj = collection self._mongo_query = None self._query_obj = Q() self._initial_query = {} self._where_clause = None self._loaded_fields = QueryFieldList() self._ordering = [] self._snapshot = False self._timeout = True self._class_check = True self._slave_okay = False self._iter = False self._scalar = [] # If inheritance is allowed, only return instances and instances of # subclasses of the class being used if document._meta.get('allow_inheritance'): self._initial_query = {'_types': self._document._class_name} self._loaded_fields = QueryFieldList(always_include=['_cls']) self._cursor_obj = None self._limit = None self._skip = None self._hint = -1 # Using -1 as None is a valid value for hint def clone(self): """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet` .. versionadded:: 0.5 """ c = self.__class__(self._document, self._collection_obj) copy_props = ('_initial_query', '_query_obj', '_where_clause', '_loaded_fields', '_ordering', '_snapshot', '_timeout', '_limit', '_skip', '_slave_okay', '_hint') for prop in copy_props: val = getattr(self, prop) setattr(c, prop, copy.deepcopy(val)) return c @property def _query(self): if self._mongo_query is None: self._mongo_query = self._query_obj.to_query(self._document) if self._class_check: self._mongo_query.update(self._initial_query) return self._mongo_query def ensure_index(self, key_or_list, drop_dups=False, background=False, **kwargs): """Ensure that the given indexes are in place. 
:param key_or_list: a single index key or a list of index keys (to construct a multi-field index); keys may be prefixed with a **+** or a **-** to determine the index ordering """ index_spec = QuerySet._build_index_spec(self._document, key_or_list) self._collection.ensure_index( index_spec['fields'], drop_dups=drop_dups, background=background, sparse=index_spec.get('sparse', False), unique=index_spec.get('unique', False)) return self def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): """Filter the selected documents by calling the :class:`~mongoengine.queryset.QuerySet` with a query. :param q_obj: a :class:`~mongoengine.queryset.Q` object to be used in the query; the :class:`~mongoengine.queryset.QuerySet` is filtered multiple times with different :class:`~mongoengine.queryset.Q` objects, only the last one will be used :param class_check: If set to False bypass class name check when querying collection :param slave_okay: if True, allows this query to be run against a replica secondary. :param query: Django-style query keyword arguments """ query = Q(**query) if q_obj: query &= q_obj self._query_obj &= query self._mongo_query = None self._cursor_obj = None self._class_check = class_check return self def filter(self, *q_objs, **query): """An alias of :meth:`~mongoengine.queryset.QuerySet.__call__` """ return self.__call__(*q_objs, **query) def all(self): """Returns all documents.""" return self.__call__() def _ensure_indexes(self): """Checks the document meta data and ensures all the indexes exist. .. 
note:: You can disable automatic index creation by setting `auto_create_index` to False in the documents meta data """ background = self._document._meta.get('index_background', False) drop_dups = self._document._meta.get('index_drop_dups', False) index_opts = self._document._meta.get('index_opts', {}) index_types = self._document._meta.get('index_types', True) # determine if an index which we are creating includes # _type as its first field; if so, we can avoid creating # an extra index on _type, as mongodb will use the existing # index to service queries against _type types_indexed = False def includes_types(fields): first_field = None if len(fields): if isinstance(fields[0], basestring): first_field = fields[0] elif isinstance(fields[0], (list, tuple)) and len(fields[0]): first_field = fields[0][0] return first_field == '_types' # Ensure indexes created by uniqueness constraints for index in self._document._meta['unique_indexes']: types_indexed = types_indexed or includes_types(index) self._collection.ensure_index(index, unique=True, background=background, drop_dups=drop_dups, **index_opts) # Ensure document-defined indexes are created if self._document._meta['indexes']: for spec in self._document._meta['indexes']: types_indexed = types_indexed or includes_types(spec['fields']) opts = index_opts.copy() opts['unique'] = spec.get('unique', False) opts['sparse'] = spec.get('sparse', False) self._collection.ensure_index(spec['fields'], background=background, **opts) # If _types is being used (for polymorphism), it needs an index, # only if another index doesn't begin with _types if index_types and '_types' in self._query and not types_indexed: self._collection.ensure_index('_types', background=background, **index_opts) # Add geo indicies for field in self._document._geo_indices(): index_spec = [(field.db_field, pymongo.GEO2D)] self._collection.ensure_index(index_spec, background=background, **index_opts) @classmethod def _build_index_spec(cls, doc_cls, spec): 
"""Build a PyMongo index spec from a MongoEngine index spec. """ if isinstance(spec, basestring): spec = {'fields': [spec]} if isinstance(spec, (list, tuple)): spec = {'fields': spec} index_list = [] use_types = doc_cls._meta.get('allow_inheritance', True) for key in spec['fields']: # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from * direction = pymongo.ASCENDING if key.startswith("-"): direction = pymongo.DESCENDING elif key.startswith("*"): direction = pymongo.GEO2D if key.startswith(("+", "-", "*")): key = key[1:] # Use real field name, do it manually because we need field # objects for the next part (list field checking) parts = key.split('.') if parts in (['pk'], ['id'], ['_id']): key = '_id' else: fields = QuerySet._lookup_field(doc_cls, parts) parts = [field if field == '_id' else field.db_field for field in fields] key = '.'.join(parts) index_list.append((key, direction)) # If sparse - dont include types if spec.get('sparse', False): use_types = False # Check if a list field is being used, don't use _types if it is if use_types and not all(f._index_with_types for f in fields): use_types = False # If _types is being used, prepend it to every specified index index_types = doc_cls._meta.get('index_types', True) allow_inheritance = doc_cls._meta.get('allow_inheritance') if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D: index_list.insert(0, ('_types', 1)) spec['fields'] = index_list if spec.get('sparse', False) and len(spec['fields']) > 1: raise ValueError( 'Sparse indexes can only have one field in them. ' 'See https://jira.mongodb.org/browse/SERVER-2193') return spec @classmethod def _reset_already_indexed(cls, document=None): """Helper to reset already indexed, can be useful for testing purposes""" if document: cls.__already_indexed.discard(document) cls.__already_indexed.clear() @property def _collection(self): """Property that returns the collection object. 
This allows us to perform operations only if the collection is accessed. """ if self._document not in QuerySet.__already_indexed: # Ensure collection exists db = self._document._get_db() if self._collection_obj.name not in db.collection_names(): self._document._collection = None self._collection_obj = self._document._get_collection() QuerySet.__already_indexed.add(self._document) if self._document._meta.get('auto_create_index', True): self._ensure_indexes() return self._collection_obj @property def _cursor_args(self): cursor_args = { 'snapshot': self._snapshot, 'timeout': self._timeout, 'slave_okay': self._slave_okay } if self._loaded_fields: cursor_args['fields'] = self._loaded_fields.as_dict() return cursor_args @property def _cursor(self): if self._cursor_obj is None: self._cursor_obj = self._collection.find(self._query, **self._cursor_args) # Apply where clauses to cursor if self._where_clause: self._cursor_obj.where(self._where_clause) # apply default ordering if self._ordering: self._cursor_obj.sort(self._ordering) elif self._document._meta['ordering']: self.order_by(*self._document._meta['ordering']) if self._limit is not None: self._cursor_obj.limit(self._limit - (self._skip or 0)) if self._skip is not None: self._cursor_obj.skip(self._skip) if self._hint != -1: self._cursor_obj.hint(self._hint) return self._cursor_obj @classmethod def _lookup_field(cls, document, parts): """Lookup a field based on its attribute and return a list containing the field's parents and the field. 
""" if not isinstance(parts, (list, tuple)): parts = [parts] fields = [] field = None for field_name in parts: # Handle ListField indexing: if field_name.isdigit(): try: new_field = field.field except AttributeError, err: raise InvalidQueryError( "Can't use index on unsubscriptable field (%s)" % err) fields.append(field_name) continue if field is None: # Look up first field from the document if field_name == 'pk': # Deal with "primary key" alias field_name = document._meta['id_field'] if field_name in document._fields: field = document._fields[field_name] elif document._dynamic: from fields import DynamicField field = DynamicField(db_field=field_name) else: raise InvalidQueryError('Cannot resolve field "%s"' % field_name) else: from mongoengine.fields import ReferenceField, GenericReferenceField if isinstance(field, (ReferenceField, GenericReferenceField)): raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts)) if getattr(field, 'field', None): new_field = field.field.lookup_member(field_name) else: # Look up subfield on the previous field new_field = field.lookup_member(field_name) from base import ComplexBaseField if not new_field and isinstance(field, ComplexBaseField): fields.append(field_name) continue elif not new_field: raise InvalidQueryError('Cannot resolve field "%s"' % field_name) field = new_field # update field to the new field type fields.append(field) return fields @classmethod def _translate_field_name(cls, doc_cls, field, sep='.'): """Translate a field attribute name to a database field name. """ parts = field.split(sep) parts = [f.db_field for f in QuerySet._lookup_field(doc_cls, parts)] return '.'.join(parts) @classmethod def _transform_query(cls, _doc_cls=None, _field_operation=False, **query): """Transform a query from Django-style format to Mongo format. 
""" operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', 'all', 'size', 'exists', 'not'] geo_operators = ['within_distance', 'within_spherical_distance', 'within_box', 'within_polygon', 'near', 'near_sphere'] match_operators = ['contains', 'icontains', 'startswith', 'istartswith', 'endswith', 'iendswith', 'exact', 'iexact'] custom_operators = ['match'] mongo_query = {} for key, value in query.items(): if key == "__raw__": mongo_query.update(value) continue parts = key.split('__') indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()] parts = [part for part in parts if not part.isdigit()] # Check for an operator and transform to mongo-style if there is op = None if parts[-1] in operators + match_operators + geo_operators + custom_operators: op = parts.pop() negate = False if parts[-1] == 'not': parts.pop() negate = True if _doc_cls: # Switch field names to proper names [set in Field(name='foo')] fields = QuerySet._lookup_field(_doc_cls, parts) parts = [] cleaned_fields = [] for field in fields: append_field = True if isinstance(field, str): parts.append(field) append_field = False else: parts.append(field.db_field) if append_field: cleaned_fields.append(field) # Convert value to proper value field = cleaned_fields[-1] singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] singular_ops += match_operators if op in singular_ops: if isinstance(field, basestring): if op in match_operators and isinstance(value, basestring): from mongoengine import StringField value = StringField.prepare_query_value(op, value) else: value = field else: value = field.prepare_query_value(op, value) elif op in ('in', 'nin', 'all', 'near'): # 'in', 'nin' and 'all' require a list of values value = [field.prepare_query_value(op, v) for v in value] # if op and op not in match_operators: if op: if op in geo_operators: if op == "within_distance": value = {'$within': {'$center': value}} elif op == "within_spherical_distance": value = {'$within': {'$centerSphere': value}} 
elif op == "within_polygon": value = {'$within': {'$polygon': value}} elif op == "near": value = {'$near': value} elif op == "near_sphere": value = {'$nearSphere': value} elif op == 'within_box': value = {'$within': {'$box': value}} else: raise NotImplementedError("Geo method '%s' has not " "been implemented" % op) elif op in custom_operators: if op == 'match': value = {"$elemMatch": value} else: NotImplementedError("Custom method '%s' has not " "been implemented" % op) elif op not in match_operators: value = {'$' + op: value} if negate: value = {'$not': value} for i, part in indices: parts.insert(i, part) key = '.'.join(parts) if op is None or key not in mongo_query: mongo_query[key] = value elif key in mongo_query and isinstance(mongo_query[key], dict): mongo_query[key].update(value) return mongo_query def get(self, *q_objs, **query): """Retrieve the the matching object raising :class:`~mongoengine.queryset.MultipleObjectsReturned` or `DocumentName.MultipleObjectsReturned` exception if multiple results and :class:`~mongoengine.queryset.DoesNotExist` or `DocumentName.DoesNotExist` if no results are found. .. versionadded:: 0.3 """ self.limit(2) self.__call__(*q_objs, **query) try: result1 = self.next() except StopIteration: raise self._document.DoesNotExist("%s matching query does not exist." % self._document._class_name) try: result2 = self.next() except StopIteration: return result1 self.rewind() message = u'%d items returned, instead of 1' % self.count() raise self._document.MultipleObjectsReturned(message) def get_or_create(self, write_options=None, auto_save=True, *q_objs, **query): """Retrieve unique object or create, if it doesn't exist. Returns a tuple of ``(object, created)``, where ``object`` is the retrieved or created object and ``created`` is a boolean specifying whether a new object was created. Raises :class:`~mongoengine.queryset.MultipleObjectsReturned` or `DocumentName.MultipleObjectsReturned` if multiple results are found. 
A new document will be created if the document doesn't exists; a dictionary of default values for the new document may be provided as a keyword argument called :attr:`defaults`. .. note:: This requires two separate operations and therefore a race condition exists. Because there are no transactions in mongoDB other approaches should be investigated, to ensure you don't accidently duplicate data when using this method. :param write_options: optional extra keyword arguments used if we have to create a new document. Passes any write_options onto :meth:`~mongoengine.Document.save` .. versionadded:: 0.3 :param auto_save: if the object is to be saved automatically if not found. .. versionadded:: 0.6 """ defaults = query.get('defaults', {}) if 'defaults' in query: del query['defaults'] try: doc = self.get(*q_objs, **query) return doc, False except self._document.DoesNotExist: query.update(defaults) doc = self._document(**query) if auto_save: doc.save(write_options=write_options) return doc, True def create(self, **kwargs): """Create new object. Returns the saved object instance. .. versionadded:: 0.4 """ doc = self._document(**kwargs) doc.save() return doc def first(self): """Retrieve the first object matching the query. """ try: result = self[0] except IndexError: result = None return result def insert(self, doc_or_docs, load_bulk=True, safe=False, write_options=None): """bulk insert documents If ``safe=True`` and the operation is unsuccessful, an :class:`~mongoengine.OperationError` will be raised. :param docs_or_doc: a document or list of documents to be inserted :param load_bulk (optional): If True returns the list of document instances :param safe: check if the operation succeeded before returning :param write_options: Extra keyword arguments are passed down to :meth:`~pymongo.collection.Collection.insert` which will be used as options for the resultant ``getLastError`` command. 
For example, ``insert(..., {w: 2, fsync: True})`` will wait until at least two servers have recorded the write and will force an fsync on each server being written to. By default returns document instances, set ``load_bulk`` to False to return just ``ObjectIds`` .. versionadded:: 0.5 """ from document import Document if not write_options: write_options = {} write_options.update({'safe': safe}) docs = doc_or_docs return_one = False if isinstance(docs, Document) or issubclass(docs.__class__, Document): return_one = True docs = [docs] raw = [] for doc in docs: if not isinstance(doc, self._document): msg = "Some documents inserted aren't instances of %s" % str(self._document) raise OperationError(msg) if doc.pk: msg = "Some documents have ObjectIds use doc.update() instead" raise OperationError(msg) raw.append(doc.to_mongo()) signals.pre_bulk_insert.send(self._document, documents=docs) try: ids = self._collection.insert(raw, **write_options) except pymongo.errors.OperationFailure, err: message = 'Could not save document (%s)' if u'duplicate key' in unicode(err): message = u'Tried to save duplicate unique keys (%s)' raise OperationError(message % unicode(err)) if not load_bulk: signals.post_bulk_insert.send( self._document, documents=docs, loaded=False) return return_one and ids[0] or ids documents = self.in_bulk(ids) results = [] for obj_id in ids: results.append(documents.get(obj_id)) signals.post_bulk_insert.send( self._document, documents=results, loaded=True) return return_one and results[0] or results def with_id(self, object_id): """Retrieve the object matching the id provided. Uses `object_id` only and raises InvalidQueryError if a filter has been applied. :param object_id: the value for the id of the document to look up .. 
versionchanged:: 0.6 Raises InvalidQueryError if filter has been set """ if not self._query_obj.empty: raise InvalidQueryError("Cannot use a filter whilst using `with_id`") return self.filter(pk=object_id).first() def in_bulk(self, object_ids): """Retrieve a set of documents by their ids. :param object_ids: a list or tuple of ``ObjectId``\ s :rtype: dict of ObjectIds as keys and collection-specific Document subclasses as values. .. versionadded:: 0.3 """ doc_map = {} docs = self._collection.find({'_id': {'$in': object_ids}}, **self._cursor_args) if self._scalar: for doc in docs: doc_map[doc['_id']] = self._get_scalar( self._document._from_son(doc)) else: for doc in docs: doc_map[doc['_id']] = self._document._from_son(doc) return doc_map def next(self): """Wrap the result in a :class:`~mongoengine.Document` object. """ self._iter = True try: if self._limit == 0: raise StopIteration if self._scalar: return self._get_scalar(self._document._from_son( self._cursor.next())) return self._document._from_son(self._cursor.next()) except StopIteration, e: self.rewind() raise e def rewind(self): """Rewind the cursor to its unevaluated state. .. versionadded:: 0.3 """ self._iter = False self._cursor.rewind() def count(self): """Count the selected elements in the query. """ if self._limit == 0: return 0 return self._cursor.count(with_limit_and_skip=True) def __len__(self): return self.count() def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None, scope=None): """Perform a map/reduce query using the current query spec and ordering. While ``map_reduce`` respects ``QuerySet`` chaining, it must be the last call made, as it does not return a maleable ``QuerySet``. See the :meth:`~mongoengine.tests.QuerySetTest.test_map_reduce` and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced` tests in ``tests.queryset.QuerySetTest`` for usage examples. 
:param map_f: map function, as :class:`~bson.code.Code` or string :param reduce_f: reduce function, as :class:`~bson.code.Code` or string :param output: output collection name, if set to 'inline' will try to use :class:`~pymongo.collection.Collection.inline_map_reduce` :param finalize_f: finalize function, an optional function that performs any post-reduction processing. :param scope: values to insert into map/reduce global scope. Optional. :param limit: number of objects from current query to provide to map/reduce method Returns an iterator yielding :class:`~mongoengine.document.MapReduceDocument`. .. note:: Map/Reduce changed in server version **>= 1.7.4**. The PyMongo :meth:`~pymongo.collection.Collection.map_reduce` helper requires PyMongo version **>= 1.11**. .. versionchanged:: 0.5 - removed ``keep_temp`` keyword argument, which was only relevant for MongoDB server versions older than 1.7.4 .. versionadded:: 0.3 """ from document import MapReduceDocument if not hasattr(self._collection, "map_reduce"): raise NotImplementedError("Requires MongoDB >= 1.7.1") map_f_scope = {} if isinstance(map_f, Code): map_f_scope = map_f.scope map_f = unicode(map_f) map_f = Code(self._sub_js_fields(map_f), map_f_scope) reduce_f_scope = {} if isinstance(reduce_f, Code): reduce_f_scope = reduce_f.scope reduce_f = unicode(reduce_f) reduce_f_code = self._sub_js_fields(reduce_f) reduce_f = Code(reduce_f_code, reduce_f_scope) mr_args = {'query': self._query} if finalize_f: finalize_f_scope = {} if isinstance(finalize_f, Code): finalize_f_scope = finalize_f.scope finalize_f = unicode(finalize_f) finalize_f_code = self._sub_js_fields(finalize_f) finalize_f = Code(finalize_f_code, finalize_f_scope) mr_args['finalize'] = finalize_f if scope: mr_args['scope'] = scope if limit: mr_args['limit'] = limit if output == 'inline' and not self._ordering: map_reduce_function = 'inline_map_reduce' else: map_reduce_function = 'map_reduce' mr_args['out'] = output results = getattr(self._collection, 
map_reduce_function)(map_f, reduce_f, **mr_args) if map_reduce_function == 'map_reduce': results = results.find() if self._ordering: results = results.sort(self._ordering) for doc in results: yield MapReduceDocument(self._document, self._collection, doc['_id'], doc['value']) def limit(self, n): """Limit the number of returned documents to `n`. This may also be achieved using array-slicing syntax (e.g. ``User.objects[:5]``). :param n: the maximum number of objects to return """ if n == 0: self._cursor.limit(1) else: self._cursor.limit(n) self._limit = n # Return self to allow chaining return self def skip(self, n): """Skip `n` documents before returning the results. This may also be achieved using array-slicing syntax (e.g. ``User.objects[5:]``). :param n: the number of objects to skip before returning results """ self._cursor.skip(n) self._skip = n return self def hint(self, index=None): """Added 'hint' support, telling Mongo the proper index to use for the query. Judicious use of hints can greatly improve query performance. When doing a query on multiple fields (at least one of which is indexed) pass the indexed field as a hint to the query. Hinting will not do anything if the corresponding index does not exist. The last hint applied to this cursor takes precedence over all others. .. versionadded:: 0.5 """ self._cursor.hint(index) self._hint = index return self def __getitem__(self, key): """Support skip and limit using getitem and slicing syntax. """ # Slice provided if isinstance(key, slice): try: self._cursor_obj = self._cursor[key] self._skip, self._limit = key.start, key.stop except IndexError, err: # PyMongo raises an error if key.start == key.stop, catch it, # bin it, kill it. 
start = key.start or 0 if start >= 0 and key.stop >= 0 and key.step is None: if start == key.stop: self.limit(0) self._skip, self._limit = key.start, key.stop - start return self raise err # Allow further QuerySet modifications to be performed return self # Integer index provided elif isinstance(key, int): if self._scalar: return self._get_scalar(self._document._from_son( self._cursor[key])) return self._document._from_son(self._cursor[key]) raise AttributeError def distinct(self, field): """Return a list of distinct values for a given field. :param field: the field to select distinct values from .. versionadded:: 0.4 .. versionchanged:: 0.5 - Fixed handling references """ from dereference import DeReference return DeReference()(self._cursor.distinct(field), 1) def only(self, *fields): """Load only a subset of this document's fields. :: post = BlogPost.objects(...).only("title", "author.name") :param fields: fields to include .. versionadded:: 0.3 .. versionchanged:: 0.5 - Added subfield support """ fields = dict([(f, QueryFieldList.ONLY) for f in fields]) return self.fields(**fields) def exclude(self, *fields): """Opposite to .only(), exclude some document's fields. :: post = BlogPost.objects(...).exclude("comments") :param fields: fields to exclude .. versionadded:: 0.5 """ fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields]) return self.fields(**fields) def fields(self, **kwargs): """Manipulate how you load this document's fields. Used by `.only()` and `.exclude()` to manipulate which fields to retrieve. Fields also allows for a greater level of control for example: Retrieving a Subrange of Array Elements: You can use the $slice operator to retrieve a subrange of elements in an array :: post = BlogPost.objects(...).fields(slice__comments=5) // first 5 comments :param kwargs: A dictionary identifying what to include .. 
versionadded:: 0.5 """ # Check for an operator and transform to mongo-style if there is operators = ["slice"] cleaned_fields = [] for key, value in kwargs.items(): parts = key.split('__') op = None if parts[0] in operators: op = parts.pop(0) value = {'$' + op: value} key = '.'.join(parts) cleaned_fields.append((key, value)) fields = sorted(cleaned_fields, key=operator.itemgetter(1)) for value, group in itertools.groupby(fields, lambda x: x[1]): fields = [field for field, value in group] fields = self._fields_to_dbfields(fields) self._loaded_fields += QueryFieldList(fields, value=value) return self def all_fields(self): """Include all fields. Reset all previously calls of .only() and .exclude(). :: post = BlogPost.objects(...).exclude("comments").only("title").all_fields() .. versionadded:: 0.5 """ self._loaded_fields = QueryFieldList(always_include=self._loaded_fields.always_include) return self def _fields_to_dbfields(self, fields): """Translate fields paths to its db equivalents""" ret = [] for field in fields: field = ".".join(f.db_field for f in QuerySet._lookup_field(self._document, field.split('.'))) ret.append(field) return ret def order_by(self, *keys): """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The order may be specified by prepending each of the keys by a + or a -. Ascending order is assumed. :param keys: fields to order the query results by; keys may be prefixed with **+** or **-** to determine the ordering direction """ key_list = [] for key in keys: if not key: continue direction = pymongo.ASCENDING if key[0] == '-': direction = pymongo.DESCENDING if key[0] in ('-', '+'): key = key[1:] key = key.replace('__', '.') try: key = QuerySet._translate_field_name(self._document, key) except: pass key_list.append((key, direction)) self._ordering = key_list self._cursor.sort(key_list) return self def explain(self, format=False): """Return an explain plan record for the :class:`~mongoengine.queryset.QuerySet`\ 's cursor. 
:param format: format the plan before returning it """ plan = self._cursor.explain() if format: plan = pprint.pformat(plan) return plan def snapshot(self, enabled): """Enable or disable snapshot mode when querying. :param enabled: whether or not snapshot mode is enabled ..versionchanged:: 0.5 - made chainable """ self._snapshot = enabled return self def timeout(self, enabled): """Enable or disable the default mongod timeout when querying. :param enabled: whether or not the timeout is used ..versionchanged:: 0.5 - made chainable """ self._timeout = enabled return self def slave_okay(self, enabled): """Enable or disable the slave_okay when querying. :param enabled: whether or not the slave_okay is enabled """ self._slave_okay = enabled return self def delete(self, safe=False): """Delete the documents matched by the query. :param safe: check if the operation succeeded before returning """ doc = self._document # Check for DENY rules before actually deleting/nullifying any other # references for rule_entry in doc._meta['delete_rules']: document_cls, field_name = rule_entry rule = doc._meta['delete_rules'][rule_entry] if rule == DENY and document_cls.objects(**{field_name + '__in': self}).count() > 0: msg = u'Could not delete document (at least %s.%s refers to it)' % \ (document_cls.__name__, field_name) raise OperationError(msg) for rule_entry in doc._meta['delete_rules']: document_cls, field_name = rule_entry rule = doc._meta['delete_rules'][rule_entry] if rule == CASCADE: ref_q = document_cls.objects(**{field_name + '__in': self}) if doc != document_cls or (doc == document_cls and ref_q.count() > 0): ref_q.delete(safe=safe) elif rule == NULLIFY: document_cls.objects(**{field_name + '__in': self}).update( safe_update=safe, **{'unset__%s' % field_name: 1}) elif rule == PULL: document_cls.objects(**{field_name + '__in': self}).update( safe_update=safe, **{'pull_all__%s' % field_name: self}) self._collection.remove(self._query, safe=safe) @classmethod def 
_transform_update(cls, _doc_cls=None, **update): """Transform an update spec from Django-style format to Mongo format. """ operators = ['set', 'unset', 'inc', 'dec', 'pop', 'push', 'push_all', 'pull', 'pull_all', 'add_to_set'] mongo_update = {} for key, value in update.items(): if key == "__raw__": mongo_update.update(value) continue parts = key.split('__') # Check for an operator and transform to mongo-style if there is op = None if parts[0] in operators: op = parts.pop(0) # Convert Pythonic names to Mongo equivalents if op in ('push_all', 'pull_all'): op = op.replace('_all', 'All') elif op == 'dec': # Support decrement by flipping a positive value's sign # and using 'inc' op = 'inc' if value > 0: value = -value elif op == 'add_to_set': op = op.replace('_to_set', 'ToSet') if _doc_cls: # Switch field names to proper names [set in Field(name='foo')] fields = QuerySet._lookup_field(_doc_cls, parts) parts = [] cleaned_fields = [] for field in fields: append_field = True if isinstance(field, str): # Convert the S operator to $ if field == 'S': field = '$' parts.append(field) append_field = False else: parts.append(field.db_field) if append_field: cleaned_fields.append(field) # Convert value to proper value field = cleaned_fields[-1] if op in (None, 'set', 'push', 'pull', 'addToSet'): if field.required or value is not None: value = field.prepare_query_value(op, value) elif op in ('pushAll', 'pullAll'): value = [field.prepare_query_value(op, v) for v in value] key = '.'.join(parts) if not op: raise InvalidQueryError("Updates must supply an operation eg: set__FIELD=value") if 'pull' in op and '.' 
in key: # Dot operators don't work on pull operations # it uses nested dict syntax if op == 'pullAll': raise InvalidQueryError("pullAll operations only support a single field depth") parts.reverse() for key in parts: value = {key: value} else: value = {key: value} key = '$' + op if key not in mongo_update: mongo_update[key] = value elif key in mongo_update and isinstance(mongo_update[key], dict): mongo_update[key].update(value) return mongo_update def update(self, safe_update=True, upsert=False, multi=True, write_options=None, **update): """Perform an atomic update on the fields matched by the query. When ``safe_update`` is used, the number of affected documents is returned. :param safe_update: check if the operation succeeded before returning :param upsert: Any existing document with that "_id" is overwritten. :param write_options: extra keyword arguments for :meth:`~pymongo.collection.Collection.update` .. versionadded:: 0.2 """ if not update: raise OperationError("No update parameters, would remove data") if not write_options: write_options = {} update = QuerySet._transform_update(self._document, **update) query = self._query # SERVER-5247 hack remove_types = "_types" in query and ".$." in unicode(update) if remove_types: del query["_types"] try: ret = self._collection.update(query, update, multi=multi, upsert=upsert, safe=safe_update, **write_options) if ret is not None and 'n' in ret: return ret['n'] except pymongo.errors.OperationFailure, err: if unicode(err) == u'multi not coded yet': message = u'update() method requires MongoDB 1.1.3+' raise OperationError(message) raise OperationError(u'Update failed (%s)' % unicode(err)) def update_one(self, safe_update=True, upsert=False, write_options=None, **update): """Perform an atomic update on first field matched by the query. When ``safe_update`` is used, the number of affected documents is returned. 
:param safe_update: check if the operation succeeded before returning :param upsert: Any existing document with that "_id" is overwritten. :param write_options: extra keyword arguments for :meth:`~pymongo.collection.Collection.update` :param update: Django-style update keyword arguments .. versionadded:: 0.2 """ if not update: raise OperationError("No update parameters, would remove data") if not write_options: write_options = {} update = QuerySet._transform_update(self._document, **update) query = self._query # SERVER-5247 hack remove_types = "_types" in query and ".$." in unicode(update) if remove_types: del query["_types"] try: # Explicitly provide 'multi=False' to newer versions of PyMongo # as the default may change to 'True' ret = self._collection.update(query, update, multi=False, upsert=upsert, safe=safe_update, **write_options) if ret is not None and 'n' in ret: return ret['n'] except pymongo.errors.OperationFailure, e: raise OperationError(u'Update failed [%s]' % unicode(e)) def __iter__(self): self.rewind() return self def _get_scalar(self, doc): def lookup(obj, name): chunks = name.split('__') for chunk in chunks: obj = getattr(obj, chunk) return obj data = [lookup(doc, n) for n in self._scalar] if len(data) == 1: return data[0] return tuple(data) def scalar(self, *fields): """Instead of returning Document instances, return either a specific value or a tuple of values in order. This effects all results and can be unset by calling ``scalar`` without arguments. Calls ``only`` automatically. :param fields: One or more fields to return instead of a Document. 
""" self._scalar = list(fields) if fields: self.only(*fields) else: self.all_fields() return self def values_list(self, *fields): """An alias for scalar""" return self.scalar(*fields) def _sub_js_fields(self, code): """When fields are specified with [~fieldname] syntax, where *fieldname* is the Python name of a field, *fieldname* will be substituted for the MongoDB name of the field (specified using the :attr:`name` keyword argument in a field's constructor). """ def field_sub(match): # Extract just the field name, and look up the field objects field_name = match.group(1).split('.') fields = QuerySet._lookup_field(self._document, field_name) # Substitute the correct name for the field into the javascript return u'["%s"]' % fields[-1].db_field def field_path_sub(match): # Extract just the field name, and look up the field objects field_name = match.group(1).split('.') fields = QuerySet._lookup_field(self._document, field_name) # Substitute the correct name for the field into the javascript return ".".join([f.db_field for f in fields]) code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, code) return code def exec_js(self, code, *fields, **options): """Execute a Javascript function on the server. A list of fields may be provided, which will be translated to their correct names and supplied as the arguments to the function. A few extra variables are added to the function's scope: ``collection``, which is the name of the collection in use; ``query``, which is an object representing the current query; and ``options``, which is an object containing any options specified as keyword arguments. As fields in MongoEngine may use different names in the database (set using the :attr:`db_field` keyword argument to a :class:`Field` constructor), a mechanism exists for replacing MongoEngine field names with the database field names in Javascript code. 
When accessing a field, use square-bracket notation, and prefix the MongoEngine field name with a tilde (~). :param code: a string of Javascript code to execute :param fields: fields that you will be using in your function, which will be passed in to your function as arguments :param options: options that you want available to the function (accessed in Javascript through the ``options`` object) """ code = self._sub_js_fields(code) fields = [QuerySet._translate_field_name(self._document, f) for f in fields] collection = self._document._get_collection_name() scope = { 'collection': collection, 'options': options or {}, } query = self._query if self._where_clause: query['$where'] = self._where_clause scope['query'] = query code = Code(code, scope=scope) db = self._document._get_db() return db.eval(code, *fields) def where(self, where_clause): """Filter ``QuerySet`` results with a ``$where`` clause (a Javascript expression). Performs automatic field name substitution like :meth:`mongoengine.queryset.Queryset.exec_js`. .. note:: When using this mode of query, the database will call your function, or evaluate your predicate clause, for each object in the collection. .. versionadded:: 0.5 """ where_clause = self._sub_js_fields(where_clause) self._where_clause = where_clause return self def sum(self, field): """Sum over the values of the specified field. :param field: the field to sum over; use dot-notation to refer to embedded document fields .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work with sharding. """ map_func = Code(""" function() { emit(1, this[field] || 0); } """, scope={'field': field}) reduce_func = Code(""" function(key, values) { var sum = 0; for (var i in values) { sum += values[i]; } return sum; } """) for result in self.map_reduce(map_func, reduce_func, output='inline'): return result.value else: return 0 def average(self, field): """Average over the values of the specified field. 
:param field: the field to average over; use dot-notation to refer to embedded document fields .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work with sharding. """ map_func = Code(""" function() { if (this.hasOwnProperty(field)) emit(1, {t: this[field] || 0, c: 1}); } """, scope={'field': field}) reduce_func = Code(""" function(key, values) { var out = {t: 0, c: 0}; for (var i in values) { var value = values[i]; out.t += value.t; out.c += value.c; } return out; } """) finalize_func = Code(""" function(key, value) { return value.t / value.c; } """) for result in self.map_reduce(map_func, reduce_func, finalize_f=finalize_func, output='inline'): return result.value else: return 0 def item_frequencies(self, field, normalize=False, map_reduce=True): """Returns a dictionary of all items present in a field across the whole queried set of documents, and their corresponding frequency. This is useful for generating tag clouds, or searching documents. .. note:: Can only do direct simple mappings and cannot map across :class:`~mongoengine.ReferenceField` or :class:`~mongoengine.GenericReferenceField` for more complex counting a manual map reduce call would is required. If the field is a :class:`~mongoengine.ListField`, the items within each list will be counted individually. :param field: the field to use :param normalize: normalize the results so they add to 1.0 :param map_reduce: Use map_reduce over exec_js .. 
versionchanged:: 0.5 defaults to map_reduce and can handle embedded document lookups """ if map_reduce: return self._item_frequencies_map_reduce(field, normalize=normalize) return self._item_frequencies_exec_js(field, normalize=normalize) def _item_frequencies_map_reduce(self, field, normalize=False): map_func = """ function() { path = '{{~%(field)s}}'.split('.'); field = this; for (p in path) { if (field) field = field[path[p]]; else break; } if (field && field.constructor == Array) { field.forEach(function(item) { emit(item, 1); }); } else if (field) { emit(field, 1); } else { emit(null, 1); } } """ % dict(field=field) reduce_func = """ function(key, values) { var total = 0; var valuesSize = values.length; for (var i=0; i < valuesSize; i++) { total += parseInt(values[i], 10); } return total; } """ values = self.map_reduce(map_func, reduce_func, 'inline') frequencies = {} for f in values: key = f.key if isinstance(key, float): if int(key) == key: key = int(key) key = str(key) frequencies[key] = f.value if normalize: count = sum(frequencies.values()) frequencies = dict([(k, v / count) for k, v in frequencies.items()]) return frequencies def _item_frequencies_exec_js(self, field, normalize=False): """Uses exec_js to execute""" freq_func = """ function(path) { path = path.split('.'); if (options.normalize) { var total = 0.0; db[collection].find(query).forEach(function(doc) { field = doc; for (p in path) { if (field) field = field[path[p]]; else break; } if (field && field.constructor == Array) { total += field.length; } else { total++; } }); } var frequencies = {}; var inc = 1.0; if (options.normalize) { inc /= total; } db[collection].find(query).forEach(function(doc) { field = doc; for (p in path) { if (field) field = field[path[p]]; else break; } if (field && field.constructor == Array) { field.forEach(function(item) { frequencies[item] = inc + (isNaN(frequencies[item]) ? 
0: frequencies[item]); }); } else { var item = field; frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); } }); return frequencies; } """ data = self.exec_js(freq_func, field, normalize=normalize) if 'undefined' in data: data[None] = data['undefined'] del(data['undefined']) return data def __repr__(self): """Provides the string representation of the QuerySet .. versionchanged:: 0.6.13 Now doesnt modify the cursor """ if self._iter: return '.. queryset mid-iteration ..' data = [] for i in xrange(REPR_OUTPUT_SIZE + 1): try: data.append(self.next()) except StopIteration: break if len(data) > REPR_OUTPUT_SIZE: data[-1] = "...(remaining elements truncated)..." self.rewind() return repr(data) def select_related(self, max_depth=1): """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to a maximum depth in order to cut down the number queries to mongodb. .. versionadded:: 0.5 """ from dereference import DeReference # Make select related work the same for querysets max_depth += 1 return DeReference()(self, max_depth=max_depth) class QuerySetManager(object): get_queryset = None def __init__(self, queryset_func=None): if queryset_func: self.get_queryset = queryset_func self._collections = {} def __get__(self, instance, owner): """Descriptor for instantiating a new QuerySet object when Document.objects is accessed. """ if instance is not None: # Document class being used rather than a document object return self # owner is the document that contains the QuerySetManager queryset_class = owner._meta['queryset_class'] or QuerySet queryset = queryset_class(owner, owner._get_collection()) if self.get_queryset: if self.get_queryset.func_code.co_argcount == 1: queryset = self.get_queryset(queryset) else: queryset = self.get_queryset(owner, queryset) return queryset def queryset_manager(func): """Decorator that allows you to define custom QuerySet managers on :class:`~mongoengine.Document` classes. 
The manager must be a function that accepts a :class:`~mongoengine.Document` class as its first argument, and a :class:`~mongoengine.queryset.QuerySet` as its second argument. The method function should return a :class:`~mongoengine.queryset.QuerySet`, probably the same one that was passed in, but modified in some way. """ if func.func_code.co_argcount == 1: import warnings msg = 'Methods decorated with queryset_manager should take 2 arguments' warnings.warn(msg, DeprecationWarning) return QuerySetManager(func) MongoEngine-mongoengine-7a1b110/mongoengine/signals.py000066400000000000000000000032311177143177100230670ustar00rootroot00000000000000# -*- coding: utf-8 -*- __all__ = ['pre_init', 'post_init', 'pre_save', 'post_save', 'pre_delete', 'post_delete'] signals_available = False try: from blinker import Namespace signals_available = True except ImportError: class Namespace(object): def signal(self, name, doc=None): return _FakeSignal(name, doc) class _FakeSignal(object): """If blinker is unavailable, create a fake class with the same interface that allows sending of signals but will fail with an error on anything else. Instead of doing anything on send, it will just ignore the arguments and do nothing instead. """ def __init__(self, name, doc=None): self.name = name self.__doc__ = doc def _fail(self, *args, **kwargs): raise RuntimeError('signalling support is unavailable ' 'because the blinker library is ' 'not installed.') send = lambda *a, **kw: None connect = disconnect = has_receivers_for = receivers_for = \ temporarily_connected_to = _fail del _fail # the namespace for code signals. If you are not mongoengine code, do # not put signals in here. Create your own namespace instead. 
_signals = Namespace() pre_init = _signals.signal('pre_init') post_init = _signals.signal('post_init') pre_save = _signals.signal('pre_save') post_save = _signals.signal('post_save') pre_delete = _signals.signal('pre_delete') post_delete = _signals.signal('post_delete') pre_bulk_insert = _signals.signal('pre_bulk_insert') post_bulk_insert = _signals.signal('post_bulk_insert') MongoEngine-mongoengine-7a1b110/mongoengine/tests.py000066400000000000000000000032541177143177100225760ustar00rootroot00000000000000from mongoengine.connection import get_db class query_counter(object): """ Query_counter contextmanager to get the number of queries. """ def __init__(self): """ Construct the query_counter. """ self.counter = 0 self.db = get_db() def __enter__(self): """ On every with block we need to drop the profile collection. """ self.db.set_profiling_level(0) self.db.system.profile.drop() self.db.set_profiling_level(2) return self def __exit__(self, t, value, traceback): """ Reset the profiling level. """ self.db.set_profiling_level(0) def __eq__(self, value): """ == Compare querycounter. """ return value == self._get_count() def __ne__(self, value): """ != Compare querycounter. """ return not self.__eq__(value) def __lt__(self, value): """ < Compare querycounter. """ return self._get_count() < value def __le__(self, value): """ <= Compare querycounter. """ return self._get_count() <= value def __gt__(self, value): """ > Compare querycounter. """ return self._get_count() > value def __ge__(self, value): """ >= Compare querycounter. """ return self._get_count() >= value def __int__(self): """ int representation. """ return self._get_count() def __repr__(self): """ repr query_counter as the number of queries. """ return u"%s" % self._get_count() def _get_count(self): """ Get the number of queries. 
""" count = self.db.system.profile.find().count() - self.counter self.counter += 1 return count MongoEngine-mongoengine-7a1b110/python-mongoengine.spec000066400000000000000000000027351177143177100232600ustar00rootroot00000000000000# sitelib for noarch packages, sitearch for others (remove the unneeded one) %{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")} %{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")} %define srcname mongoengine Name: python-%{srcname} Version: 0.6.13 Release: 1%{?dist} Summary: A Python Document-Object Mapper for working with MongoDB Group: Development/Libraries License: MIT URL: https://github.com/MongoEngine/mongoengine Source0: %{srcname}-%{version}.tar.bz2 BuildRequires: python-devel BuildRequires: python-setuptools Requires: mongodb Requires: pymongo Requires: python-blinker Requires: python-imaging %description MongoEngine is an ORM-like layer on top of PyMongo. %prep %setup -q -n %{srcname}-%{version} %build # Remove CFLAGS=... 
for noarch packages (unneeded) CFLAGS="$RPM_OPT_FLAGS" %{__python} setup.py build %install rm -rf $RPM_BUILD_ROOT %{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT %clean rm -rf $RPM_BUILD_ROOT %files %defattr(-,root,root,-) %doc docs AUTHORS LICENSE README.rst # For noarch packages: sitelib %{python_sitelib}/* # For arch-specific packages: sitearch # %{python_sitearch}/* %changelog * See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.htmlMongoEngine-mongoengine-7a1b110/requirements.txt000066400000000000000000000000071177143177100220320ustar00rootroot00000000000000pymongoMongoEngine-mongoengine-7a1b110/setup.cfg000066400000000000000000000003321177143177100203700ustar00rootroot00000000000000[aliases] test = nosetests [nosetests] verbosity = 2 detailed-errors = 1 #with-coverage = 1 cover-html = 1 cover-html-dir = ../htmlcov cover-package = mongoengine cover-erase = 1 where = tests #tests = test_bugfix.py MongoEngine-mongoengine-7a1b110/setup.py000066400000000000000000000032431177143177100202650ustar00rootroot00000000000000from setuptools import setup, find_packages import os DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB" LONG_DESCRIPTION = None try: LONG_DESCRIPTION = open('README.rst').read() except: pass def get_version(version_tuple): version = '%s.%s' % (version_tuple[0], version_tuple[1]) if version_tuple[2]: version = '%s.%s' % (version, version_tuple[2]) return version # Dirty hack to get version number from monogengine/__init__.py - we can't # import it as it depends on PyMongo and PyMongo isn't installed until this # file is read init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0] VERSION = get_version(eval(version_line.split('=')[-1])) print VERSION CLASSIFIERS = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS 
Independent', 'Programming Language :: Python', 'Topic :: Database', 'Topic :: Software Development :: Libraries :: Python Modules', ] setup(name='mongoengine', version=VERSION, packages=find_packages(), author='Harry Marr', author_email='harry.marr@{nospam}gmail.com', maintainer="Ross Lawley", maintainer_email="ross.lawley@{nospam}gmail.com", url='http://mongoengine.org/', license='MIT', include_package_data=True, description=DESCRIPTION, long_description=LONG_DESCRIPTION, platforms=['any'], classifiers=CLASSIFIERS, install_requires=['pymongo'], tests_require=['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL'] ) MongoEngine-mongoengine-7a1b110/tests/000077500000000000000000000000001177143177100177135ustar00rootroot00000000000000MongoEngine-mongoengine-7a1b110/tests/__init__.py000066400000000000000000000000001177143177100220120ustar00rootroot00000000000000MongoEngine-mongoengine-7a1b110/tests/fixtures.py000066400000000000000000000007501177143177100221400ustar00rootroot00000000000000from datetime import datetime from mongoengine import * class PickleEmbedded(EmbeddedDocument): date = DateTimeField(default=datetime.now) class PickleTest(Document): number = IntField() string = StringField(choices=(('One', '1'), ('Two', '2'))) embedded = EmbeddedDocumentField(PickleEmbedded) lists = ListField(StringField()) photo = FileField() class Mixin(object): name = StringField() class Base(Document): meta = {'allow_inheritance': True} MongoEngine-mongoengine-7a1b110/tests/mongoengine.png000066400000000000000000000201711177143177100227270ustar00rootroot00000000000000PNG  IHDRsLW^tEXtSoftwareAdobe ImageReadyqe< IDATx] ^e Ij0ysHsKc¢p*%rvM3dUVJ+O71V3J &O}*"_곴:'8TɼyzYKs(KѱJҀ{vΕ U1(^U+<F꧉ykbU "/ȸ}r f&k2|cWLvj+/#ZRv)_%s xtȔr$D@D΂Ch;0Yw.stCr3!RfU"E|Hs:E_ʡLtǶŸbi8d E| $E`edBTzOK:}_d +G7g.Z-bz0ܒEnl&+`U2ƆV^rR̛D6YIO׊]4q˒EIi$9o*w5qvd{V;Y( ru-,,lN$nmz}v%xW*".rknVרgȢUydnaa69"w-j=Z9!qV?ȷWꏻC"~i"sz7\h d 'fh\!Q# x!8 JRƫ:O_RDn"7З*dcQ|cÍ$_(Fyk&>-&dnaa@d(M%66x!"ᕪ\/_1N|s[2 
̬Fݲx˟)qlaLUU;5)RninKNJ,_/KG."K3Q#X"؈:A7NR_#ʉL3)j# PAdlum4En҂Ik'E'-һK&9H7/kk+_T b\Q2n[4Ŋk*-[XXdH:5krp۫4kҒnXbeqȈI0n~UkudjLl]`"XIZ*G-];Hk1ɁwPT4Nqׅoj?ZY`"zZ9aXOil!d* _[}y/;pAKvf7NdJ,jmj'@-,,ZVȊTRx:ҿJWXR nĆ &6Wo0SdvMV3h5(➼xC[ؼeeFjmP?vhlWzvzh爈Ui+D\5e%s VҿG|]dЮΦ|J5˥i5A;kȪM˒NU ۢ߷ yhVTR cDt ZX2hP\" Mim.0d[c WRsΪCܝb6v+d&HUTJ/l#UM-0|*r*}Tw) AwU7둗RYU v{9ZTwj[mƺ&WA/ d*ޒ]Y^ڀ*#UK7=Uޡ/Í&WSE]`56u@"ME5Hj>';@.R|Ղ>"SkUf/&ijt}e)͍]ؖ:[ǾCB߫)Uy[)Sxv?l?C{?0VQHtG*rO*MUQ>adK-SRUH $UƳwrA>S5Unt8|]?ArUQ7!' 踏Wt9LWo>os*;Y&yzKJJBΝ.WL*~h!b~i<nB?bq~ +eA,1 6wS.vp?p$:[AVq&چؖ_F;ֿUdrDE/r9xH'=4NTtG/ɼ2rz  *7qi;q?Ttp^HewUW#sk(H|\GB.:BF=MF|St&Z/ ӤD\/\Q7$ #ȳ.!~JpjoD$r`/j{/ r Ud p) a( cG4wy-Q3Q.r.dX(<*AByq & (K~C^HBژSADI3~(o9!Bp'=$%Z-:J6wOqߥx=w|Éa\{8WGN(K\ x_9.J6y7rх}XjcVS֕3>~}{\"A?⼞䜃b֡% Us^e 2g0yJvDu5 ^n'+{`2fNjMz>.حx1p9dTc=$3VG-m/ޡ>Ǜm|et3G;ϗdS p $l[QC!s(5mqmM3?х21qǷCN>():,eGzM}A7;<+= ^K@,Xx#DK̄XkhGԟ{l ֹgQ"Zc߯5gCgt?R>Ǟ?{6a^Ϻ 򄀍-w˧)Ok:J w]O רAocJtO "C@kGf9&&Nh| B1O"Qv Mב~`864Wd*([%;L^d^5eOă~zϦRhp7-)CE>2hxޝ\1!;ŮߧS.&Ȏ.~1V/Ay>b_x`=9X:׃+t] j([u>r;4g\sFT4X༅i.s {QG3? gUMm? d |&PsV ǿAmJ5&|-۩LoEU$Y- >ӕə<@"ᑬ@ji!Hak܋Į!vn|F9Յцk9:9pG'D?Tk5X {SqCJZq۹܍_srsXʥeBNOsPj]CN%TPTgnrs"&>ʣE# pϻ Z?1O]ѱ/l|tۘ|w{ 5X^ȳzwxG)L* Ǧs|L0]| ȅlEg4˶ .=ByV+Y;z/KȞ49OoF:\i"?'Qū>hW_֗FGOpuָcQ<)+yh~c҉E "+6ȎJAp]ʈmpk.6'<ФoZV:V.tV@wb~7:Ba1N˾rukP.2 ?`Lnp]"tC^3h6nNRn88k!o)/}vbD J^=s٦lE$bMXb8^ZZGb;R2is'7OwhxɢQ_I/@вq;cAlVN4}À%dxGIӲdR+ w4h:ȆM4/s#FCvwwa_ qBc6Y5;\5⽂spYm}#XA6 ҍF.05XIh6B揊=tfЖMuT4yt0a%ByYY&\JCb'}0A |Zk1s1aj"H]s^]Xa03Ĺ'~ch%p^ a2Ј6m7ܙdT1ZL'*ju7hqaa -1}uvD: 8vV:t7>0 OhIfZ쓞 W d{|]LzJ8?M8a$mm !{? 9zax;z-w JלMሀ rn=øYHB-ks{= LP >Ă&.{e<Ā3O'T'* .>tI#1)Ou %x'e%0Qw̓`+spOfê<ʠ9Ӥ7Iµp76^*ɥku~܉Y0+ZyIVn\!%Ribtn" Ӈo ^ K-5}RӒD%$6i lsL%'m I yhTS.9xV'G V-%p n6 kc}0O $gWZMyz<+aΒ0d^;8C%0"X:~ i ߍlI^2 \i?0I{v H&( %G.v :""=~։?iRa l Hc;hY>|?9SXb0WL>~W!cG;y׊>* mxP9݊)tS|>lR.BhgƼwмdM[o3DgDۄP1[ csrvd V,Kh[2^7$7)|ZW``RϴHI]qSblC综IS1;J'ZvаțcW?|l{U6. 
#"z _ 2`0G{bp~N{"Z܈AL "l7dM%1,>8pvy;Y} a1lqqwa |ȧ#6XeΔ(r =?'K50E g'un9%H,LR+9'g4ǒvy|Ĝh:gg'vUƩt%0Etw(#/Hdk wC9ن.X 0ɳB{: 3l;W8,n>s\g?3m>c)g=,2&2hbQ͙Ts .$dZ ju 5Ei\.d&q_8Vb#>H"+$zsC )v+Fu)g[\$ZC.3qWF_aEP&{;fJ¹$_Cm7_Zcv4SgJcfWY.F~/A " Uf ⅶy-0秶yrk&XXD|Цi@ȮnRlX2h[@.k I>h[@iy,[$$E:4F}ʠ}axmނ_k19 u46qr(M`!UȏЫ7ɲMj@),kJ85 B7B#1A7r Kf{mTKmbK&v* HSKIXp/D!b?X䍰(L\ɿZb>JQ.vG^:`KhؗL"{`km4i~ ɵLxZE°>sq\,$YM\.,QEUXLv-[Hi,U a5;7ڦ-xvmKsmY, n@CEZoS‹v<Jh=]2yD0!+咙L\;[$]n l^@sc$FB䌘_2Fn~ hHrն`l/(IENDB`MongoEngine-mongoengine-7a1b110/tests/test_connection.py000066400000000000000000000061421177143177100234660ustar00rootroot00000000000000import datetime import pymongo import unittest import mongoengine.connection from bson.tz_util import utc from mongoengine import * from mongoengine.connection import get_db, get_connection, ConnectionError class ConnectionTest(unittest.TestCase): def tearDown(self): mongoengine.connection._connection_settings = {} mongoengine.connection._connections = {} mongoengine.connection._dbs = {} def test_connect(self): """Ensure that the connect() method works properly. 
""" connect('mongoenginetest') conn = get_connection() self.assertTrue(isinstance(conn, pymongo.connection.Connection)) db = get_db() self.assertTrue(isinstance(db, pymongo.database.Database)) self.assertEqual(db.name, 'mongoenginetest') connect('mongoenginetest2', alias='testdb') conn = get_connection('testdb') self.assertTrue(isinstance(conn, pymongo.connection.Connection)) def test_connect_uri(self): """Ensure that the connect() method works properly with uri's """ c = connect(db='mongoenginetest', alias='admin') c.admin.system.users.remove({}) c.mongoenginetest.system.users.remove({}) c.admin.add_user("admin", "password") c.admin.authenticate("admin", "password") c.mongoenginetest.add_user("username", "password") self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost') connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') conn = get_connection() self.assertTrue(isinstance(conn, pymongo.connection.Connection)) db = get_db() self.assertTrue(isinstance(db, pymongo.database.Database)) self.assertEqual(db.name, 'mongoenginetest') def test_register_connection(self): """Ensure that connections with different aliases may be registered. """ register_connection('testdb', 'mongoenginetest2') self.assertRaises(ConnectionError, get_connection) conn = get_connection('testdb') self.assertTrue(isinstance(conn, pymongo.connection.Connection)) db = get_db('testdb') self.assertTrue(isinstance(db, pymongo.database.Database)) self.assertEqual(db.name, 'mongoenginetest2') def test_connection_kwargs(self): """Ensure that connection kwargs get passed to pymongo. 
""" connect('mongoenginetest', alias='t1', tz_aware=True) conn = get_connection('t1') self.assertTrue(conn.tz_aware) connect('mongoenginetest2', alias='t2') conn = get_connection('t2') self.assertFalse(conn.tz_aware) def test_datetime(self): connect('mongoenginetest', tz_aware=True) d = datetime.datetime(2010, 5, 5, tzinfo=utc) class DateDoc(Document): the_date = DateTimeField(required=True) DateDoc.drop_collection() DateDoc(the_date=d).save() date_doc = DateDoc.objects.first() self.assertEqual(d, date_doc.the_date) if __name__ == '__main__': unittest.main() MongoEngine-mongoengine-7a1b110/tests/test_dereference.py000066400000000000000000000563371177143177100236110ustar00rootroot00000000000000import unittest from mongoengine import * from mongoengine.connection import get_db from mongoengine.tests import query_counter class FieldTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') self.db = get_db() def test_list_item_dereference(self): """Ensure that DBRef items in ListFields are dereferenced. 
""" class User(Document): name = StringField() class Group(Document): members = ListField(ReferenceField(User)) User.drop_collection() Group.drop_collection() for i in xrange(1, 51): user = User(name='user %s' % i) user.save() group = Group(members=User.objects) group.save() group = Group(members=User.objects) group.save() with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first() self.assertEqual(q, 1) [m for m in group_obj.members] self.assertEqual(q, 2) # Document select_related with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first().select_related() self.assertEqual(q, 2) [m for m in group_obj.members] self.assertEqual(q, 2) # Queryset select_related with query_counter() as q: self.assertEqual(q, 0) group_objs = Group.objects.select_related() self.assertEqual(q, 2) for group_obj in group_objs: [m for m in group_obj.members] self.assertEqual(q, 2) User.drop_collection() Group.drop_collection() def test_recursive_reference(self): """Ensure that ReferenceFields can reference their own documents. 
""" class Employee(Document): name = StringField() boss = ReferenceField('self') friends = ListField(ReferenceField('self')) Employee.drop_collection() bill = Employee(name='Bill Lumbergh') bill.save() michael = Employee(name='Michael Bolton') michael.save() samir = Employee(name='Samir Nagheenanajar') samir.save() friends = [michael, samir] peter = Employee(name='Peter Gibbons', boss=bill, friends=friends) peter.save() Employee(name='Funky Gibbon', boss=bill, friends=friends).save() Employee(name='Funky Gibbon', boss=bill, friends=friends).save() Employee(name='Funky Gibbon', boss=bill, friends=friends).save() with query_counter() as q: self.assertEqual(q, 0) peter = Employee.objects.with_id(peter.id) self.assertEqual(q, 1) peter.boss self.assertEqual(q, 2) peter.friends self.assertEqual(q, 3) # Document select_related with query_counter() as q: self.assertEqual(q, 0) peter = Employee.objects.with_id(peter.id).select_related() self.assertEqual(q, 2) self.assertEquals(peter.boss, bill) self.assertEqual(q, 2) self.assertEquals(peter.friends, friends) self.assertEqual(q, 2) # Queryset select_related with query_counter() as q: self.assertEqual(q, 0) employees = Employee.objects(boss=bill).select_related() self.assertEqual(q, 2) for employee in employees: self.assertEquals(employee.boss, bill) self.assertEqual(q, 2) self.assertEquals(employee.friends, friends) self.assertEqual(q, 2) def test_circular_reference(self): """Ensure you can handle circular references """ class Person(Document): name = StringField() relations = ListField(EmbeddedDocumentField('Relation')) def __repr__(self): return "" % self.name class Relation(EmbeddedDocument): name = StringField() person = ReferenceField('Person') Person.drop_collection() mother = Person(name="Mother") daughter = Person(name="Daughter") mother.save() daughter.save() daughter_rel = Relation(name="Daughter", person=daughter) mother.relations.append(daughter_rel) mother.save() mother_rel = Relation(name="Daughter", 
person=mother) self_rel = Relation(name="Self", person=daughter) daughter.relations.append(mother_rel) daughter.relations.append(self_rel) daughter.save() self.assertEquals("[, ]", "%s" % Person.objects()) def test_circular_reference_on_self(self): """Ensure you can handle circular references """ class Person(Document): name = StringField() relations = ListField(ReferenceField('self')) def __repr__(self): return "" % self.name Person.drop_collection() mother = Person(name="Mother") daughter = Person(name="Daughter") mother.save() daughter.save() mother.relations.append(daughter) mother.save() daughter.relations.append(mother) daughter.relations.append(daughter) daughter.save() self.assertEquals("[, ]", "%s" % Person.objects()) def test_circular_tree_reference(self): """Ensure you can handle circular references with more than one level """ class Other(EmbeddedDocument): name = StringField() friends = ListField(ReferenceField('Person')) class Person(Document): name = StringField() other = EmbeddedDocumentField(Other, default=lambda: Other()) def __repr__(self): return "" % self.name Person.drop_collection() paul = Person(name="Paul") paul.save() maria = Person(name="Maria") maria.save() julia = Person(name='Julia') julia.save() anna = Person(name='Anna') anna.save() paul.other.friends = [maria, julia, anna] paul.other.name = "Paul's friends" paul.save() maria.other.friends = [paul, julia, anna] maria.other.name = "Maria's friends" maria.save() julia.other.friends = [paul, maria, anna] julia.other.name = "Julia's friends" julia.save() anna.other.friends = [paul, maria, julia] anna.other.name = "Anna's friends" anna.save() self.assertEquals( "[, , , ]", "%s" % Person.objects() ) def test_generic_reference(self): class UserA(Document): name = StringField() class UserB(Document): name = StringField() class UserC(Document): name = StringField() class Group(Document): members = ListField(GenericReferenceField()) UserA.drop_collection() UserB.drop_collection() 
UserC.drop_collection() Group.drop_collection() members = [] for i in xrange(1, 51): a = UserA(name='User A %s' % i) a.save() b = UserB(name='User B %s' % i) b.save() c = UserC(name='User C %s' % i) c.save() members += [a, b, c] group = Group(members=members) group.save() group = Group(members=members) group.save() with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first() self.assertEqual(q, 1) [m for m in group_obj.members] self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) for m in group_obj.members: self.assertTrue('User' in m.__class__.__name__) # Document select_related with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first().select_related() self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) for m in group_obj.members: self.assertTrue('User' in m.__class__.__name__) # Queryset select_related with query_counter() as q: self.assertEqual(q, 0) group_objs = Group.objects.select_related() self.assertEqual(q, 4) for group_obj in group_objs: [m for m in group_obj.members] self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) for m in group_obj.members: self.assertTrue('User' in m.__class__.__name__) UserA.drop_collection() UserB.drop_collection() UserC.drop_collection() Group.drop_collection() def test_list_field_complex(self): class UserA(Document): name = StringField() class UserB(Document): name = StringField() class UserC(Document): name = StringField() class Group(Document): members = ListField() UserA.drop_collection() UserB.drop_collection() UserC.drop_collection() Group.drop_collection() members = [] for i in xrange(1, 51): a = UserA(name='User A %s' % i) a.save() b = UserB(name='User B %s' % i) b.save() c = UserC(name='User C %s' % i) c.save() members += [a, b, c] group = Group(members=members) group.save() group = Group(members=members) group.save() with query_counter() as q: 
self.assertEqual(q, 0) group_obj = Group.objects.first() self.assertEqual(q, 1) [m for m in group_obj.members] self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) for m in group_obj.members: self.assertTrue('User' in m.__class__.__name__) # Document select_related with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first().select_related() self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) for m in group_obj.members: self.assertTrue('User' in m.__class__.__name__) # Queryset select_related with query_counter() as q: self.assertEqual(q, 0) group_objs = Group.objects.select_related() self.assertEqual(q, 4) for group_obj in group_objs: [m for m in group_obj.members] self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) for m in group_obj.members: self.assertTrue('User' in m.__class__.__name__) UserA.drop_collection() UserB.drop_collection() UserC.drop_collection() Group.drop_collection() def test_map_field_reference(self): class User(Document): name = StringField() class Group(Document): members = MapField(ReferenceField(User)) User.drop_collection() Group.drop_collection() members = [] for i in xrange(1, 51): user = User(name='user %s' % i) user.save() members.append(user) group = Group(members=dict([(str(u.id), u) for u in members])) group.save() group = Group(members=dict([(str(u.id), u) for u in members])) group.save() with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first() self.assertEqual(q, 1) [m for m in group_obj.members] self.assertEqual(q, 2) for k, m in group_obj.members.iteritems(): self.assertTrue(isinstance(m, User)) # Document select_related with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first().select_related() self.assertEqual(q, 2) [m for m in group_obj.members] self.assertEqual(q, 2) for k, m in group_obj.members.iteritems(): 
self.assertTrue(isinstance(m, User)) # Queryset select_related with query_counter() as q: self.assertEqual(q, 0) group_objs = Group.objects.select_related() self.assertEqual(q, 2) for group_obj in group_objs: [m for m in group_obj.members] self.assertEqual(q, 2) for k, m in group_obj.members.iteritems(): self.assertTrue(isinstance(m, User)) User.drop_collection() Group.drop_collection() def test_dict_field(self): class UserA(Document): name = StringField() class UserB(Document): name = StringField() class UserC(Document): name = StringField() class Group(Document): members = DictField() UserA.drop_collection() UserB.drop_collection() UserC.drop_collection() Group.drop_collection() members = [] for i in xrange(1, 51): a = UserA(name='User A %s' % i) a.save() b = UserB(name='User B %s' % i) b.save() c = UserC(name='User C %s' % i) c.save() members += [a, b, c] group = Group(members=dict([(str(u.id), u) for u in members])) group.save() group = Group(members=dict([(str(u.id), u) for u in members])) group.save() with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first() self.assertEqual(q, 1) [m for m in group_obj.members] self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) for k, m in group_obj.members.iteritems(): self.assertTrue('User' in m.__class__.__name__) # Document select_related with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first().select_related() self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) for k, m in group_obj.members.iteritems(): self.assertTrue('User' in m.__class__.__name__) # Queryset select_related with query_counter() as q: self.assertEqual(q, 0) group_objs = Group.objects.select_related() self.assertEqual(q, 4) for group_obj in group_objs: [m for m in group_obj.members] self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) for k, m in 
group_obj.members.iteritems(): self.assertTrue('User' in m.__class__.__name__) Group.objects.delete() Group().save() with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first() self.assertEqual(q, 1) [m for m in group_obj.members] self.assertEqual(q, 1) self.assertEqual(group_obj.members, {}) UserA.drop_collection() UserB.drop_collection() UserC.drop_collection() Group.drop_collection() def test_dict_field_no_field_inheritance(self): class UserA(Document): name = StringField() meta = {'allow_inheritance': False} class Group(Document): members = DictField() UserA.drop_collection() Group.drop_collection() members = [] for i in xrange(1, 51): a = UserA(name='User A %s' % i) a.save() members += [a] group = Group(members=dict([(str(u.id), u) for u in members])) group.save() group = Group(members=dict([(str(u.id), u) for u in members])) group.save() with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first() self.assertEqual(q, 1) [m for m in group_obj.members] self.assertEqual(q, 2) [m for m in group_obj.members] self.assertEqual(q, 2) for k, m in group_obj.members.iteritems(): self.assertTrue(isinstance(m, UserA)) # Document select_related with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first().select_related() self.assertEqual(q, 2) [m for m in group_obj.members] self.assertEqual(q, 2) [m for m in group_obj.members] self.assertEqual(q, 2) for k, m in group_obj.members.iteritems(): self.assertTrue(isinstance(m, UserA)) # Queryset select_related with query_counter() as q: self.assertEqual(q, 0) group_objs = Group.objects.select_related() self.assertEqual(q, 2) for group_obj in group_objs: [m for m in group_obj.members] self.assertEqual(q, 2) [m for m in group_obj.members] self.assertEqual(q, 2) for k, m in group_obj.members.iteritems(): self.assertTrue(isinstance(m, UserA)) UserA.drop_collection() Group.drop_collection() def test_generic_reference_map_field(self): class UserA(Document): name = 
StringField() class UserB(Document): name = StringField() class UserC(Document): name = StringField() class Group(Document): members = MapField(GenericReferenceField()) UserA.drop_collection() UserB.drop_collection() UserC.drop_collection() Group.drop_collection() members = [] for i in xrange(1, 51): a = UserA(name='User A %s' % i) a.save() b = UserB(name='User B %s' % i) b.save() c = UserC(name='User C %s' % i) c.save() members += [a, b, c] group = Group(members=dict([(str(u.id), u) for u in members])) group.save() group = Group(members=dict([(str(u.id), u) for u in members])) group.save() with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first() self.assertEqual(q, 1) [m for m in group_obj.members] self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) for k, m in group_obj.members.iteritems(): self.assertTrue('User' in m.__class__.__name__) # Document select_related with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first().select_related() self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) for k, m in group_obj.members.iteritems(): self.assertTrue('User' in m.__class__.__name__) # Queryset select_related with query_counter() as q: self.assertEqual(q, 0) group_objs = Group.objects.select_related() self.assertEqual(q, 4) for group_obj in group_objs: [m for m in group_obj.members] self.assertEqual(q, 4) [m for m in group_obj.members] self.assertEqual(q, 4) for k, m in group_obj.members.iteritems(): self.assertTrue('User' in m.__class__.__name__) Group.objects.delete() Group().save() with query_counter() as q: self.assertEqual(q, 0) group_obj = Group.objects.first() self.assertEqual(q, 1) [m for m in group_obj.members] self.assertEqual(q, 1) UserA.drop_collection() UserB.drop_collection() UserC.drop_collection() Group.drop_collection() def test_multidirectional_lists(self): class Asset(Document): name = 
StringField(max_length=250, required=True) parent = GenericReferenceField(default=None) parents = ListField(GenericReferenceField()) children = ListField(GenericReferenceField()) Asset.drop_collection() root = Asset(name='', path="/", title="Site Root") root.save() company = Asset(name='company', title='Company', parent=root, parents=[root]) company.save() root.children = [company] root.save() root = root.reload() self.assertEquals(root.children, [company]) self.assertEquals(company.parents, [root]) def test_dict_in_dbref_instance(self): class Person(Document): name = StringField(max_length=250, required=True) class Room(Document): number = StringField(max_length=250, required=True) staffs_with_position = ListField(DictField()) Person.drop_collection() Room.drop_collection() bob = Person.objects.create(name='Bob') bob.save() sarah = Person.objects.create(name='Sarah') sarah.save() room_101 = Room.objects.create(number="101") room_101.staffs_with_position = [ {'position_key': 'window', 'staff': sarah}, {'position_key': 'door', 'staff': bob.to_dbref()}] room_101.save() room = Room.objects.first().select_related() self.assertEquals(room.staffs_with_position[0]['staff'], sarah) self.assertEquals(room.staffs_with_position[1]['staff'], bob) def test_document_reload_no_inheritance(self): class Foo(Document): meta = {'allow_inheritance': False} bar = ReferenceField('Bar') baz = ReferenceField('Baz') class Bar(Document): meta = {'allow_inheritance': False} msg = StringField(required=True, default='Blammo!') class Baz(Document): meta = {'allow_inheritance': False} msg = StringField(required=True, default='Kaboom!') Foo.drop_collection() Bar.drop_collection() Baz.drop_collection() bar = Bar() bar.save() baz = Baz() baz.save() foo = Foo() foo.bar = bar foo.baz = baz foo.save() foo.reload() self.assertEquals(type(foo.bar), Bar) self.assertEquals(type(foo.baz), Baz) 
MongoEngine-mongoengine-7a1b110/tests/test_django.py000066400000000000000000000066711177143177100226000ustar00rootroot00000000000000# -*- coding: utf-8 -*- import unittest from mongoengine import * from mongoengine.django.shortcuts import get_document_or_404 from django.http import Http404 from django.template import Context, Template from django.conf import settings from django.core.paginator import Paginator settings.configure() from django.contrib.sessions.tests import SessionTestsMixin from mongoengine.django.sessions import SessionStore, MongoSession class QuerySetTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') class Person(Document): name = StringField() age = IntField() self.Person = Person def test_order_by_in_django_template(self): """Ensure that QuerySets are properly ordered in Django template. """ self.Person.drop_collection() self.Person(name="A", age=20).save() self.Person(name="D", age=10).save() self.Person(name="B", age=40).save() self.Person(name="C", age=30).save() t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}") d = {"ol": self.Person.objects.order_by('-name')} self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:') d = {"ol": self.Person.objects.order_by('+name')} self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:') d = {"ol": self.Person.objects.order_by('-age')} self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:') d = {"ol": self.Person.objects.order_by('+age')} self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:') self.Person.drop_collection() def test_q_object_filter_in_template(self): self.Person.drop_collection() self.Person(name="A", age=20).save() self.Person(name="D", age=10).save() self.Person(name="B", age=40).save() self.Person(name="C", age=30).save() t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}") d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))} self.assertEqual(t.render(Context(d)), 'D-10:C-30:') # Check 
double rendering doesn't throw an error self.assertEqual(t.render(Context(d)), 'D-10:C-30:') def test_get_document_or_404(self): p = self.Person(name="G404") p.save() self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234') self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk)) def test_pagination(self): """Ensure that Pagination works as expected """ class Page(Document): name = StringField() Page.drop_collection() for i in xrange(1, 11): Page(name=str(i)).save() paginator = Paginator(Page.objects.all(), 2) t = Template("{% for i in page.object_list %}{{ i.name }}:{% endfor %}") for p in paginator.page_range: d = {"page": paginator.page(p)} end = p * 2 start = end - 1 self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end)) class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): backend = SessionStore def setUp(self): connect(db='mongoenginetest') MongoSession.drop_collection() super(MongoDBSessionTest, self).setUp() def test_first_save(self): session = SessionStore() session['test'] = True session.save() self.assertTrue('test' in session) MongoEngine-mongoengine-7a1b110/tests/test_document.py000066400000000000000000003142111177143177100231440ustar00rootroot00000000000000import os import pickle import pymongo import bson import unittest import warnings from datetime import datetime from tests.fixtures import Base, Mixin, PickleEmbedded, PickleTest from mongoengine import * from mongoengine.base import NotRegistered, InvalidDocumentError from mongoengine.queryset import InvalidQueryError from mongoengine.connection import get_db TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') class DocumentTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') self.db = get_db() class Person(Document): name = StringField() age = IntField() meta = {'allow_inheritance': True} self.Person = Person def tearDown(self): self.Person.drop_collection() def test_future_warning(self): """Add FutureWarning for 
future allow_inhertiance default change. """ with warnings.catch_warnings(True) as errors: class SimpleBase(Document): a = IntField() class InheritedClass(SimpleBase): b = IntField() InheritedClass() self.assertEquals(len(errors), 1) warning = errors[0] self.assertEquals(FutureWarning, warning.category) self.assertTrue("InheritedClass" in warning.message.message) def test_drop_collection(self): """Ensure that the collection may be dropped from the database. """ self.Person(name='Test').save() collection = self.Person._get_collection_name() self.assertTrue(collection in self.db.collection_names()) self.Person.drop_collection() self.assertFalse(collection in self.db.collection_names()) def test_queryset_resurrects_dropped_collection(self): self.Person.objects().item_frequencies('name') self.Person.drop_collection() self.assertEqual({}, self.Person.objects().item_frequencies('name')) class Actor(self.Person): pass # Ensure works correctly with inhertited classes Actor.objects().item_frequencies('name') self.Person.drop_collection() self.assertEqual({}, Actor.objects().item_frequencies('name')) def test_definition(self): """Ensure that document may be defined using fields. """ name_field = StringField() age_field = IntField() class Person(Document): name = name_field age = age_field non_field = True self.assertEqual(Person._fields['name'], name_field) self.assertEqual(Person._fields['age'], age_field) self.assertFalse('non_field' in Person._fields) self.assertTrue('id' in Person._fields) # Test iteration over fields fields = list(Person()) self.assertTrue('name' in fields and 'age' in fields) # Ensure Document isn't treated like an actual document self.assertFalse(hasattr(Document, '_fields')) def test_collection_naming(self): """Ensure that a collection with a specified name may be used. 
""" class DefaultNamingTest(Document): pass self.assertEquals('default_naming_test', DefaultNamingTest._get_collection_name()) class CustomNamingTest(Document): meta = {'collection': 'pimp_my_collection'} self.assertEquals('pimp_my_collection', CustomNamingTest._get_collection_name()) class DynamicNamingTest(Document): meta = {'collection': lambda c: "DYNAMO"} self.assertEquals('DYNAMO', DynamicNamingTest._get_collection_name()) # Use Abstract class to handle backwards compatibility class BaseDocument(Document): meta = { 'abstract': True, 'collection': lambda c: c.__name__.lower() } class OldNamingConvention(BaseDocument): pass self.assertEquals('oldnamingconvention', OldNamingConvention._get_collection_name()) class InheritedAbstractNamingTest(BaseDocument): meta = {'collection': 'wibble'} self.assertEquals('wibble', InheritedAbstractNamingTest._get_collection_name()) with warnings.catch_warnings(record=True) as w: # Cause all warnings to always be triggered. warnings.simplefilter("always") class NonAbstractBase(Document): pass class InheritedDocumentFailTest(NonAbstractBase): meta = {'collection': 'fail'} self.assertTrue(issubclass(w[0].category, SyntaxWarning)) self.assertEquals('non_abstract_base', InheritedDocumentFailTest._get_collection_name()) # Mixin tests class BaseMixin(object): meta = { 'collection': lambda c: c.__name__.lower() } class OldMixinNamingConvention(Document, BaseMixin): pass self.assertEquals('oldmixinnamingconvention', OldMixinNamingConvention._get_collection_name()) class BaseMixin(object): meta = { 'collection': lambda c: c.__name__.lower() } class BaseDocument(Document, BaseMixin): meta = {'allow_inheritance': True} class MyDocument(BaseDocument): pass self.assertEquals('basedocument', MyDocument._get_collection_name()) def test_get_superclasses(self): """Ensure that the correct list of superclasses is assembled. 
""" class Animal(Document): meta = {'allow_inheritance': True} class Fish(Animal): pass class Mammal(Animal): pass class Human(Mammal): pass class Dog(Mammal): pass mammal_superclasses = {'Animal': Animal} self.assertEqual(Mammal._superclasses, mammal_superclasses) dog_superclasses = { 'Animal': Animal, 'Animal.Mammal': Mammal, } self.assertEqual(Dog._superclasses, dog_superclasses) def test_external_superclasses(self): """Ensure that the correct list of sub and super classes is assembled. when importing part of the model """ class Animal(Base): pass class Fish(Animal): pass class Mammal(Animal): pass class Human(Mammal): pass class Dog(Mammal): pass mammal_superclasses = {'Base': Base, 'Base.Animal': Animal} self.assertEqual(Mammal._superclasses, mammal_superclasses) dog_superclasses = { 'Base': Base, 'Base.Animal': Animal, 'Base.Animal.Mammal': Mammal, } self.assertEqual(Dog._superclasses, dog_superclasses) Base.drop_collection() h = Human() h.save() self.assertEquals(Human.objects.count(), 1) self.assertEquals(Mammal.objects.count(), 1) self.assertEquals(Animal.objects.count(), 1) self.assertEquals(Base.objects.count(), 1) Base.drop_collection() def test_polymorphic_queries(self): """Ensure that the correct subclasses are returned from a query""" class Animal(Document): meta = {'allow_inheritance': True} class Fish(Animal): pass class Mammal(Animal): pass class Human(Mammal): pass class Dog(Mammal): pass Animal.drop_collection() Animal().save() Fish().save() Mammal().save() Human().save() Dog().save() classes = [obj.__class__ for obj in Animal.objects] self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog]) classes = [obj.__class__ for obj in Mammal.objects] self.assertEqual(classes, [Mammal, Human, Dog]) classes = [obj.__class__ for obj in Human.objects] self.assertEqual(classes, [Human]) Animal.drop_collection() def test_polymorphic_references(self): """Ensure that the correct subclasses are returned from a query when using references / generic 
references """ class Animal(Document): meta = {'allow_inheritance': True} class Fish(Animal): pass class Mammal(Animal): pass class Human(Mammal): pass class Dog(Mammal): pass class Zoo(Document): animals = ListField(ReferenceField(Animal)) Zoo.drop_collection() Animal.drop_collection() Animal().save() Fish().save() Mammal().save() Human().save() Dog().save() # Save a reference to each animal zoo = Zoo(animals=Animal.objects) zoo.save() zoo.reload() classes = [a.__class__ for a in Zoo.objects.first().animals] self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog]) Zoo.drop_collection() class Zoo(Document): animals = ListField(GenericReferenceField(Animal)) # Save a reference to each animal zoo = Zoo(animals=Animal.objects) zoo.save() zoo.reload() classes = [a.__class__ for a in Zoo.objects.first().animals] self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog]) Zoo.drop_collection() Animal.drop_collection() def test_reference_inheritance(self): class Stats(Document): created = DateTimeField(default=datetime.now) meta = {'allow_inheritance': False} class CompareStats(Document): generated = DateTimeField(default=datetime.now) stats = ListField(ReferenceField(Stats)) Stats.drop_collection() CompareStats.drop_collection() list_stats = [] for i in xrange(10): s = Stats() s.save() list_stats.append(s) cmp_stats = CompareStats(stats=list_stats) cmp_stats.save() self.assertEqual(list_stats, CompareStats.objects.first().stats) def test_inheritance(self): """Ensure that document may inherit fields from a superclass document. 
""" class Employee(self.Person): salary = IntField() self.assertTrue('name' in Employee._fields) self.assertTrue('salary' in Employee._fields) self.assertEqual(Employee._get_collection_name(), self.Person._get_collection_name()) # Ensure that MRO error is not raised class A(Document): meta = {'allow_inheritance': True} class B(A): pass class C(B): pass def test_allow_inheritance(self): """Ensure that inheritance may be disabled on simple classes and that _cls and _types will not be used. """ class Animal(Document): name = StringField() meta = {'allow_inheritance': False} Animal.drop_collection() def create_dog_class(): class Dog(Animal): pass self.assertRaises(ValueError, create_dog_class) # Check that _cls etc aren't present on simple documents dog = Animal(name='dog') dog.save() collection = self.db[Animal._get_collection_name()] obj = collection.find_one() self.assertFalse('_cls' in obj) self.assertFalse('_types' in obj) Animal.drop_collection() def create_employee_class(): class Employee(self.Person): meta = {'allow_inheritance': False} self.assertRaises(ValueError, create_employee_class) # Test the same for embedded documents class Comment(EmbeddedDocument): content = StringField() meta = {'allow_inheritance': False} def create_special_comment(): class SpecialComment(Comment): pass self.assertRaises(ValueError, create_special_comment) comment = Comment(content='test') self.assertFalse('_cls' in comment.to_mongo()) self.assertFalse('_types' in comment.to_mongo()) def test_allow_inheritance_abstract_document(self): """Ensure that abstract documents can set inheritance rules and that _cls and _types will not be used. 
""" class FinalDocument(Document): meta = {'abstract': True, 'allow_inheritance': False} class Animal(FinalDocument): name = StringField() Animal.drop_collection() def create_dog_class(): class Dog(Animal): pass self.assertRaises(ValueError, create_dog_class) # Check that _cls etc aren't present on simple documents dog = Animal(name='dog') dog.save() collection = self.db[Animal._get_collection_name()] obj = collection.find_one() self.assertFalse('_cls' in obj) self.assertFalse('_types' in obj) Animal.drop_collection() def test_how_to_turn_off_inheritance(self): """Demonstrates migrating from allow_inheritance = True to False. """ class Animal(Document): name = StringField() meta = { 'indexes': ['name'] } Animal.drop_collection() dog = Animal(name='dog') dog.save() collection = self.db[Animal._get_collection_name()] obj = collection.find_one() self.assertTrue('_cls' in obj) self.assertTrue('_types' in obj) info = collection.index_information() info = [value['key'] for key, value in info.iteritems()] self.assertEquals([[(u'_id', 1)], [(u'_types', 1), (u'name', 1)]], info) # Turn off inheritance class Animal(Document): name = StringField() meta = { 'allow_inheritance': False, 'indexes': ['name'] } collection.update({}, {"$unset": {"_types": 1, "_cls": 1}}, multi=True) # Confirm extra data is removed obj = collection.find_one() self.assertFalse('_cls' in obj) self.assertFalse('_types' in obj) info = collection.index_information() info = [value['key'] for key, value in info.iteritems()] self.assertEquals([[(u'_id', 1)], [(u'_types', 1), (u'name', 1)]], info) info = collection.index_information() indexes_to_drop = [key for key, value in info.iteritems() if '_types' in dict(value['key'])] for index in indexes_to_drop: collection.drop_index(index) info = collection.index_information() info = [value['key'] for key, value in info.iteritems()] self.assertEquals([[(u'_id', 1)]], info) # Recreate indexes dog = Animal.objects.first() dog.save() info = 
collection.index_information() info = [value['key'] for key, value in info.iteritems()] self.assertEquals([[(u'_id', 1)], [(u'name', 1),]], info) Animal.drop_collection() def test_abstract_documents(self): """Ensure that a document superclass can be marked as abstract thereby not using it as the name for the collection.""" class Animal(Document): name = StringField() meta = {'abstract': True} class Fish(Animal): pass class Guppy(Fish): pass class Mammal(Animal): meta = {'abstract': True} class Human(Mammal): pass self.assertFalse('collection' in Animal._meta) self.assertFalse('collection' in Mammal._meta) self.assertEqual(Animal._get_collection_name(), None) self.assertEqual(Mammal._get_collection_name(), None) self.assertEqual(Fish._get_collection_name(), 'fish') self.assertEqual(Guppy._get_collection_name(), 'fish') self.assertEqual(Human._get_collection_name(), 'human') def create_bad_abstract(): class EvilHuman(Human): evil = BooleanField(default=True) meta = {'abstract': True} self.assertRaises(ValueError, create_bad_abstract) def test_collection_name(self): """Ensure that a collection with a specified name may be used. """ collection = 'personCollTest' if collection in self.db.collection_names(): self.db.drop_collection(collection) class Person(Document): name = StringField() meta = {'collection': collection} user = Person(name="Test User") user.save() self.assertTrue(collection in self.db.collection_names()) user_obj = self.db[collection].find_one() self.assertEqual(user_obj['name'], "Test User") user_obj = Person.objects[0] self.assertEqual(user_obj.name, "Test User") Person.drop_collection() self.assertFalse(collection in self.db.collection_names()) def test_collection_name_and_primary(self): """Ensure that a collection with a specified name may be used. 
""" class Person(Document): name = StringField(primary_key=True) meta = {'collection': 'app'} user = Person(name="Test User") user.save() user_obj = Person.objects[0] self.assertEqual(user_obj.name, "Test User") Person.drop_collection() def test_inherited_collections(self): """Ensure that subclassed documents don't override parents' collections. """ with warnings.catch_warnings(record=True) as w: # Cause all warnings to always be triggered. warnings.simplefilter("always") class Drink(Document): name = StringField() class AlcoholicDrink(Drink): meta = {'collection': 'booze'} class Drinker(Document): drink = GenericReferenceField() # Confirm we triggered a SyntaxWarning assert issubclass(w[0].category, SyntaxWarning) Drink.drop_collection() AlcoholicDrink.drop_collection() Drinker.drop_collection() red_bull = Drink(name='Red Bull') red_bull.save() programmer = Drinker(drink=red_bull) programmer.save() beer = AlcoholicDrink(name='Beer') beer.save() real_person = Drinker(drink=beer) real_person.save() self.assertEqual(Drinker.objects[0].drink.name, red_bull.name) self.assertEqual(Drinker.objects[1].drink.name, beer.name) def test_capped_collection(self): """Ensure that capped collections work properly. 
""" class Log(Document): date = DateTimeField(default=datetime.now) meta = { 'max_documents': 10, 'max_size': 90000, } Log.drop_collection() # Ensure that the collection handles up to its maximum for i in range(10): Log().save() self.assertEqual(len(Log.objects), 10) # Check that extra documents don't increase the size Log().save() self.assertEqual(len(Log.objects), 10) options = Log.objects._collection.options() self.assertEqual(options['capped'], True) self.assertEqual(options['max'], 10) self.assertEqual(options['size'], 90000) # Check that the document cannot be redefined with different options def recreate_log_document(): class Log(Document): date = DateTimeField(default=datetime.now) meta = { 'max_documents': 11, } # Create the collection by accessing Document.objects Log.objects self.assertRaises(InvalidCollectionError, recreate_log_document) Log.drop_collection() def test_indexes(self): """Ensure that indexes are used when meta[indexes] is specified. """ class BlogPost(Document): date = DateTimeField(db_field='addDate', default=datetime.now) category = StringField() tags = ListField(StringField()) meta = { 'indexes': [ '-date', 'tags', ('category', '-date') ], 'allow_inheritance': True } BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() # _id, '-date', 'tags', ('cat', 'date') # NB: there is no index on _types by itself, since # the indices on -date and tags will both contain # _types as first element in the key self.assertEqual(len(info), 4) # Indexes are lazy so use list() to perform query list(BlogPost.objects) info = BlogPost.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)] in info) self.assertTrue([('_types', 1), ('addDate', -1)] in info) # tags is a list field so it shouldn't have _types in the index self.assertTrue([('tags', 1)] in info) class ExtendedBlogPost(BlogPost): title = StringField() meta = 
{'indexes': ['title']} BlogPost.drop_collection() list(ExtendedBlogPost.objects) info = ExtendedBlogPost.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)] in info) self.assertTrue([('_types', 1), ('addDate', -1)] in info) self.assertTrue([('_types', 1), ('title', 1)] in info) BlogPost.drop_collection() def test_db_field_load(self): """Ensure we load data correctly """ class Person(Document): name = StringField(required=True) _rank = StringField(required=False, db_field="rank") @property def rank(self): return self._rank or "Private" Person.drop_collection() Person(name="Jack", _rank="Corporal").save() Person(name="Fred").save() self.assertEquals(Person.objects.get(name="Jack").rank, "Corporal") self.assertEquals(Person.objects.get(name="Fred").rank, "Private") def test_explicit_geo2d_index(self): """Ensure that geo2d indexes work when created via meta[indexes] """ class Place(Document): location = DictField() meta = { 'indexes': [ '*location.point', ], } Place.drop_collection() info = Place.objects._collection.index_information() # Indexes are lazy so use list() to perform query list(Place.objects) info = Place.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] self.assertTrue([('location.point', '2d')] in info) def test_dictionary_indexes(self): """Ensure that indexes are used when meta[indexes] contains dictionaries instead of lists. 
""" class BlogPost(Document): date = DateTimeField(db_field='addDate', default=datetime.now) category = StringField() tags = ListField(StringField()) meta = { 'indexes': [ { 'fields': ['-date'], 'unique': True, 'sparse': True, 'types': False }, ], } BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() # _id, '-date' self.assertEqual(len(info), 3) # Indexes are lazy so use list() to perform query list(BlogPost.objects) info = BlogPost.objects._collection.index_information() info = [(value['key'], value.get('unique', False), value.get('sparse', False)) for key, value in info.iteritems()] self.assertTrue(([('addDate', -1)], True, True) in info) BlogPost.drop_collection() def test_abstract_index_inheritance(self): class UserBase(Document): meta = { 'abstract': True, 'indexes': ['user_guid'] } user_guid = StringField(required=True) class Person(UserBase): meta = { 'indexes': ['name'], } name = StringField() Person.drop_collection() p = Person(name="test", user_guid='123') p.save() self.assertEquals(1, Person.objects.count()) info = Person.objects._collection.index_information() self.assertEqual(info.keys(), ['_types_1_user_guid_1', '_id_', '_types_1_name_1']) Person.drop_collection() def test_disable_index_creation(self): """Tests setting auto_create_index to False on the connection will disable any index generation. 
""" class User(Document): meta = { 'indexes': ['user_guid'], 'auto_create_index': False } user_guid = StringField(required=True) User.drop_collection() u = User(user_guid='123') u.save() self.assertEquals(1, User.objects.count()) info = User.objects._collection.index_information() self.assertEqual(info.keys(), ['_id_']) User.drop_collection() def test_embedded_document_index(self): """Tests settings an index on an embedded document """ class Date(EmbeddedDocument): year = IntField(db_field='yr') class BlogPost(Document): title = StringField() date = EmbeddedDocumentField(Date) meta = { 'indexes': [ '-date.year' ], } BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() self.assertEqual(info.keys(), ['_types_1_date.yr_-1', '_id_']) BlogPost.drop_collection() def test_list_embedded_document_index(self): """Ensure list embedded documents can be indexed """ class Tag(EmbeddedDocument): name = StringField(db_field='tag') class BlogPost(Document): title = StringField() tags = ListField(EmbeddedDocumentField(Tag)) meta = { 'indexes': [ 'tags.name' ], } BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() # we don't use _types in with list fields by default self.assertEqual(info.keys(), ['_id_', '_types_1', 'tags.tag_1']) post1 = BlogPost(title="Embedded Indexes tests in place", tags=[Tag(name="about"), Tag(name="time")] ) post1.save() BlogPost.drop_collection() def test_recursive_embedded_objects_dont_break_indexes(self): class RecursiveObject(EmbeddedDocument): obj = EmbeddedDocumentField('self') class RecursiveDocument(Document): recursive_obj = EmbeddedDocumentField(RecursiveObject) info = RecursiveDocument.objects._collection.index_information() self.assertEqual(info.keys(), ['_id_', '_types_1']) def test_geo_indexes_recursion(self): class User(Document): channel = ReferenceField('Channel') location = GeoPointField() class Channel(Document): user = ReferenceField('User') location = GeoPointField() 
self.assertEquals(len(User._geo_indices()), 2) def test_covered_index(self): """Ensure that covered indexes can be used """ class Test(Document): a = IntField() meta = { 'indexes': ['a'], 'allow_inheritance': False } Test.drop_collection() obj = Test(a=1) obj.save() # Need to be explicit about covered indexes as mongoDB doesn't know if # the documents returned might have more keys in that here. query_plan = Test.objects(id=obj.id).exclude('a').explain() self.assertFalse(query_plan['indexOnly']) query_plan = Test.objects(id=obj.id).only('id').explain() self.assertTrue(query_plan['indexOnly']) query_plan = Test.objects(a=1).only('a').exclude('id').explain() self.assertTrue(query_plan['indexOnly']) def test_index_on_id(self): class BlogPost(Document): meta = { 'indexes': [ ['categories', 'id'] ], 'allow_inheritance': False } title = StringField(required=True) description = StringField(required=True) categories = ListField() BlogPost.drop_collection() indexes = BlogPost.objects._collection.index_information() self.assertEquals(indexes['categories_1__id_1']['key'], [('categories', 1), ('_id', 1)]) def test_hint(self): class BlogPost(Document): tags = ListField(StringField()) meta = { 'indexes': [ 'tags', ], } BlogPost.drop_collection() for i in xrange(0, 10): tags = [("tag %i" % n) for n in xrange(0, i % 2)] BlogPost(tags=tags).save() self.assertEquals(BlogPost.objects.count(), 10) self.assertEquals(BlogPost.objects.hint().count(), 10) self.assertEquals(BlogPost.objects.hint([('tags', 1)]).count(), 10) self.assertEquals(BlogPost.objects.hint([('ZZ', 1)]).count(), 10) def invalid_index(): BlogPost.objects.hint('tags') self.assertRaises(TypeError, invalid_index) def invalid_index_2(): return BlogPost.objects.hint(('tags', 1)) self.assertRaises(TypeError, invalid_index_2) def test_unique(self): """Ensure that uniqueness constraints are applied to fields. 
""" class BlogPost(Document): title = StringField() slug = StringField(unique=True) BlogPost.drop_collection() post1 = BlogPost(title='test1', slug='test') post1.save() # Two posts with the same slug is not allowed post2 = BlogPost(title='test2', slug='test') self.assertRaises(OperationError, post2.save) def test_unique_with(self): """Ensure that unique_with constraints are applied to fields. """ class Date(EmbeddedDocument): year = IntField(db_field='yr') class BlogPost(Document): title = StringField() date = EmbeddedDocumentField(Date) slug = StringField(unique_with='date.year') BlogPost.drop_collection() post1 = BlogPost(title='test1', date=Date(year=2009), slug='test') post1.save() # day is different so won't raise exception post2 = BlogPost(title='test2', date=Date(year=2010), slug='test') post2.save() # Now there will be two docs with the same slug and the same day: fail post3 = BlogPost(title='test3', date=Date(year=2010), slug='test') self.assertRaises(OperationError, post3.save) BlogPost.drop_collection() def test_unique_embedded_document(self): """Ensure that uniqueness constraints are applied to fields on embedded documents. """ class SubDocument(EmbeddedDocument): year = IntField(db_field='yr') slug = StringField(unique=True) class BlogPost(Document): title = StringField() sub = EmbeddedDocumentField(SubDocument) BlogPost.drop_collection() post1 = BlogPost(title='test1', sub=SubDocument(year=2009, slug="test")) post1.save() # sub.slug is different so won't raise exception post2 = BlogPost(title='test2', sub=SubDocument(year=2010, slug='another-slug')) post2.save() # Now there will be two docs with the same sub.slug post3 = BlogPost(title='test3', sub=SubDocument(year=2010, slug='test')) self.assertRaises(OperationError, post3.save) BlogPost.drop_collection() def test_unique_with_embedded_document_and_embedded_unique(self): """Ensure that uniqueness constraints are applied to fields on embedded documents. And work with unique_with as well. 
""" class SubDocument(EmbeddedDocument): year = IntField(db_field='yr') slug = StringField(unique=True) class BlogPost(Document): title = StringField(unique_with='sub.year') sub = EmbeddedDocumentField(SubDocument) BlogPost.drop_collection() post1 = BlogPost(title='test1', sub=SubDocument(year=2009, slug="test")) post1.save() # sub.slug is different so won't raise exception post2 = BlogPost(title='test2', sub=SubDocument(year=2010, slug='another-slug')) post2.save() # Now there will be two docs with the same sub.slug post3 = BlogPost(title='test3', sub=SubDocument(year=2010, slug='test')) self.assertRaises(OperationError, post3.save) # Now there will be two docs with the same title and year post3 = BlogPost(title='test1', sub=SubDocument(year=2009, slug='test-1')) self.assertRaises(OperationError, post3.save) BlogPost.drop_collection() def test_unique_and_indexes(self): """Ensure that 'unique' constraints aren't overridden by meta.indexes. """ class Customer(Document): cust_id = IntField(unique=True, required=True) meta = { 'indexes': ['cust_id'], 'allow_inheritance': False, } Customer.drop_collection() cust = Customer(cust_id=1) cust.save() cust_dupe = Customer(cust_id=1) try: cust_dupe.save() raise AssertionError, "We saved a dupe!" except OperationError: pass Customer.drop_collection() def test_unique_and_primary(self): """If you set a field as primary, then unexpected behaviour can occur. You won't create a duplicate but you will update an existing document. """ class User(Document): name = StringField(primary_key=True, unique=True) password = StringField() User.drop_collection() user = User(name='huangz', password='secret') user.save() user = User(name='huangz', password='secret2') user.save() self.assertEqual(User.objects.count(), 1) self.assertEqual(User.objects.get().password, 'secret2') User.drop_collection() def test_custom_id_field(self): """Ensure that documents may be created with custom primary keys. 
""" class User(Document): username = StringField(primary_key=True) name = StringField() meta = {'allow_inheritance': True} User.drop_collection() self.assertEqual(User._fields['username'].db_field, '_id') self.assertEqual(User._meta['id_field'], 'username') def create_invalid_user(): User(name='test').save() # no primary key field self.assertRaises(ValidationError, create_invalid_user) def define_invalid_user(): class EmailUser(User): email = StringField(primary_key=True) self.assertRaises(ValueError, define_invalid_user) class EmailUser(User): email = StringField() user = User(username='test', name='test user') user.save() user_obj = User.objects.first() self.assertEqual(user_obj.id, 'test') self.assertEqual(user_obj.pk, 'test') user_son = User.objects._collection.find_one() self.assertEqual(user_son['_id'], 'test') self.assertTrue('username' not in user_son['_id']) User.drop_collection() user = User(pk='mongo', name='mongo user') user.save() user_obj = User.objects.first() self.assertEqual(user_obj.id, 'mongo') self.assertEqual(user_obj.pk, 'mongo') user_son = User.objects._collection.find_one() self.assertEqual(user_son['_id'], 'mongo') self.assertTrue('username' not in user_son['_id']) User.drop_collection() def test_document_not_registered(self): class Place(Document): name = StringField() meta = {'allow_inheritance': True} class NicePlace(Place): pass Place.drop_collection() Place(name="London").save() NicePlace(name="Buckingham Palace").save() # Mimic Place and NicePlace definitions being in a different file # and the NicePlace model not being imported in at query time. from mongoengine.base import _document_registry del(_document_registry['Place.NicePlace']) def query_without_importing_nice_place(): print Place.objects.all() self.assertRaises(NotRegistered, query_without_importing_nice_place) def test_creation(self): """Ensure that document may be created using keyword arguments. 
""" person = self.Person(name="Test User", age=30) self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 30) def test_to_dbref(self): """Ensure that you can get a dbref of a document""" person = self.Person(name="Test User", age=30) self.assertRaises(OperationError, person.to_dbref) person.save() person.to_dbref() def test_reload(self): """Ensure that attributes may be reloaded. """ person = self.Person(name="Test User", age=20) person.save() person_obj = self.Person.objects.first() person_obj.name = "Mr Test User" person_obj.age = 21 person_obj.save() self.assertEqual(person.name, "Test User") self.assertEqual(person.age, 20) person.reload() self.assertEqual(person.name, "Mr Test User") self.assertEqual(person.age, 21) def test_reload_referencing(self): """Ensures reloading updates weakrefs correctly """ class Embedded(EmbeddedDocument): dict_field = DictField() list_field = ListField() class Doc(Document): dict_field = DictField() list_field = ListField() embedded_field = EmbeddedDocumentField(Embedded) Doc.drop_collection() doc = Doc() doc.dict_field = {'hello': 'world'} doc.list_field = ['1', 2, {'hello': 'world'}] embedded_1 = Embedded() embedded_1.dict_field = {'hello': 'world'} embedded_1.list_field = ['1', 2, {'hello': 'world'}] doc.embedded_field = embedded_1 doc.save() doc = doc.reload(10) doc.list_field.append(1) doc.dict_field['woot'] = "woot" doc.embedded_field.list_field.append(1) doc.embedded_field.dict_field['woot'] = "woot" self.assertEquals(doc._get_changed_fields(), [ 'list_field', 'dict_field', 'embedded_field.list_field', 'embedded_field.dict_field']) doc.save() doc = doc.reload(10) self.assertEquals(doc._get_changed_fields(), []) self.assertEquals(len(doc.list_field), 4) self.assertEquals(len(doc.dict_field), 2) self.assertEquals(len(doc.embedded_field.list_field), 4) self.assertEquals(len(doc.embedded_field.dict_field), 2) def test_dictionary_access(self): """Ensure that dictionary-style field access works properly. 
""" person = self.Person(name='Test User', age=30) self.assertEquals(person['name'], 'Test User') self.assertRaises(KeyError, person.__getitem__, 'salary') self.assertRaises(KeyError, person.__setitem__, 'salary', 50) person['name'] = 'Another User' self.assertEquals(person['name'], 'Another User') # Length = length(assigned fields + id) self.assertEquals(len(person), 3) self.assertTrue('age' in person) person.age = None self.assertFalse('age' in person) self.assertFalse('nationality' in person) def test_embedded_document(self): """Ensure that embedded documents are set up correctly. """ class Comment(EmbeddedDocument): content = StringField() self.assertTrue('content' in Comment._fields) self.assertFalse('id' in Comment._fields) self.assertFalse('collection' in Comment._meta) def test_embedded_document_validation(self): """Ensure that embedded documents may be validated. """ class Comment(EmbeddedDocument): date = DateTimeField() content = StringField(required=True) comment = Comment() self.assertRaises(ValidationError, comment.validate) comment.content = 'test' comment.validate() comment.date = 4 self.assertRaises(ValidationError, comment.validate) comment.date = datetime.now() comment.validate() def test_embedded_db_field_validate(self): class SubDoc(EmbeddedDocument): val = IntField() class Doc(Document): e = EmbeddedDocumentField(SubDoc, db_field='eb') Doc.drop_collection() Doc(e=SubDoc(val=15)).save() doc = Doc.objects.first() doc.validate() self.assertEquals([None, 'e'], doc._data.keys()) def test_save(self): """Ensure that a document may be saved in the database. 
""" # Create person object and save it to the database person = self.Person(name='Test User', age=30) person.save() # Ensure that the object is in the database collection = self.db[self.Person._get_collection_name()] person_obj = collection.find_one({'name': 'Test User'}) self.assertEqual(person_obj['name'], 'Test User') self.assertEqual(person_obj['age'], 30) self.assertEqual(person_obj['_id'], person.id) # Test skipping validation on save class Recipient(Document): email = EmailField(required=True) recipient = Recipient(email='root@localhost') self.assertRaises(ValidationError, recipient.save) try: recipient.save(validate=False) except ValidationError: self.fail() def test_save_to_a_value_that_equates_to_false(self): class Thing(EmbeddedDocument): count = IntField() class User(Document): thing = EmbeddedDocumentField(Thing) User.drop_collection() user = User(thing=Thing(count=1)) user.save() user.reload() user.thing.count = 0 user.save() user.reload() self.assertEquals(user.thing.count, 0) def test_save_max_recursion_not_hit(self): class Person(Document): name = StringField() parent = ReferenceField('self') friend = ReferenceField('self') Person.drop_collection() p1 = Person(name="Wilson Snr") p1.parent = None p1.save() p2 = Person(name="Wilson Jr") p2.parent = p1 p2.save() p1.friend = p2 p1.save() # Confirm can save and it resets the changed fields without hitting # max recursion error p0 = Person.objects.first() p0.name = 'wpjunior' p0.save() def test_save_max_recursion_not_hit_with_file_field(self): class Foo(Document): name = StringField() picture = FileField() bar = ReferenceField('self') Foo.drop_collection() a = Foo(name='hello') a.save() a.bar = a a.picture = open(TEST_IMAGE_PATH, 'rb') a.save() # Confirm can save and it resets the changed fields without hitting # max recursion error b = Foo.objects.with_id(a.id) b.name='world' b.save() self.assertEquals(b.picture, b.bar.picture, b.bar.bar.picture) def test_save_cascades(self): class Person(Document): 
name = StringField() parent = ReferenceField('self') Person.drop_collection() p1 = Person(name="Wilson Snr") p1.parent = None p1.save() p2 = Person(name="Wilson Jr") p2.parent = p1 p2.save() p = Person.objects(name="Wilson Jr").get() p.parent.name = "Daddy Wilson" p.save() p1.reload() self.assertEquals(p1.name, p.parent.name) def test_save_cascade_kwargs(self): class Person(Document): name = StringField() parent = ReferenceField('self') Person.drop_collection() p1 = Person(name="Wilson Snr") p1.parent = None p1.save() p2 = Person(name="Wilson Jr") p2.parent = p1 p2.save(force_insert=True, cascade_kwargs={"force_insert": False}) p = Person.objects(name="Wilson Jr").get() p.parent.name = "Daddy Wilson" p.save() p1.reload() self.assertEquals(p1.name, p.parent.name) def test_save_cascade_meta(self): class Person(Document): name = StringField() parent = ReferenceField('self') meta = {'cascade': False} Person.drop_collection() p1 = Person(name="Wilson Snr") p1.parent = None p1.save() p2 = Person(name="Wilson Jr") p2.parent = p1 p2.save() p = Person.objects(name="Wilson Jr").get() p.parent.name = "Daddy Wilson" p.save() p1.reload() self.assertNotEquals(p1.name, p.parent.name) p.save(cascade=True) p1.reload() self.assertEquals(p1.name, p.parent.name) def test_save_cascades_generically(self): class Person(Document): name = StringField() parent = GenericReferenceField() Person.drop_collection() p1 = Person(name="Wilson Snr") p1.save() p2 = Person(name="Wilson Jr") p2.parent = p1 p2.save() p = Person.objects(name="Wilson Jr").get() p.parent.name = "Daddy Wilson" p.save() p1.reload() self.assertEquals(p1.name, p.parent.name) def test_update(self): """Ensure that an existing document is updated instead of be overwritten. 
""" # Create person object and save it to the database person = self.Person(name='Test User', age=30) person.save() # Create same person object, with same id, without age same_person = self.Person(name='Test') same_person.id = person.id same_person.save() # Confirm only one object self.assertEquals(self.Person.objects.count(), 1) # reload person.reload() same_person.reload() # Confirm the same self.assertEqual(person, same_person) self.assertEqual(person.name, same_person.name) self.assertEqual(person.age, same_person.age) # Confirm the saved values self.assertEqual(person.name, 'Test') self.assertEqual(person.age, 30) # Test only / exclude only updates included fields person = self.Person.objects.only('name').get() person.name = 'User' person.save() person.reload() self.assertEqual(person.name, 'User') self.assertEqual(person.age, 30) # test exclude only updates set fields person = self.Person.objects.exclude('name').get() person.age = 21 person.save() person.reload() self.assertEqual(person.name, 'User') self.assertEqual(person.age, 21) # Test only / exclude can set non excluded / included fields person = self.Person.objects.only('name').get() person.name = 'Test' person.age = 30 person.save() person.reload() self.assertEqual(person.name, 'Test') self.assertEqual(person.age, 30) # test exclude only updates set fields person = self.Person.objects.exclude('name').get() person.name = 'User' person.age = 21 person.save() person.reload() self.assertEqual(person.name, 'User') self.assertEqual(person.age, 21) # Confirm does remove unrequired fields person = self.Person.objects.exclude('name').get() person.age = None person.save() person.reload() self.assertEqual(person.name, 'User') self.assertEqual(person.age, None) person = self.Person.objects.get() person.name = None person.age = None person.save() person.reload() self.assertEqual(person.name, None) self.assertEqual(person.age, None) def test_document_update(self): def update_not_saved_raises(): person = 
self.Person(name='dcrosta') person.update(set__name='Dan Crosta') self.assertRaises(OperationError, update_not_saved_raises) author = self.Person(name='dcrosta') author.save() author.update(set__name='Dan Crosta') author.reload() p1 = self.Person.objects.first() self.assertEquals(p1.name, author.name) def update_no_value_raises(): person = self.Person.objects.first() person.update() self.assertRaises(OperationError, update_no_value_raises) def update_no_op_raises(): person = self.Person.objects.first() person.update(name="Dan") self.assertRaises(InvalidQueryError, update_no_op_raises) def test_embedded_update(self): """ Test update on `EmbeddedDocumentField` fields """ class Page(EmbeddedDocument): log_message = StringField(verbose_name="Log message", required=True) class Site(Document): page = EmbeddedDocumentField(Page) Site.drop_collection() site = Site(page=Page(log_message="Warning: Dummy message")) site.save() # Update site = Site.objects.first() site.page.log_message = "Error: Dummy message" site.save() site = Site.objects.first() self.assertEqual(site.page.log_message, "Error: Dummy message") def test_embedded_update_db_field(self): """ Test update on `EmbeddedDocumentField` fields when db_field is other than default. 
""" class Page(EmbeddedDocument): log_message = StringField(verbose_name="Log message", db_field="page_log_message", required=True) class Site(Document): page = EmbeddedDocumentField(Page) Site.drop_collection() site = Site(page=Page(log_message="Warning: Dummy message")) site.save() # Update site = Site.objects.first() site.page.log_message = "Error: Dummy message" site.save() site = Site.objects.first() self.assertEqual(site.page.log_message, "Error: Dummy message") def test_circular_reference_deltas(self): class Person(Document): name = StringField() owns = ListField(ReferenceField('Organization')) class Organization(Document): name = StringField() owner = ReferenceField('Person') Person.drop_collection() Organization.drop_collection() person = Person(name="owner") person.save() organization = Organization(name="company") organization.save() person.owns.append(organization) organization.owner = person person.save() organization.save() p = Person.objects[0].select_related() o = Organization.objects.first() self.assertEquals(p.owns[0], o) self.assertEquals(o.owner, p) def test_circular_reference_deltas_2(self): class Person( Document ): name = StringField() owns = ListField( ReferenceField( 'Organization' ) ) employer = ReferenceField( 'Organization' ) class Organization( Document ): name = StringField() owner = ReferenceField( 'Person' ) employees = ListField( ReferenceField( 'Person' ) ) Person.drop_collection() Organization.drop_collection() person = Person( name="owner" ) person.save() employee = Person( name="employee" ) employee.save() organization = Organization( name="company" ) organization.save() person.owns.append( organization ) organization.owner = person organization.employees.append( employee ) employee.employer = organization person.save() organization.save() employee.save() p = Person.objects.get(name="owner") e = Person.objects.get(name="employee") o = Organization.objects.first() self.assertEquals(p.owns[0], o) self.assertEquals(o.owner, p) 
        # (tail of the preceding test, defined above this chunk)
        self.assertEquals(e.employer, o)

    def test_delta(self):
        """Dirty tracking: changing a top-level field is reported by
        _get_changed_fields() and produces the right ($set, $unset) pair
        from _delta().  Assigning an empty dict/list maps to an $unset.
        """
        class Doc(Document):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        # A freshly loaded document has no pending changes.
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEquals(doc._get_changed_fields(), ['string_field'])
        self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {}))

        # _changed_fields is reset by hand between cases so each field's
        # delta is observed in isolation.
        doc._changed_fields = []
        doc.int_field = 1
        self.assertEquals(doc._get_changed_fields(), ['int_field'])
        self.assertEquals(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEquals(doc._get_changed_fields(), ['dict_field'])
        self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEquals(doc._get_changed_fields(), ['list_field'])
        self.assertEquals(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting: assigning an empty container produces an $unset
        # entry (value 1) rather than a $set.
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['dict_field'])
        self.assertEquals(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEquals(doc._get_changed_fields(), ['list_field'])
        self.assertEquals(doc._delta(), ({}, {'list_field': 1}))

    def test_delta_recursive(self):
        """Dirty tracking through an EmbeddedDocumentField: changes inside
        the embedded document surface as dotted paths
        (e.g. 'embedded_field.dict_field') in the parent's delta.
        """
        class Embedded(EmbeddedDocument):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        class Doc(Document):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()
            embedded_field = EmbeddedDocumentField(Embedded)

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        # Assigning a whole embedded doc marks only the top-level field.
        self.assertEquals(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
        # The parent's delta additionally carries the inheritance markers.
        embedded_delta.update({
            '_types': ['Embedded'],
            '_cls': 'Embedded',
        })
        self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
        self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        # An embedded doc inside a list serializes with _cls/_types markers.
        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        self.assertEquals(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k])

        # Changes to a list member produce an index-qualified dotted path.
        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEquals(doc._get_changed_fields(),
                          ['embedded_field.list_field.2.string_field'])
        self.assertEquals(doc.embedded_field._delta(),
                          ({'list_field.2.string_field': 'world'}, {}))
        self.assertEquals(doc._delta(),
                          ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')

        # Test multiple assignments: re-assigning the list slot collapses the
        # change back to the whole-list path.
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}]}, {}))
        self.assertEquals(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].string_field,
                          'hello world')

        # Test list native methods: in-place mutation (pop/append/sort/del)
        # is also tracked and reported as a whole-list $set.
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEquals(doc._delta(),
            ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEquals(doc._delta(),
            ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].list_field,
                          [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort()
        doc.save()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].list_field,
                          [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEquals(doc._delta(),
            ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
        doc.save()
        doc = doc.reload(10)

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEquals(doc._delta(),
            ({}, {'embedded_field.list_field.2.list_field': 1}))
        doc.save()
        doc = doc.reload(10)

        # Embedded documents stored as DictField values are also tracked.
        doc.dict_field['Embedded'] = embedded_1
        doc.save()
        doc = doc.reload(10)

        doc.dict_field['Embedded'].string_field = 'Hello World'
        self.assertEquals(doc._get_changed_fields(),
                          ['dict_field.Embedded.string_field'])
        self.assertEquals(doc._delta(),
                          ({'dict_field.Embedded.string_field': 'Hello World'}, {}))

    def test_delta_db_field(self):
        """Same as test_delta but with custom db_field names: deltas must be
        keyed by the database field name, not the attribute name.
        """
        class Doc(Document):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEquals(doc._get_changed_fields(), ['db_string_field'])
        self.assertEquals(doc._delta(), ({'db_string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEquals(doc._get_changed_fields(), ['db_int_field'])
        self.assertEquals(doc._delta(), ({'db_int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEquals(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEquals(doc._delta(), ({'db_dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEquals(doc._get_changed_fields(), ['db_list_field'])
        self.assertEquals(doc._delta(), ({'db_list_field': list_value}, {}))

        # Test unsetting
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEquals(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEquals(doc._delta(), ({}, {'db_dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEquals(doc._get_changed_fields(), ['db_list_field'])
        self.assertEquals(doc._delta(), ({}, {'db_list_field': 1}))

        # Test it saves that data: a full save/reload round-trip restores the
        # attribute-name view of the custom db_fields.
        doc = Doc()
        doc.save()

        doc.string_field = 'hello'
        doc.int_field = 1
        doc.dict_field = {'hello': 'world'}
        doc.list_field = ['1', 2, {'hello': 'world'}]
        doc.save()
        doc = doc.reload(10)

        self.assertEquals(doc.string_field, 'hello')
        self.assertEquals(doc.int_field, 1)
        self.assertEquals(doc.dict_field, {'hello': 'world'})
        self.assertEquals(doc.list_field, ['1', 2, {'hello': 'world'}])

    def test_delta_recursive_db_field(self):
        """Same as test_delta_recursive but with custom db_field names on both
        the embedded and the parent document: dotted delta paths must use the
        database field names throughout.
        """
        class Embedded(EmbeddedDocument):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        class Doc(Document):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')
            embedded_field = EmbeddedDocumentField(Embedded,
                                                   db_field='db_embedded_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        self.assertEquals(doc._get_changed_fields(), [])
        self.assertEquals(doc._delta(), ({}, {}))

        embedded_1 = Embedded()
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEquals(doc._get_changed_fields(), ['db_embedded_field'])

        embedded_delta = {
            'db_string_field': 'hello',
            'db_int_field': 1,
            'db_dict_field': {'hello': 'world'},
            'db_list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {}))
        embedded_delta.update({
            '_types': ['Embedded'],
            '_cls': 'Embedded',
        })
        self.assertEquals(doc._delta(),
                          ({'db_embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.dict_field = {}
        self.assertEquals(doc._get_changed_fields(),
                          ['db_embedded_field.db_dict_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'db_dict_field': 1}))
        self.assertEquals(doc._delta(),
                          ({}, {'db_embedded_field.db_dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.dict_field, {})

        doc.embedded_field.list_field = []
        self.assertEquals(doc._get_changed_fields(),
                          ['db_embedded_field.db_list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'db_list_field': 1}))
        self.assertEquals(doc._delta(),
                          ({}, {'db_embedded_field.db_list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field, [])

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEquals(doc._get_changed_fields(),
                          ['db_embedded_field.db_list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        self.assertEquals(doc._delta(), ({
            'db_embedded_field.db_list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'db_string_field': 'hello',
                'db_dict_field': {'hello': 'world'},
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k])

        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEquals(doc._get_changed_fields(),
                          ['db_embedded_field.db_list_field.2.db_string_field'])
        self.assertEquals(doc.embedded_field._delta(),
                          ({'db_list_field.2.db_string_field': 'world'}, {}))
        self.assertEquals(doc._delta(),
            ({'db_embedded_field.db_list_field.2.db_string_field': 'world'}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEquals(doc._get_changed_fields(),
                          ['db_embedded_field.db_list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'db_list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'db_string_field': 'hello world',
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
                'db_dict_field': {'hello': 'world'}}]}, {}))
        self.assertEquals(doc._delta(), ({
            'db_embedded_field.db_list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'db_string_field': 'hello world',
                'db_int_field': 1,
                'db_list_field': ['1', 2, {'hello': 'world'}],
                'db_dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].string_field,
                          'hello world')

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEquals(doc._delta(),
            ({'db_embedded_field.db_list_field.2.db_list_field':
                [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEquals(doc._delta(),
            ({'db_embedded_field.db_list_field.2.db_list_field':
                [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].list_field,
                          [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort()
        doc.save()
        doc = doc.reload(10)
        self.assertEquals(doc.embedded_field.list_field[2].list_field,
                          [1, 2, {'hello': 'world'}])

        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEquals(doc._delta(),
            ({'db_embedded_field.db_list_field.2.db_list_field': [1, 2, {}]}, {}))
        doc.save()
        doc = doc.reload(10)

        del(doc.embedded_field.list_field[2].list_field)
        self.assertEquals(doc._delta(),
            ({}, {'db_embedded_field.db_list_field.2.db_list_field': 1}))

    def test_save_only_changed_fields(self):
        """Ensure save only sets / unsets changed fields
        """
        class User(self.Person):
            active = BooleanField(default=True)

        User.drop_collection()

        # Create person object and save it to the database
        user = User(name='Test User', age=30, active=True)
        user.save()
        user.reload()

        # Simulated Race condition: two in-memory copies of the same document
        # each change a different field; because save() only writes changed
        # fields, neither save clobbers the other's update.
        same_person = self.Person.objects.get()
        same_person.active = False

        user.age = 21
        user.save()

        same_person.name = 'User'
        same_person.save()

        person = self.Person.objects.get()
        self.assertEquals(person.name, 'User')
        self.assertEquals(person.age, 21)
        self.assertEquals(person.active, False)

    def test_save_only_changed_fields_recursive(self):
        """Ensure save only sets / unsets changed fields
        """
        class Comment(EmbeddedDocument):
            published = BooleanField(default=True)

        class User(self.Person):
            comments_dict = DictField()
            comments = ListField(EmbeddedDocumentField(Comment))
            active = BooleanField(default=True)

        User.drop_collection()

        # Create person object and save it to the database
        person = User(name='Test User', age=30, active=True)
        person.comments.append(Comment())
        person.save()
        person.reload()

        person = self.Person.objects.get()
        self.assertTrue(person.comments[0].published)

        # Change a field deep inside an embedded document in a list.
        person.comments[0].published = False
        person.save()

        person = self.Person.objects.get()
        self.assertFalse(person.comments[0].published)

        # Simple dict with an embedded document value
        person.comments_dict['first_post'] = Comment()
        person.save()

        person = self.Person.objects.get()
        self.assertTrue(person.comments_dict['first_post'].published)

        # Change a field inside an embedded document stored in a DictField.
        person.comments_dict['first_post'].published = False
        person.save()

        person = self.Person.objects.get()
        self.assertFalse(person.comments_dict['first_post'].published)

    def test_delete(self):
        """Ensure that document may be deleted using the delete method.
        """
        person = self.Person(name="Test User", age=30)
        person.save()
        self.assertEqual(len(self.Person.objects), 1)
        person.delete()
        self.assertEqual(len(self.Person.objects), 0)

    def test_save_custom_id(self):
        """Ensure that a document may be saved with a custom _id.
        """
        # Create person object and save it to the database
        person = self.Person(name='Test User', age=30,
                             id='497ce96f395f2f052a494fd4')
        person.save()
        # Ensure that the object is in the database with the correct _id
        collection = self.db[self.Person._get_collection_name()]
        person_obj = collection.find_one({'name': 'Test User'})
        self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4')

    def test_save_custom_pk(self):
        """Ensure that a document may be saved with a custom _id using pk alias.
        """
        # Create person object and save it to the database
        person = self.Person(name='Test User', age=30,
                             pk='497ce96f395f2f052a494fd4')
        person.save()
        # Ensure that the object is in the database with the correct _id
        collection = self.db[self.Person._get_collection_name()]
        person_obj = collection.find_one({'name': 'Test User'})
        self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4')

    def test_save_list(self):
        """Ensure that a list field may be properly saved.
        """
        class Comment(EmbeddedDocument):
            content = StringField()

        class BlogPost(Document):
            content = StringField()
            comments = ListField(EmbeddedDocumentField(Comment))
            tags = ListField(StringField())

        BlogPost.drop_collection()

        post = BlogPost(content='Went for a walk today...')
        post.tags = tags = ['fun', 'leisure']
        comments = [Comment(content='Good for you'), Comment(content='Yay.')]
        post.comments = comments
        post.save()

        # Verify the raw stored form via pymongo, bypassing MongoEngine.
        collection = self.db[BlogPost._get_collection_name()]
        post_obj = collection.find_one()
        self.assertEqual(post_obj['tags'], tags)
        for comment_obj, comment in zip(post_obj['comments'], comments):
            self.assertEqual(comment_obj['content'], comment['content'])

        BlogPost.drop_collection()

    def test_list_search_by_embedded(self):
        """Query a ListField of EmbeddedDocuments by a reference held inside
        the embedded document (with an index declared on 'comments.user').
        """
        class User(Document):
            username = StringField(required=True)

            meta = {'allow_inheritance': False}

        class Comment(EmbeddedDocument):
            comment = StringField()
            user = ReferenceField(User, required=True)

            meta = {'allow_inheritance': False}

        class Page(Document):
            comments = ListField(EmbeddedDocumentField(Comment))
            meta = {'allow_inheritance': False,
                    'indexes': [
                        {'fields': ['comments.user']}
                    ]}

        User.drop_collection()
        Page.drop_collection()

        u1 = User(username="wilson")
        u1.save()

        u2 = User(username="rozza")
        u2.save()

        u3 = User(username="hmarr")
        u3.save()

        p1 = Page(comments=[Comment(user=u1, comment="Its very good"),
                            Comment(user=u2, comment="Hello world"),
                            Comment(user=u3, comment="Ping Pong"),
                            Comment(user=u1, comment="I like a beer")])
        p1.save()

        p2 = Page(comments=[Comment(user=u1, comment="Its very good"),
                            Comment(user=u2, comment="Hello world")])
        p2.save()

        p3 = Page(comments=[Comment(user=u3, comment="Its very good")])
        p3.save()

        p4 = Page(comments=[Comment(user=u2, comment="Heavy Metal song")])
        p4.save()

        self.assertEqual([p1, p2], list(Page.objects.filter(comments__user=u1)))
        self.assertEqual([p1, p2, p4], list(Page.objects.filter(comments__user=u2)))
        self.assertEqual([p1, p3], list(Page.objects.filter(comments__user=u3)))

    def test_save_embedded_document(self):
        """Ensure that a document with an embedded document field may be
        saved in the database.
        """
        class EmployeeDetails(EmbeddedDocument):
            position = StringField()

        class Employee(self.Person):
            salary = IntField()
            details = EmbeddedDocumentField(EmployeeDetails)

        # Create employee object and save it to the database
        employee = Employee(name='Test Employee', age=50, salary=20000)
        employee.details = EmployeeDetails(position='Developer')
        employee.save()

        # Ensure that the object is in the database
        collection = self.db[self.Person._get_collection_name()]
        employee_obj = collection.find_one({'name': 'Test Employee'})
        self.assertEqual(employee_obj['name'], 'Test Employee')
        self.assertEqual(employee_obj['age'], 50)
        # Ensure that the 'details' embedded object saved correctly
        self.assertEqual(employee_obj['details']['position'], 'Developer')

    def test_embedded_update_after_save(self):
        """
        Test update of `EmbeddedDocumentField` attached to a newly saved
        document.
        """
        class Page(EmbeddedDocument):
            log_message = StringField(verbose_name="Log message",
                                      required=True)

        class Site(Document):
            page = EmbeddedDocumentField(Page)

        Site.drop_collection()
        site = Site(page=Page(log_message="Warning: Dummy message"))
        site.save()

        # Update
        site.page.log_message = "Error: Dummy message"
        site.save()

        site = Site.objects.first()
        self.assertEqual(site.page.log_message, "Error: Dummy message")

    def test_updating_an_embedded_document(self):
        """Ensure that a document with an embedded document field may be
        saved in the database.
        """
        class EmployeeDetails(EmbeddedDocument):
            position = StringField()

        class Employee(self.Person):
            salary = IntField()
            details = EmbeddedDocumentField(EmployeeDetails)

        # Create employee object and save it to the database
        employee = Employee(name='Test Employee', age=50, salary=20000)
        employee.details = EmployeeDetails(position='Developer')
        employee.save()

        # Test updating an embedded document
        promoted_employee = Employee.objects.get(name='Test Employee')
        promoted_employee.details.position = 'Senior Developer'
        promoted_employee.save()

        promoted_employee.reload()
        self.assertEqual(promoted_employee.name, 'Test Employee')
        self.assertEqual(promoted_employee.age, 50)

        # Ensure that the 'details' embedded object saved correctly
        self.assertEqual(promoted_employee.details.position,
                         'Senior Developer')

        # Test removal: setting the embedded field to None unsets it.
        promoted_employee.details = None
        promoted_employee.save()

        promoted_employee.reload()
        self.assertEqual(promoted_employee.details, None)

    def test_mixins_dont_add_to_types(self):
        """A plain-object mixin base must not appear in the stored _cls /
        _types markers.  (Mixin is a fixture defined elsewhere in this file.)
        """
        class Bob(Document):
            name = StringField()

        Bob.drop_collection()

        p = Bob(name="Rozza")
        p.save()
        Bob.drop_collection()

        class Person(Document, Mixin):
            pass

        Person.drop_collection()

        p = Person(name="Rozza")
        p.save()
        self.assertEquals(p._fields.keys(), ['name', 'id'])

        collection = self.db[Person._get_collection_name()]
        obj = collection.find_one()
        # Only 'Person' in the class markers — no trace of the mixin.
        self.assertEquals(obj['_cls'], 'Person')
        self.assertEquals(obj['_types'], ['Person'])

        self.assertEquals(Person.objects.count(), 1)
        rozza = Person.objects.get(name="Rozza")

        Person.drop_collection()

    def test_mixin_inheritance(self):
        """Fields from a chain of plain-object mixins are inherited by the
        Document subclass.
        """
        class BaseMixIn(object):
            count = IntField()
            data = StringField()

        class DoubleMixIn(BaseMixIn):
            comment = StringField()

        class TestDoc(Document, DoubleMixIn):
            age = IntField()

        TestDoc.drop_collection()
        t = TestDoc(count=12, data="test",
                    comment="great!", age=19)

        t.save()

        t = TestDoc.objects.first()

        self.assertEquals(t.age, 19)
        self.assertEquals(t.comment, "great!")
        self.assertEquals(t.data, "test")
        self.assertEquals(t.count, 12)
    def test_save_reference(self):
        """Ensure that a document reference field may be saved in the
        database.
        """
        class BlogPost(Document):
            meta = {'collection': 'blogpost_1'}
            content = StringField()
            author = ReferenceField(self.Person)

        BlogPost.drop_collection()

        author = self.Person(name='Test User')
        author.save()

        post = BlogPost(content='Watched some TV today... how exciting.')
        # Should only reference author when saving
        post.author = author
        post.save()

        post_obj = BlogPost.objects.first()

        # Test laziness: the raw data holds a DBRef until the attribute is
        # first accessed, at which point it is dereferenced.
        self.assertTrue(isinstance(post_obj._data['author'], bson.DBRef))
        self.assertTrue(isinstance(post_obj.author, self.Person))
        self.assertEqual(post_obj.author.name, 'Test User')

        # Ensure that the dereferenced object may be changed and saved
        post_obj.author.age = 25
        post_obj.author.save()

        author = list(self.Person.objects(name='Test User'))[-1]
        self.assertEqual(author.age, 25)

        BlogPost.drop_collection()

    def test_cannot_perform_joins_references(self):
        """Querying across a reference ('author__name') is a join and must
        raise InvalidQueryError, for plain and generic references alike.
        """
        class BlogPost(Document):
            author = ReferenceField(self.Person)
            author2 = GenericReferenceField()

        def test_reference():
            list(BlogPost.objects(author__name="test"))

        self.assertRaises(InvalidQueryError, test_reference)

        def test_generic_reference():
            list(BlogPost.objects(author2__name="test"))

        self.assertRaises(InvalidQueryError, test_generic_reference)

    def test_duplicate_db_fields_raise_invalid_document_error(self):
        """Ensure a InvalidDocumentError is thrown if duplicate fields
        declare the same db_field"""
        def throw_invalid_document_error():
            class Foo(Document):
                name = StringField()
                name2 = StringField(db_field='name')

        self.assertRaises(InvalidDocumentError, throw_invalid_document_error)

    def test_invalid_son(self):
        """Raise an error if loading invalid data"""
        class Occurrence(EmbeddedDocument):
            number = IntField()

        class Word(Document):
            stem = StringField()
            count = IntField(default=1)
            forms = ListField(StringField(), default=list)
            occurs = ListField(EmbeddedDocumentField(Occurrence), default=list)

        def raise_invalid_document():
            # Every value here has the wrong type for its field.
            Word._from_son({'stem': [1, 2, 3], 'forms': 1, 'count': 'one',
                            'occurs': {"hello": None}})

        self.assertRaises(InvalidDocumentError, raise_invalid_document)

    def test_reverse_delete_rule_cascade_and_nullify(self):
        """Ensure that a referenced document is also deleted upon deletion.
        """
        class BlogPost(Document):
            content = StringField()
            author = ReferenceField(self.Person, reverse_delete_rule=CASCADE)
            reviewer = ReferenceField(self.Person, reverse_delete_rule=NULLIFY)

        self.Person.drop_collection()
        BlogPost.drop_collection()

        author = self.Person(name='Test User')
        author.save()

        reviewer = self.Person(name='Re Viewer')
        reviewer.save()

        post = BlogPost(content='Watched some TV')
        post.author = author
        post.reviewer = reviewer
        post.save()

        # NULLIFY: deleting the reviewer clears the reference only.
        reviewer.delete()
        self.assertEqual(len(BlogPost.objects), 1)  # No effect on the BlogPost
        self.assertEqual(BlogPost.objects.get().reviewer, None)

        # Delete the Person, which should lead to deletion of the BlogPost, too
        author.delete()
        self.assertEqual(len(BlogPost.objects), 0)

    def test_reverse_delete_rule_cascade_and_nullify_complex_field(self):
        """Ensure that a referenced document is also deleted upon deletion.
        """
        class BlogPost(Document):
            content = StringField()
            authors = ListField(ReferenceField(self.Person,
                reverse_delete_rule=CASCADE))
            reviewers = ListField(ReferenceField(self.Person,
                reverse_delete_rule=NULLIFY))

        self.Person.drop_collection()
        BlogPost.drop_collection()

        author = self.Person(name='Test User')
        author.save()

        reviewer = self.Person(name='Re Viewer')
        reviewer.save()

        post = BlogPost(content='Watched some TV')
        post.authors = [author]
        post.reviewers = [reviewer]
        post.save()

        # NULLIFY inside a ListField removes the entry from the list.
        reviewer.delete()
        self.assertEqual(len(BlogPost.objects), 1)  # No effect on the BlogPost
        self.assertEqual(BlogPost.objects.get().reviewers, [])

        # Delete the Person, which should lead to deletion of the BlogPost, too
        author.delete()
        self.assertEqual(len(BlogPost.objects), 0)

    def test_two_way_reverse_delete_rule(self):
        """Ensure that Bi-Directional relationships work with
        reverse_delete_rule
        """
        class Bar(Document):
            content = StringField()
            foo = ReferenceField('Foo')

        class Foo(Document):
            content = StringField()
            bar = ReferenceField(Bar)

        # Rules registered after the fact because of the forward reference.
        Bar.register_delete_rule(Foo, 'bar', NULLIFY)
        Foo.register_delete_rule(Bar, 'foo', NULLIFY)

        Bar.drop_collection()
        Foo.drop_collection()

        b = Bar(content="Hello")
        b.save()

        f = Foo(content="world", bar=b)
        f.save()

        b.foo = f
        b.save()

        f.delete()

        self.assertEqual(len(Bar.objects), 1)  # No effect on the Bar itself
        self.assertEqual(Bar.objects.get().foo, None)

    def test_invalid_reverse_delete_rules_raise_errors(self):
        """reverse_delete_rule is not supported inside Map/Dict fields or
        inside EmbeddedDocuments, and must raise InvalidDocumentError.
        """
        def throw_invalid_document_error():
            class Blog(Document):
                content = StringField()
                authors = MapField(ReferenceField(self.Person,
                    reverse_delete_rule=CASCADE))
                reviewers = DictField(field=ReferenceField(self.Person,
                    reverse_delete_rule=NULLIFY))

        self.assertRaises(InvalidDocumentError, throw_invalid_document_error)

        def throw_invalid_document_error_embedded():
            class Parents(EmbeddedDocument):
                father = ReferenceField('Person', reverse_delete_rule=DENY)
                mother = ReferenceField('Person', reverse_delete_rule=DENY)

        self.assertRaises(InvalidDocumentError,
                          throw_invalid_document_error_embedded)

    def test_reverse_delete_rule_cascade_recurs(self):
        """Ensure that a chain of documents is also deleted upon cascaded
        deletion.
        """
        class BlogPost(Document):
            content = StringField()
            author = ReferenceField(self.Person, reverse_delete_rule=CASCADE)

        class Comment(Document):
            text = StringField()
            post = ReferenceField(BlogPost, reverse_delete_rule=CASCADE)

        self.Person.drop_collection()
        BlogPost.drop_collection()
        Comment.drop_collection()

        author = self.Person(name='Test User')
        author.save()

        post = BlogPost(content='Watched some TV')
        post.author = author
        post.save()

        comment = Comment(text='Kudos.')
        comment.post = post
        comment.save()

        # Delete the Person, which should lead to deletion of the BlogPost,
        # and, recursively to the Comment, too
        author.delete()
        self.assertEqual(len(Comment.objects), 0)

        self.Person.drop_collection()
        BlogPost.drop_collection()
        Comment.drop_collection()

    def test_reverse_delete_rule_deny(self):
        """Ensure that a document cannot be deleted while other documents
        still refer to it through a DENY reverse_delete_rule reference.
        """
        class BlogPost(Document):
            content = StringField()
            author = ReferenceField(self.Person, reverse_delete_rule=DENY)

        self.Person.drop_collection()
        BlogPost.drop_collection()

        author = self.Person(name='Test User')
        author.save()

        post = BlogPost(content='Watched some TV')
        post.author = author
        post.save()

        # Delete the Person should be denied
        self.assertRaises(OperationError, author.delete)  # Should raise denied error
        self.assertEqual(len(BlogPost.objects), 1)  # No objects may have been deleted
        self.assertEqual(len(self.Person.objects), 1)

        # Other users, that don't have BlogPosts must be removable, like normal
        author = self.Person(name='Another User')
        author.save()

        self.assertEqual(len(self.Person.objects), 2)
        author.delete()
        self.assertEqual(len(self.Person.objects), 1)

        self.Person.drop_collection()
        BlogPost.drop_collection()

    # NOTE(review): this method is missing the 'test_' prefix, so unittest
    # never collects or runs it.  Before renaming it to enable it, confirm the
    # assertions still hold — with inheritance enabled, A.objects may also
    # match B documents, which would make A.objects.count() == 3, not 2.
    def subclasses_and_unique_keys_works(self):

        class A(Document):
            pass

        class B(A):
            foo = BooleanField(unique=True)

        A.drop_collection()
        B.drop_collection()

        A().save()
        A().save()
        B(foo=True).save()

        self.assertEquals(A.objects.count(), 2)
        self.assertEquals(B.objects.count(), 1)
        A.drop_collection()
        B.drop_collection()

    def test_document_hash(self):
        """Test document in list, dict, set
        """
        class User(Document):
            pass

        class BlogPost(Document):
            pass

        # Clear old datas
        User.drop_collection()
        BlogPost.drop_collection()

        u1 = User.objects.create()
        u2 = User.objects.create()
        u3 = User.objects.create()
        u4 = User()  # New object

        b1 = BlogPost.objects.create()
        b2 = BlogPost.objects.create()

        # in List
        all_user_list = list(User.objects.all())

        self.assertTrue(u1 in all_user_list)
        self.assertTrue(u2 in all_user_list)
        self.assertTrue(u3 in all_user_list)
        self.assertFalse(u4 in all_user_list)  # New object
        self.assertFalse(b1 in all_user_list)  # Other object
        self.assertFalse(b2 in all_user_list)  # Other object

        # in Dict
        all_user_dic = {}
        for u in User.objects.all():
            all_user_dic[u] = "OK"

        self.assertEqual(all_user_dic.get(u1, False), "OK")
self.assertEqual(all_user_dic.get(u2, False), "OK" ) self.assertEqual(all_user_dic.get(u3, False), "OK" ) self.assertEqual(all_user_dic.get(u4, False), False ) # New object self.assertEqual(all_user_dic.get(b1, False), False ) # Other object self.assertEqual(all_user_dic.get(b2, False), False ) # Other object # in Set all_user_set = set(User.objects.all()) self.assertTrue(u1 in all_user_set ) def test_picklable(self): pickle_doc = PickleTest(number=1, string="One", lists=['1', '2']) pickle_doc.embedded = PickleEmbedded() pickle_doc.save() pickled_doc = pickle.dumps(pickle_doc) resurrected = pickle.loads(pickled_doc) self.assertEquals(resurrected, pickle_doc) resurrected.string = "Two" resurrected.save() pickle_doc = pickle_doc.reload() self.assertEquals(resurrected, pickle_doc) def test_throw_invalid_document_error(self): # test handles people trying to upsert def throw_invalid_document_error(): class Blog(Document): validate = DictField() self.assertRaises(InvalidDocumentError, throw_invalid_document_error) def test_mutating_documents(self): class B(EmbeddedDocument): field1 = StringField(default='field1') class A(Document): b = EmbeddedDocumentField(B, default=lambda: B()) A.drop_collection() a = A() a.save() a.reload() self.assertEquals(a.b.field1, 'field1') class C(EmbeddedDocument): c_field = StringField(default='cfield') class B(EmbeddedDocument): field1 = StringField(default='field1') field2 = EmbeddedDocumentField(C, default=lambda: C()) class A(Document): b = EmbeddedDocumentField(B, default=lambda: B()) a = A.objects()[0] a.b.field2.c_field = 'new value' a.save() a.reload() self.assertEquals(a.b.field2.c_field, 'new value') def test_can_save_false_values(self): """Ensures you can save False values on save""" class Doc(Document): foo = StringField() archived = BooleanField(default=False, required=True) Doc.drop_collection() d = Doc() d.save() d.archived = False d.save() self.assertEquals(Doc.objects(archived=False).count(), 1) def 
test_can_save_false_values_dynamic(self): """Ensures you can save False values on dynamic docs""" class Doc(DynamicDocument): foo = StringField() Doc.drop_collection() d = Doc() d.save() d.archived = False d.save() self.assertEquals(Doc.objects(archived=False).count(), 1) def test_do_not_save_unchanged_references(self): """Ensures cascading saves dont auto update""" class Job(Document): name = StringField() class Person(Document): name = StringField() age = IntField() job = ReferenceField(Job) Job.drop_collection() Person.drop_collection() job = Job(name="Job 1") # job should not have any changed fields after the save job.save() person = Person(name="name", age=10, job=job) from pymongo.collection import Collection orig_update = Collection.update try: def fake_update(*args, **kwargs): self.fail("Unexpected update for %s" % args[0].name) return orig_update(*args, **kwargs) Collection.update = fake_update person.save() finally: Collection.update = orig_update def test_db_alias_tests(self): """ DB Alias tests """ # mongoenginetest - Is default connection alias from setUp() # Register Aliases register_connection('testdb-1', 'mongoenginetest2') register_connection('testdb-2', 'mongoenginetest3') register_connection('testdb-3', 'mongoenginetest4') class User(Document): name = StringField() meta = {"db_alias": "testdb-1"} class Book(Document): name = StringField() meta = {"db_alias": "testdb-2"} # Drops User.drop_collection() Book.drop_collection() # Create bob = User.objects.create(name="Bob") hp = Book.objects.create(name="Harry Potter") # Selects self.assertEqual(User.objects.first(), bob) self.assertEqual(Book.objects.first(), hp) # DeRefecence class AuthorBooks(Document): author = ReferenceField(User) book = ReferenceField(Book) meta = {"db_alias": "testdb-3"} # Drops AuthorBooks.drop_collection() ab = AuthorBooks.objects.create(author=bob, book=hp) # select self.assertEqual(AuthorBooks.objects.first(), ab) self.assertEqual(AuthorBooks.objects.first().book, hp) 
        self.assertEqual(AuthorBooks.objects.first().author, bob)
        self.assertEqual(AuthorBooks.objects.filter(author=bob).first(), ab)
        self.assertEqual(AuthorBooks.objects.filter(book=hp).first(), ab)

        # DB Alias
        # Each model must resolve to the database registered under its alias.
        self.assertEqual(User._get_db(), get_db("testdb-1"))
        self.assertEqual(Book._get_db(), get_db("testdb-2"))
        self.assertEqual(AuthorBooks._get_db(), get_db("testdb-3"))

        # Collections
        # And the backing pymongo collection must live in that same database.
        self.assertEqual(User._get_collection(), get_db("testdb-1")[User._get_collection_name()])
        self.assertEqual(Book._get_collection(), get_db("testdb-2")[Book._get_collection_name()])
        self.assertEqual(AuthorBooks._get_collection(), get_db("testdb-3")[AuthorBooks._get_collection_name()])

    def test_db_ref_usage(self):
        """ DB Ref usage in __raw__ queries """

        class User(Document):
            name = StringField()

        class Book(Document):
            name = StringField()
            author = ReferenceField(User)
            extra = DictField()
            meta = {
                'ordering': ['+name']
            }

            def __unicode__(self):
                return self.name

            def __str__(self):
                return self.name

        # Drops
        User.drop_collection()
        Book.drop_collection()

        # Authors
        bob = User.objects.create(name="Bob")
        jon = User.objects.create(name="Jon")

        # Redactors
        karl = User.objects.create(name="Karl")
        susan = User.objects.create(name="Susan")
        peter = User.objects.create(name="Peter")

        # Bob
        # `extra` mixes bare DBRefs and lists of DBRefs so the Q queries below
        # exercise both scalar and list matching against reference values.
        Book.objects.create(name="1", author=bob, extra={"a": bob.to_dbref(), "b": [karl.to_dbref(), susan.to_dbref()]})
        Book.objects.create(name="2", author=bob, extra={"a": bob.to_dbref(), "b": karl.to_dbref()} )
        Book.objects.create(name="3", author=bob, extra={"a": bob.to_dbref(), "c": [jon.to_dbref(), peter.to_dbref()]})
        Book.objects.create(name="4", author=bob)

        # Jon
        Book.objects.create(name="5", author=jon)
        Book.objects.create(name="6", author=peter)
        Book.objects.create(name="7", author=jon)
        Book.objects.create(name="8", author=jon)
        Book.objects.create(name="9", author=jon, extra={"a": peter.to_dbref()})

        # Checks
        # meta['ordering'] on name gives a deterministic "1".."9" order.
        self.assertEqual(u",".join([str(b) for b in Book.objects.all()] ) , "1,2,3,4,5,6,7,8,9" )

        # bob related books
self.assertEqual(u",".join([str(b) for b in Book.objects.filter( Q(extra__a=bob ) | Q(author=bob) | Q(extra__b=bob))]) , "1,2,3,4") # Susan & Karl related books self.assertEqual(u",".join([str(b) for b in Book.objects.filter( Q(extra__a__all=[karl, susan] ) | Q(author__all=[karl, susan ] ) | Q(extra__b__all=[karl.to_dbref(), susan.to_dbref()] ) ) ] ) , "1" ) # $Where self.assertEqual(u",".join([str(b) for b in Book.objects.filter( __raw__={ "$where": """ function(){ return this.name == '1' || this.name == '2';}""" } ) ]), "1,2") class ValidatorErrorTest(unittest.TestCase): def test_to_dict(self): """Ensure a ValidationError handles error to_dict correctly. """ error = ValidationError('root') self.assertEquals(error.to_dict(), {}) # 1st level error schema error.errors = {'1st': ValidationError('bad 1st'), } self.assertTrue('1st' in error.to_dict()) self.assertEquals(error.to_dict()['1st'], 'bad 1st') # 2nd level error schema error.errors = {'1st': ValidationError('bad 1st', errors={ '2nd': ValidationError('bad 2nd'), })} self.assertTrue('1st' in error.to_dict()) self.assertTrue(isinstance(error.to_dict()['1st'], dict)) self.assertTrue('2nd' in error.to_dict()['1st']) self.assertEquals(error.to_dict()['1st']['2nd'], 'bad 2nd') # moar levels error.errors = {'1st': ValidationError('bad 1st', errors={ '2nd': ValidationError('bad 2nd', errors={ '3rd': ValidationError('bad 3rd', errors={ '4th': ValidationError('Inception'), }), }), })} self.assertTrue('1st' in error.to_dict()) self.assertTrue('2nd' in error.to_dict()['1st']) self.assertTrue('3rd' in error.to_dict()['1st']['2nd']) self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd']) self.assertEquals(error.to_dict()['1st']['2nd']['3rd']['4th'], 'Inception') self.assertEquals(error.message, "root:\n1st.2nd.3rd.4th: Inception") def test_model_validation(self): class User(Document): username = StringField(primary_key=True) name = StringField(required=True) try: User().validate() except ValidationError, e: 
expected_error_message = """Errors encountered validating document: username: Field is required ("username") name: Field is required ("name")""" self.assertEquals(e.message, expected_error_message) self.assertEquals(e.to_dict(), { 'username': 'Field is required ("username")', 'name': u'Field is required ("name")'}) def test_spaces_in_keys(self): class Embedded(DynamicEmbeddedDocument): pass class Doc(DynamicDocument): pass Doc.drop_collection() doc = Doc() setattr(doc, 'hello world', 1) doc.save() one = Doc.objects.filter(**{'hello world': 1}).count() self.assertEqual(1, one) if __name__ == '__main__': unittest.main() MongoEngine-mongoengine-7a1b110/tests/test_dynamic_document.py000066400000000000000000000430601177143177100246510ustar00rootroot00000000000000import unittest from mongoengine import * from mongoengine.connection import get_db class DynamicDocTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') self.db = get_db() class Person(DynamicDocument): name = StringField() meta = {'allow_inheritance': True} Person.drop_collection() self.Person = Person def test_simple_dynamic_document(self): """Ensures simple dynamic documents are saved correctly""" p = self.Person() p.name = "James" p.age = 34 self.assertEquals(p.to_mongo(), {"_types": ["Person"], "_cls": "Person", "name": "James", "age": 34} ) p.save() self.assertEquals(self.Person.objects.first().age, 34) # Confirm no changes to self.Person self.assertFalse(hasattr(self.Person, 'age')) def test_dynamic_document_delta(self): """Ensures simple dynamic documents can delta correctly""" p = self.Person(name="James", age=34) self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {})) p.doc = 123 del(p.doc) self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1})) def test_change_scope_of_variable(self): """Test changing the scope of a dynamic field has no adverse effects""" p = self.Person() 
p.name = "Dean" p.misc = 22 p.save() p = self.Person.objects.get() p.misc = {'hello': 'world'} p.save() p = self.Person.objects.get() self.assertEquals(p.misc, {'hello': 'world'}) def test_delete_dynamic_field(self): """Test deleting a dynamic field works""" self.Person.drop_collection() p = self.Person() p.name = "Dean" p.misc = 22 p.save() p = self.Person.objects.get() p.misc = {'hello': 'world'} p.save() p = self.Person.objects.get() self.assertEquals(p.misc, {'hello': 'world'}) collection = self.db[self.Person._get_collection_name()] obj = collection.find_one() self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name']) del(p.misc) p.save() p = self.Person.objects.get() self.assertFalse(hasattr(p, 'misc')) obj = collection.find_one() self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name']) def test_dynamic_document_queries(self): """Ensure we can query dynamic fields""" p = self.Person() p.name = "Dean" p.age = 22 p.save() self.assertEquals(1, self.Person.objects(age=22).count()) p = self.Person.objects(age=22) p = p.get() self.assertEquals(22, p.age) def test_complex_dynamic_document_queries(self): class Person(DynamicDocument): name = StringField() Person.drop_collection() p = Person(name="test") p.age = "ten" p.save() p1 = Person(name="test1") p1.age = "less then ten and a half" p1.save() p2 = Person(name="test2") p2.age = 10 p2.save() self.assertEquals(Person.objects(age__icontains='ten').count(), 2) self.assertEquals(Person.objects(age__gte=10).count(), 1) def test_complex_data_lookups(self): """Ensure you can query dynamic document dynamic fields""" p = self.Person() p.misc = {'hello': 'world'} p.save() self.assertEquals(1, self.Person.objects(misc__hello='world').count()) def test_inheritance(self): """Ensure that dynamic document plays nice with inheritance""" class Employee(self.Person): salary = IntField() Employee.drop_collection() self.assertTrue('name' in Employee._fields) self.assertTrue('salary' in 
Employee._fields) self.assertEqual(Employee._get_collection_name(), self.Person._get_collection_name()) joe_bloggs = Employee() joe_bloggs.name = "Joe Bloggs" joe_bloggs.salary = 10 joe_bloggs.age = 20 joe_bloggs.save() self.assertEquals(1, self.Person.objects(age=20).count()) self.assertEquals(1, Employee.objects(age=20).count()) joe_bloggs = self.Person.objects.first() self.assertTrue(isinstance(joe_bloggs, Employee)) def test_embedded_dynamic_document(self): """Test dynamic embedded documents""" class Embedded(DynamicEmbeddedDocument): pass class Doc(DynamicDocument): pass Doc.drop_collection() doc = Doc() embedded_1 = Embedded() embedded_1.string_field = 'hello' embedded_1.int_field = 1 embedded_1.dict_field = {'hello': 'world'} embedded_1.list_field = ['1', 2, {'hello': 'world'}] doc.embedded_field = embedded_1 self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", "embedded_field": { "_types": ['Embedded'], "_cls": "Embedded", "string_field": "hello", "int_field": 1, "dict_field": {"hello": "world"}, "list_field": ['1', 2, {'hello': 'world'}] } }) doc.save() doc = Doc.objects.first() self.assertEquals(doc.embedded_field.__class__, Embedded) self.assertEquals(doc.embedded_field.string_field, "hello") self.assertEquals(doc.embedded_field.int_field, 1) self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'}) self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}]) def test_complex_embedded_documents(self): """Test complex dynamic embedded documents setups""" class Embedded(DynamicEmbeddedDocument): pass class Doc(DynamicDocument): pass Doc.drop_collection() doc = Doc() embedded_1 = Embedded() embedded_1.string_field = 'hello' embedded_1.int_field = 1 embedded_1.dict_field = {'hello': 'world'} embedded_2 = Embedded() embedded_2.string_field = 'hello' embedded_2.int_field = 1 embedded_2.dict_field = {'hello': 'world'} embedded_2.list_field = ['1', 2, {'hello': 'world'}] embedded_1.list_field = ['1', 2, embedded_2] 
doc.embedded_field = embedded_1 self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", "embedded_field": { "_types": ['Embedded'], "_cls": "Embedded", "string_field": "hello", "int_field": 1, "dict_field": {"hello": "world"}, "list_field": ['1', 2, {"_types": ['Embedded'], "_cls": "Embedded", "string_field": "hello", "int_field": 1, "dict_field": {"hello": "world"}, "list_field": ['1', 2, {'hello': 'world'}]} ] } }) doc.save() doc = Doc.objects.first() self.assertEquals(doc.embedded_field.__class__, Embedded) self.assertEquals(doc.embedded_field.string_field, "hello") self.assertEquals(doc.embedded_field.int_field, 1) self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'}) self.assertEquals(doc.embedded_field.list_field[0], '1') self.assertEquals(doc.embedded_field.list_field[1], 2) embedded_field = doc.embedded_field.list_field[2] self.assertEquals(embedded_field.__class__, Embedded) self.assertEquals(embedded_field.string_field, "hello") self.assertEquals(embedded_field.int_field, 1) self.assertEquals(embedded_field.dict_field, {'hello': 'world'}) self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}]) def test_delta_for_dynamic_documents(self): p = self.Person() p.name = "Dean" p.age = 22 p.save() p.age = 24 self.assertEquals(p.age, 24) self.assertEquals(p._get_changed_fields(), ['age']) self.assertEquals(p._delta(), ({'age': 24}, {})) p = self.Person.objects(age=22).get() p.age = 24 self.assertEquals(p.age, 24) self.assertEquals(p._get_changed_fields(), ['age']) self.assertEquals(p._delta(), ({'age': 24}, {})) p.save() self.assertEquals(1, self.Person.objects(age=24).count()) def test_delta(self): class Doc(DynamicDocument): pass Doc.drop_collection() doc = Doc() doc.save() doc = Doc.objects.first() self.assertEquals(doc._get_changed_fields(), []) self.assertEquals(doc._delta(), ({}, {})) doc.string_field = 'hello' self.assertEquals(doc._get_changed_fields(), ['string_field']) self.assertEquals(doc._delta(), 
({'string_field': 'hello'}, {})) doc._changed_fields = [] doc.int_field = 1 self.assertEquals(doc._get_changed_fields(), ['int_field']) self.assertEquals(doc._delta(), ({'int_field': 1}, {})) doc._changed_fields = [] dict_value = {'hello': 'world', 'ping': 'pong'} doc.dict_field = dict_value self.assertEquals(doc._get_changed_fields(), ['dict_field']) self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {})) doc._changed_fields = [] list_value = ['1', 2, {'hello': 'world'}] doc.list_field = list_value self.assertEquals(doc._get_changed_fields(), ['list_field']) self.assertEquals(doc._delta(), ({'list_field': list_value}, {})) # Test unsetting doc._changed_fields = [] doc.dict_field = {} self.assertEquals(doc._get_changed_fields(), ['dict_field']) self.assertEquals(doc._delta(), ({}, {'dict_field': 1})) doc._changed_fields = [] doc.list_field = [] self.assertEquals(doc._get_changed_fields(), ['list_field']) self.assertEquals(doc._delta(), ({}, {'list_field': 1})) def test_delta_recursive(self): """Testing deltaing works with dynamic documents""" class Embedded(DynamicEmbeddedDocument): pass class Doc(DynamicDocument): pass Doc.drop_collection() doc = Doc() doc.save() doc = Doc.objects.first() self.assertEquals(doc._get_changed_fields(), []) self.assertEquals(doc._delta(), ({}, {})) embedded_1 = Embedded() embedded_1.string_field = 'hello' embedded_1.int_field = 1 embedded_1.dict_field = {'hello': 'world'} embedded_1.list_field = ['1', 2, {'hello': 'world'}] doc.embedded_field = embedded_1 self.assertEquals(doc._get_changed_fields(), ['embedded_field']) embedded_delta = { 'string_field': 'hello', 'int_field': 1, 'dict_field': {'hello': 'world'}, 'list_field': ['1', 2, {'hello': 'world'}] } self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {})) embedded_delta.update({ '_types': ['Embedded'], '_cls': 'Embedded', }) self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {})) doc.save() doc.reload() doc.embedded_field.dict_field = {} 
        # Clearing an embedded dict marks only the dotted sub-field as changed
        # and produces an unset (second tuple member) rather than a set.
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
        self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc.reload()

        # Same unset behaviour for an emptied embedded list.
        doc.embedded_field.list_field = []
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc.reload()

        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        # Replacing the whole list (now containing a nested embedded doc)
        # serialises the embedded doc with its _cls/_types markers.
        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        self.assertEquals(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                '_types': ['Embedded'],
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc.reload()

        # After reload the nested embedded doc is clean and round-trips intact.
        self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, [])
        self.assertEquals(doc.embedded_field.list_field[0], '1')
        self.assertEquals(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k])

        # Mutating a field of a list member is tracked by positional dotted path.
        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field'])
        self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {}))
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world')

        # Test multiple assignments
        # Re-assigning the list slot after a field change widens the delta to
        # the whole list element rather than the single dotted sub-field.
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field'])
        self.assertEquals(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}]}, {}))
        self.assertEquals(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_types': ['Embedded'],
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}
            ]}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world')

        # Test list native methods
        # pop/append/sort on a nested list must each be reflected in the delta.
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc.reload()

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1])

        doc.embedded_field.list_field[2].list_field.sort()
        doc.save()
        doc.reload()
        self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}])

        # Deleting a key inside a dict held in a nested list re-sends the list.
        del(doc.embedded_field.list_field[2].list_field[2]['hello'])
        self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {}))
        doc.save()
        doc.reload()

        # Deleting the nested list attribute itself becomes an unset.
        del(doc.embedded_field.list_field[2].list_field)
        self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc.reload()

        doc.dict_field = {'embedded': embedded_1}
        doc.save()
        doc.reload()
doc.dict_field['embedded'].string_field = 'Hello World' self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field']) self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {})) def test_indexes(self): """Ensure that indexes are used when meta[indexes] is specified. """ class BlogPost(DynamicDocument): meta = { 'indexes': [ '-date', ('category', '-date') ], } BlogPost.drop_collection() info = BlogPost.objects._collection.index_information() # _id, '-date', ('cat', 'date') # NB: there is no index on _types by itself, since # the indices on -date and tags will both contain # _types as first element in the key self.assertEqual(len(info), 3) # Indexes are lazy so use list() to perform query list(BlogPost.objects) info = BlogPost.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] self.assertTrue([('_types', 1), ('category', 1), ('date', -1)] in info) self.assertTrue([('_types', 1), ('date', -1)] in info) MongoEngine-mongoengine-7a1b110/tests/test_fields.py000066400000000000000000002040561177143177100226010ustar00rootroot00000000000000import datetime import os import unittest import uuid import StringIO import tempfile import gridfs from decimal import Decimal from mongoengine import * from mongoengine.connection import get_db from mongoengine.base import _document_registry, NotRegistered TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') class FieldTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') self.db = get_db() def tearDown(self): self.db.drop_collection('fs.files') self.db.drop_collection('fs.chunks') def test_default_values(self): """Ensure that default field values are used when creating a document. 
""" class Person(Document): name = StringField() age = IntField(default=30, help_text="Your real age") userid = StringField(default=lambda: 'test', verbose_name="User Identity") person = Person(name='Test Person') self.assertEqual(person._data['age'], 30) self.assertEqual(person._data['userid'], 'test') self.assertEqual(person._fields['name'].help_text, None) self.assertEqual(person._fields['age'].help_text, "Your real age") self.assertEqual(person._fields['userid'].verbose_name, "User Identity") def test_required_values(self): """Ensure that required field constraints are enforced. """ class Person(Document): name = StringField(required=True) age = IntField(required=True) userid = StringField() person = Person(name="Test User") self.assertRaises(ValidationError, person.validate) person = Person(age=30) self.assertRaises(ValidationError, person.validate) def test_not_required_handles_none_in_update(self): """Ensure that every fields should accept None if required is False. """ class HandleNoneFields(Document): str_fld = StringField() int_fld = IntField() flt_fld = FloatField() comp_dt_fld = ComplexDateTimeField() HandleNoneFields.drop_collection() doc = HandleNoneFields() doc.str_fld = u'spam ham egg' doc.int_fld = 42 doc.flt_fld = 4.2 doc.com_dt_fld = datetime.datetime.utcnow() doc.save() res = HandleNoneFields.objects(id=doc.id).update( set__str_fld=None, set__int_fld=None, set__flt_fld=None, set__comp_dt_fld=None, ) self.assertEqual(res, 1) # Retrive data from db and verify it. ret = HandleNoneFields.objects.all()[0] self.assertEqual(ret.str_fld, None) self.assertEqual(ret.int_fld, None) self.assertEqual(ret.flt_fld, None) # Return current time if retrived value is None. self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime)) def test_not_required_handles_none_from_database(self): """Ensure that every fields can handle null values from the database. 
""" class HandleNoneFields(Document): str_fld = StringField(required=True) int_fld = IntField(required=True) flt_fld = FloatField(required=True) comp_dt_fld = ComplexDateTimeField(required=True) HandleNoneFields.drop_collection() doc = HandleNoneFields() doc.str_fld = u'spam ham egg' doc.int_fld = 42 doc.flt_fld = 4.2 doc.com_dt_fld = datetime.datetime.utcnow() doc.save() collection = self.db[HandleNoneFields._get_collection_name()] obj = collection.update({"_id": doc.id}, {"$unset": { "str_fld": 1, "int_fld": 1, "flt_fld": 1, "comp_dt_fld": 1} }) # Retrive data from db and verify it. ret = HandleNoneFields.objects.all()[0] self.assertEqual(ret.str_fld, None) self.assertEqual(ret.int_fld, None) self.assertEqual(ret.flt_fld, None) # Return current time if retrived value is None. self.assert_(isinstance(ret.comp_dt_fld, datetime.datetime)) self.assertRaises(ValidationError, ret.validate) def test_object_id_validation(self): """Ensure that invalid values cannot be assigned to string fields. """ class Person(Document): name = StringField() person = Person(name='Test User') self.assertEqual(person.id, None) person.id = 47 self.assertRaises(ValidationError, person.validate) person.id = 'abc' self.assertRaises(ValidationError, person.validate) person.id = '497ce96f395f2f052a494fd4' person.validate() def test_string_validation(self): """Ensure that invalid values cannot be assigned to string fields. 
""" class Person(Document): name = StringField(max_length=20) userid = StringField(r'[0-9a-z_]+$') person = Person(name=34) self.assertRaises(ValidationError, person.validate) # Test regex validation on userid person = Person(userid='test.User') self.assertRaises(ValidationError, person.validate) person.userid = 'test_user' self.assertEqual(person.userid, 'test_user') person.validate() # Test max length validation on name person = Person(name='Name that is more than twenty characters') self.assertRaises(ValidationError, person.validate) person.name = 'Shorter name' person.validate() def test_url_validation(self): """Ensure that URLFields validate urls properly. """ class Link(Document): url = URLField() link = Link() link.url = 'google' self.assertRaises(ValidationError, link.validate) link.url = 'http://www.google.com:8080' link.validate() def test_int_validation(self): """Ensure that invalid values cannot be assigned to int fields. """ class Person(Document): age = IntField(min_value=0, max_value=110) person = Person() person.age = 50 person.validate() person.age = -1 self.assertRaises(ValidationError, person.validate) person.age = 120 self.assertRaises(ValidationError, person.validate) person.age = 'ten' self.assertRaises(ValidationError, person.validate) def test_float_validation(self): """Ensure that invalid values cannot be assigned to float fields. """ class Person(Document): height = FloatField(min_value=0.1, max_value=3.5) person = Person() person.height = 1.89 person.validate() person.height = '2.0' self.assertRaises(ValidationError, person.validate) person.height = 0.01 self.assertRaises(ValidationError, person.validate) person.height = 4.0 self.assertRaises(ValidationError, person.validate) def test_decimal_validation(self): """Ensure that invalid values cannot be assigned to decimal fields. 
""" class Person(Document): height = DecimalField(min_value=Decimal('0.1'), max_value=Decimal('3.5')) Person.drop_collection() person = Person() person.height = Decimal('1.89') person.save() person.reload() self.assertEqual(person.height, Decimal('1.89')) person.height = '2.0' person.save() person.height = 0.01 self.assertRaises(ValidationError, person.validate) person.height = Decimal('0.01') self.assertRaises(ValidationError, person.validate) person.height = Decimal('4.0') self.assertRaises(ValidationError, person.validate) Person.drop_collection() def test_boolean_validation(self): """Ensure that invalid values cannot be assigned to boolean fields. """ class Person(Document): admin = BooleanField() person = Person() person.admin = True person.validate() person.admin = 2 self.assertRaises(ValidationError, person.validate) person.admin = 'Yes' self.assertRaises(ValidationError, person.validate) def test_uuid_validation(self): """Ensure that invalid values cannot be assigned to UUID fields. """ class Person(Document): api_key = UUIDField() person = Person() # any uuid type is valid person.api_key = uuid.uuid4() person.validate() person.api_key = uuid.uuid1() person.validate() # last g cannot belong to an hex number person.api_key = '9d159858-549b-4975-9f98-dd2f987c113g' self.assertRaises(ValidationError, person.validate) # short strings don't validate person.api_key = '9d159858-549b-4975-9f98-dd2f987c113' self.assertRaises(ValidationError, person.validate) def test_datetime_validation(self): """Ensure that invalid values cannot be assigned to datetime fields. """ class LogEntry(Document): time = DateTimeField() log = LogEntry() log.time = datetime.datetime.now() log.validate() log.time = datetime.date.today() log.validate() log.time = -1 self.assertRaises(ValidationError, log.validate) log.time = '1pm' self.assertRaises(ValidationError, log.validate) def test_datetime(self): """Tests showing pymongo datetime fields handling of microseconds. 
Microseconds are rounded to the nearest millisecond and pre UTC handling is wonky. See: http://api.mongodb.org/python/current/api/bson/son.html#dt """ class LogEntry(Document): date = DateTimeField() LogEntry.drop_collection() # Test can save dates log = LogEntry() log.date = datetime.date.today() log.save() log.reload() self.assertEquals(log.date.date(), datetime.date.today()) LogEntry.drop_collection() # Post UTC - microseconds are rounded (down) nearest millisecond and dropped d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) d2 = datetime.datetime(1970, 01, 01, 00, 00, 01) log = LogEntry() log.date = d1 log.save() log.reload() self.assertNotEquals(log.date, d1) self.assertEquals(log.date, d2) # Post UTC - microseconds are rounded (down) nearest millisecond d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999) d2 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9000) log.date = d1 log.save() log.reload() self.assertNotEquals(log.date, d1) self.assertEquals(log.date, d2) # Pre UTC dates microseconds below 1000 are dropped d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) d2 = datetime.datetime(1969, 12, 31, 23, 59, 59) log.date = d1 log.save() log.reload() self.assertNotEquals(log.date, d1) self.assertEquals(log.date, d2) # Pre UTC microseconds above 1000 is wonky. # log.date has an invalid microsecond value so I can't construct # a date to compare. # # However, the timedelta is predicable with pre UTC timestamps # It always adds 16 seconds and [777216-776217] microseconds for i in xrange(1001, 3113, 33): d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i) log.date = d1 log.save() log.reload() self.assertNotEquals(log.date, d1) delta = log.date - d1 self.assertEquals(delta.seconds, 16) microseconds = 777216 - (i % 1000) self.assertEquals(delta.microseconds, microseconds) LogEntry.drop_collection() def test_complexdatetime_storage(self): """Tests for complex datetime fields - which can handle microseconds without rounding. 
""" class LogEntry(Document): date = ComplexDateTimeField() LogEntry.drop_collection() # Post UTC - microseconds are rounded (down) nearest millisecond and dropped - with default datetimefields d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) log = LogEntry() log.date = d1 log.save() log.reload() self.assertEquals(log.date, d1) # Post UTC - microseconds are rounded (down) nearest millisecond - with default datetimefields d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999) log.date = d1 log.save() log.reload() self.assertEquals(log.date, d1) # Pre UTC dates microseconds below 1000 are dropped - with default datetimefields d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) log.date = d1 log.save() log.reload() self.assertEquals(log.date, d1) # Pre UTC microseconds above 1000 is wonky - with default datetimefields # log.date has an invalid microsecond value so I can't construct # a date to compare. for i in xrange(1001, 3113, 33): d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i) log.date = d1 log.save() log.reload() self.assertEquals(log.date, d1) log1 = LogEntry.objects.get(date=d1) self.assertEqual(log, log1) LogEntry.drop_collection() def test_complexdatetime_usage(self): """Tests for complex datetime fields - which can handle microseconds without rounding. 
""" class LogEntry(Document): date = ComplexDateTimeField() LogEntry.drop_collection() d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 999) log = LogEntry() log.date = d1 log.save() log1 = LogEntry.objects.get(date=d1) self.assertEquals(log, log1) LogEntry.drop_collection() # create 60 log entries for i in xrange(1950, 2010): d = datetime.datetime(i, 01, 01, 00, 00, 01, 999) LogEntry(date=d).save() self.assertEqual(LogEntry.objects.count(), 60) # Test ordering logs = LogEntry.objects.order_by("date") count = logs.count() i = 0 while i == count - 1: self.assertTrue(logs[i].date <= logs[i + 1].date) i += 1 logs = LogEntry.objects.order_by("-date") count = logs.count() i = 0 while i == count - 1: self.assertTrue(logs[i].date >= logs[i + 1].date) i += 1 # Test searching logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) self.assertEqual(logs.count(), 30) logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1)) self.assertEqual(logs.count(), 30) logs = LogEntry.objects.filter( date__lte=datetime.datetime(2011, 1, 1), date__gte=datetime.datetime(2000, 1, 1), ) self.assertEqual(logs.count(), 10) LogEntry.drop_collection() def test_list_validation(self): """Ensure that a list field only accepts lists with valid elements. 
        """
        class User(Document):
            pass

        class Comment(EmbeddedDocument):
            content = StringField()

        class BlogPost(Document):
            content = StringField()
            comments = ListField(EmbeddedDocumentField(Comment))
            tags = ListField(StringField())
            authors = ListField(ReferenceField(User))
            generic = ListField(GenericReferenceField())

        post = BlogPost(content='Went for a walk today...')
        post.validate()

        # A bare string / wrong element type must be rejected...
        post.tags = 'fun'
        self.assertRaises(ValidationError, post.validate)
        post.tags = [1, 2]
        self.assertRaises(ValidationError, post.validate)

        # ...while both lists and tuples of valid elements are accepted.
        post.tags = ['fun', 'leisure']
        post.validate()
        post.tags = ('fun', 'leisure')
        post.validate()

        post.comments = ['a']
        self.assertRaises(ValidationError, post.validate)
        post.comments = 'yay'
        self.assertRaises(ValidationError, post.validate)

        comments = [Comment(content='Good for you'), Comment(content='Yay.')]
        post.comments = comments
        post.validate()

        # ReferenceField lists reject wrong document types and unsaved docs.
        post.authors = [Comment()]
        self.assertRaises(ValidationError, post.validate)

        post.authors = [User()]
        self.assertRaises(ValidationError, post.validate)

        user = User()
        user.save()
        post.authors = [user]
        post.validate()

        # GenericReferenceField lists only accept saved Document instances.
        post.generic = [1, 2]
        self.assertRaises(ValidationError, post.validate)

        post.generic = [User(), Comment()]
        self.assertRaises(ValidationError, post.validate)

        post.generic = [Comment()]
        self.assertRaises(ValidationError, post.validate)

        post.generic = [user]
        post.validate()

        User.drop_collection()
        BlogPost.drop_collection()

    def test_sorted_list_sorting(self):
        """Ensure that a sorted list field properly sorts values.
""" class Comment(EmbeddedDocument): order = IntField() content = StringField() class BlogPost(Document): content = StringField() comments = SortedListField(EmbeddedDocumentField(Comment), ordering='order') tags = SortedListField(StringField()) post = BlogPost(content='Went for a walk today...') post.save() post.tags = ['leisure', 'fun'] post.save() post.reload() self.assertEqual(post.tags, ['fun', 'leisure']) comment1 = Comment(content='Good for you', order=1) comment2 = Comment(content='Yay.', order=0) comments = [comment1, comment2] post.comments = comments post.save() post.reload() self.assertEqual(post.comments[0].content, comment2.content) self.assertEqual(post.comments[1].content, comment1.content) BlogPost.drop_collection() def test_reverse_list_sorting(self): '''Ensure that a reverse sorted list field properly sorts values''' class Category(EmbeddedDocument): count = IntField() name = StringField() class CategoryList(Document): categories = SortedListField(EmbeddedDocumentField(Category), ordering='count', reverse=True) name = StringField() catlist = CategoryList(name="Top categories") cat1 = Category(name='posts', count=10) cat2 = Category(name='food', count=100) cat3 = Category(name='drink', count=40) catlist.categories = [cat1, cat2, cat3] catlist.save() catlist.reload() self.assertEqual(catlist.categories[0].name, cat2.name) self.assertEqual(catlist.categories[1].name, cat3.name) self.assertEqual(catlist.categories[2].name, cat1.name) CategoryList.drop_collection() def test_list_field(self): """Ensure that list types work as expected. 
""" class BlogPost(Document): info = ListField() BlogPost.drop_collection() post = BlogPost() post.info = 'my post' self.assertRaises(ValidationError, post.validate) post.info = {'title': 'test'} self.assertRaises(ValidationError, post.validate) post.info = ['test'] post.save() post = BlogPost() post.info = [{'test': 'test'}] post.save() post = BlogPost() post.info = [{'test': 3}] post.save() self.assertEquals(BlogPost.objects.count(), 3) self.assertEquals(BlogPost.objects.filter(info__exact='test').count(), 1) self.assertEquals(BlogPost.objects.filter(info__0__test='test').count(), 1) # Confirm handles non strings or non existing keys self.assertEquals(BlogPost.objects.filter(info__0__test__exact='5').count(), 0) self.assertEquals(BlogPost.objects.filter(info__100__test__exact='test').count(), 0) BlogPost.drop_collection() def test_list_field_passed_in_value(self): class Foo(Document): bars = ListField(ReferenceField("Bar")) class Bar(Document): text = StringField() bar = Bar(text="hi") bar.save() foo = Foo(bars=[]) foo.bars.append(bar) self.assertEquals(repr(foo.bars), '[]') def test_list_field_strict(self): """Ensure that list field handles validation if provided a strict field type.""" class Simple(Document): mapping = ListField(field=IntField()) Simple.drop_collection() e = Simple() e.mapping = [1] e.save() def create_invalid_mapping(): e.mapping = ["abc"] e.save() self.assertRaises(ValidationError, create_invalid_mapping) Simple.drop_collection() def test_list_field_rejects_strings(self): """Strings aren't valid list field data types""" class Simple(Document): mapping = ListField() Simple.drop_collection() e = Simple() e.mapping = 'hello world' self.assertRaises(ValidationError, e.save) def test_complex_field_required(self): """Ensure required cant be None / Empty""" class Simple(Document): mapping = ListField(required=True) Simple.drop_collection() e = Simple() e.mapping = [] self.assertRaises(ValidationError, e.save) class Simple(Document): mapping = 
DictField(required=True) Simple.drop_collection() e = Simple() e.mapping = {} self.assertRaises(ValidationError, e.save) def test_list_field_complex(self): """Ensure that the list fields can handle the complex types.""" class SettingBase(EmbeddedDocument): pass class StringSetting(SettingBase): value = StringField() class IntegerSetting(SettingBase): value = IntField() class Simple(Document): mapping = ListField() Simple.drop_collection() e = Simple() e.mapping.append(StringSetting(value='foo')) e.mapping.append(IntegerSetting(value=42)) e.mapping.append({'number': 1, 'string': 'Hi!', 'float': 1.001, 'complex': IntegerSetting(value=42), 'list': [IntegerSetting(value=42), StringSetting(value='foo')]}) e.save() e2 = Simple.objects.get(id=e.id) self.assertTrue(isinstance(e2.mapping[0], StringSetting)) self.assertTrue(isinstance(e2.mapping[1], IntegerSetting)) # Test querying self.assertEquals(Simple.objects.filter(mapping__1__value=42).count(), 1) self.assertEquals(Simple.objects.filter(mapping__2__number=1).count(), 1) self.assertEquals(Simple.objects.filter(mapping__2__complex__value=42).count(), 1) self.assertEquals(Simple.objects.filter(mapping__2__list__0__value=42).count(), 1) self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1) # Confirm can update Simple.objects().update(set__mapping__1=IntegerSetting(value=10)) self.assertEquals(Simple.objects.filter(mapping__1__value=10).count(), 1) Simple.objects().update( set__mapping__2__list__1=StringSetting(value='Boo')) self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0) self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1) Simple.drop_collection() def test_dict_field(self): """Ensure that dict types work as expected. 
""" class BlogPost(Document): info = DictField() BlogPost.drop_collection() post = BlogPost() post.info = 'my post' self.assertRaises(ValidationError, post.validate) post.info = ['test', 'test'] self.assertRaises(ValidationError, post.validate) post.info = {'$title': 'test'} self.assertRaises(ValidationError, post.validate) post.info = {'the.title': 'test'} self.assertRaises(ValidationError, post.validate) post.info = {1: 'test'} self.assertRaises(ValidationError, post.validate) post.info = {'title': 'test'} post.save() post = BlogPost() post.info = {'details': {'test': 'test'}} post.save() post = BlogPost() post.info = {'details': {'test': 3}} post.save() self.assertEquals(BlogPost.objects.count(), 3) self.assertEquals(BlogPost.objects.filter(info__title__exact='test').count(), 1) self.assertEquals(BlogPost.objects.filter(info__details__test__exact='test').count(), 1) # Confirm handles non strings or non existing keys self.assertEquals(BlogPost.objects.filter(info__details__test__exact=5).count(), 0) self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0) post = BlogPost.objects.create(info={'title': 'original'}) post.info.update({'title': 'updated'}) post.save() post.reload() self.assertEquals('updated', post.info['title']) BlogPost.drop_collection() def test_dictfield_strict(self): """Ensure that dict field handles validation if provided a strict field type.""" class Simple(Document): mapping = DictField(field=IntField()) Simple.drop_collection() e = Simple() e.mapping['someint'] = 1 e.save() def create_invalid_mapping(): e.mapping['somestring'] = "abc" e.save() self.assertRaises(ValidationError, create_invalid_mapping) Simple.drop_collection() def test_dictfield_complex(self): """Ensure that the dict field can handle the complex types.""" class SettingBase(EmbeddedDocument): pass class StringSetting(SettingBase): value = StringField() class IntegerSetting(SettingBase): value = IntField() class Simple(Document): mapping = 
DictField() Simple.drop_collection() e = Simple() e.mapping['somestring'] = StringSetting(value='foo') e.mapping['someint'] = IntegerSetting(value=42) e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!', 'float': 1.001, 'complex': IntegerSetting(value=42), 'list': [IntegerSetting(value=42), StringSetting(value='foo')]} e.save() e2 = Simple.objects.get(id=e.id) self.assertTrue(isinstance(e2.mapping['somestring'], StringSetting)) self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting)) # Test querying self.assertEquals(Simple.objects.filter(mapping__someint__value=42).count(), 1) self.assertEquals(Simple.objects.filter(mapping__nested_dict__number=1).count(), 1) self.assertEquals(Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1) self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1) self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1) # Confirm can update Simple.objects().update( set__mapping={"someint": IntegerSetting(value=10)}) Simple.objects().update( set__mapping__nested_dict__list__1=StringSetting(value='Boo')) self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0) self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1) Simple.drop_collection() def test_mapfield(self): """Ensure that the MapField handles the declared type.""" class Simple(Document): mapping = MapField(IntField()) Simple.drop_collection() e = Simple() e.mapping['someint'] = 1 e.save() def create_invalid_mapping(): e.mapping['somestring'] = "abc" e.save() self.assertRaises(ValidationError, create_invalid_mapping) def create_invalid_class(): class NoDeclaredType(Document): mapping = MapField() self.assertRaises(ValidationError, create_invalid_class) Simple.drop_collection() def test_complex_mapfield(self): """Ensure that the MapField can handle complex declared types.""" class 
SettingBase(EmbeddedDocument): pass class StringSetting(SettingBase): value = StringField() class IntegerSetting(SettingBase): value = IntField() class Extensible(Document): mapping = MapField(EmbeddedDocumentField(SettingBase)) Extensible.drop_collection() e = Extensible() e.mapping['somestring'] = StringSetting(value='foo') e.mapping['someint'] = IntegerSetting(value=42) e.save() e2 = Extensible.objects.get(id=e.id) self.assertTrue(isinstance(e2.mapping['somestring'], StringSetting)) self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting)) def create_invalid_mapping(): e.mapping['someint'] = 123 e.save() self.assertRaises(ValidationError, create_invalid_mapping) Extensible.drop_collection() def test_embedded_mapfield_db_field(self): class Embedded(EmbeddedDocument): number = IntField(default=0, db_field='i') class Test(Document): my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field='x') Test.drop_collection() test = Test() test.my_map['DICTIONARY_KEY'] = Embedded(number=1) test.save() Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1) test = Test.objects.get() self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2) doc = self.db.test.find_one() self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2) def test_embedded_db_field(self): class Embedded(EmbeddedDocument): number = IntField(default=0, db_field='i') class Test(Document): embedded = EmbeddedDocumentField(Embedded, db_field='x') Test.drop_collection() test = Test() test.embedded = Embedded(number=1) test.save() Test.objects.update_one(inc__embedded__number=1) test = Test.objects.get() self.assertEqual(test.embedded.number, 2) doc = self.db.test.find_one() self.assertEqual(doc['x']['i'], 2) def test_embedded_document_validation(self): """Ensure that invalid embedded documents cannot be assigned to embedded document fields. 
""" class Comment(EmbeddedDocument): content = StringField() class PersonPreferences(EmbeddedDocument): food = StringField(required=True) number = IntField() class Person(Document): name = StringField() preferences = EmbeddedDocumentField(PersonPreferences) person = Person(name='Test User') person.preferences = 'My Preferences' self.assertRaises(ValidationError, person.validate) # Check that only the right embedded doc works person.preferences = Comment(content='Nice blog post...') self.assertRaises(ValidationError, person.validate) # Check that the embedded doc is valid person.preferences = PersonPreferences() self.assertRaises(ValidationError, person.validate) person.preferences = PersonPreferences(food='Cheese', number=47) self.assertEqual(person.preferences.food, 'Cheese') person.validate() def test_embedded_document_inheritance(self): """Ensure that subclasses of embedded documents may be provided to EmbeddedDocumentFields of the superclass' type. """ class User(EmbeddedDocument): name = StringField() class PowerUser(User): power = IntField() class BlogPost(Document): content = StringField() author = EmbeddedDocumentField(User) post = BlogPost(content='What I did today...') post.author = User(name='Test User') post.author = PowerUser(name='Test User', power=47) def test_reference_validation(self): """Ensure that invalid docment objects cannot be assigned to reference fields. 
""" class User(Document): name = StringField() class BlogPost(Document): content = StringField() author = ReferenceField(User) User.drop_collection() BlogPost.drop_collection() self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument) user = User(name='Test User') # Ensure that the referenced object must have been saved post1 = BlogPost(content='Chips and gravy taste good.') post1.author = user self.assertRaises(ValidationError, post1.save) # Check that an invalid object type cannot be used post2 = BlogPost(content='Chips and chilli taste good.') post1.author = post2 self.assertRaises(ValidationError, post1.validate) user.save() post1.author = user post1.save() post2.save() post1.author = post2 self.assertRaises(ValidationError, post1.validate) User.drop_collection() BlogPost.drop_collection() def test_list_item_dereference(self): """Ensure that DBRef items in ListFields are dereferenced. """ class User(Document): name = StringField() class Group(Document): members = ListField(ReferenceField(User)) User.drop_collection() Group.drop_collection() user1 = User(name='user1') user1.save() user2 = User(name='user2') user2.save() group = Group(members=[user1, user2]) group.save() group_obj = Group.objects.first() self.assertEqual(group_obj.members[0].name, user1.name) self.assertEqual(group_obj.members[1].name, user2.name) User.drop_collection() Group.drop_collection() def test_recursive_reference(self): """Ensure that ReferenceFields can reference their own documents. 
""" class Employee(Document): name = StringField() boss = ReferenceField('self') friends = ListField(ReferenceField('self')) bill = Employee(name='Bill Lumbergh') bill.save() michael = Employee(name='Michael Bolton') michael.save() samir = Employee(name='Samir Nagheenanajar') samir.save() friends = [michael, samir] peter = Employee(name='Peter Gibbons', boss=bill, friends=friends) peter.save() peter = Employee.objects.with_id(peter.id) self.assertEqual(peter.boss, bill) self.assertEqual(peter.friends, friends) def test_recursive_embedding(self): """Ensure that EmbeddedDocumentFields can contain their own documents. """ class Tree(Document): name = StringField() children = ListField(EmbeddedDocumentField('TreeNode')) class TreeNode(EmbeddedDocument): name = StringField() children = ListField(EmbeddedDocumentField('self')) Tree.drop_collection() tree = Tree(name="Tree") first_child = TreeNode(name="Child 1") tree.children.append(first_child) second_child = TreeNode(name="Child 2") first_child.children.append(second_child) tree.save() tree = Tree.objects.first() self.assertEqual(len(tree.children), 1) self.assertEqual(len(tree.children[0].children), 1) third_child = TreeNode(name="Child 3") tree.children[0].children.append(third_child) tree.save() self.assertEqual(len(tree.children), 1) self.assertEqual(tree.children[0].name, first_child.name) self.assertEqual(tree.children[0].children[0].name, second_child.name) self.assertEqual(tree.children[0].children[1].name, third_child.name) # Test updating tree.children[0].name = 'I am Child 1' tree.children[0].children[0].name = 'I am Child 2' tree.children[0].children[1].name = 'I am Child 3' tree.save() self.assertEqual(tree.children[0].name, 'I am Child 1') self.assertEqual(tree.children[0].children[0].name, 'I am Child 2') self.assertEqual(tree.children[0].children[1].name, 'I am Child 3') # Test removal self.assertEqual(len(tree.children[0].children), 2) del(tree.children[0].children[1]) tree.save() 
        self.assertEqual(len(tree.children[0].children), 1)

        tree.children[0].children.pop(0)
        tree.save()
        self.assertEqual(len(tree.children[0].children), 0)
        self.assertEqual(tree.children[0].children, [])

        # List insert on an embedded list is persisted in order.
        tree.children[0].children.insert(0, third_child)
        tree.children[0].children.insert(0, second_child)
        tree.save()
        self.assertEqual(len(tree.children[0].children), 2)
        self.assertEqual(tree.children[0].children[0].name, second_child.name)
        self.assertEqual(tree.children[0].children[1].name, third_child.name)

    def test_undefined_reference(self):
        """Ensure that ReferenceFields may reference undefined Documents.
        """
        class Product(Document):
            name = StringField()
            # 'Company' is referenced by name before it is defined.
            company = ReferenceField('Company')

        class Company(Document):
            name = StringField()

        Product.drop_collection()
        Company.drop_collection()

        ten_gen = Company(name='10gen')
        ten_gen.save()
        mongodb = Product(name='MongoDB', company=ten_gen)
        mongodb.save()

        me = Product(name='MongoEngine')
        me.save()

        obj = Product.objects(company=ten_gen).first()
        self.assertEqual(obj, mongodb)
        self.assertEqual(obj.company, ten_gen)

        # Querying for a missing reference matches documents without one.
        obj = Product.objects(company=None).first()
        self.assertEqual(obj, me)

        obj, created = Product.objects.get_or_create(company=None)
        self.assertEqual(created, False)
        self.assertEqual(obj, me)

    def test_reference_query_conversion(self):
        """Ensure that ReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
""" class Member(Document): user_num = IntField(primary_key=True) class BlogPost(Document): title = StringField() author = ReferenceField(Member) Member.drop_collection() BlogPost.drop_collection() m1 = Member(user_num=1) m1.save() m2 = Member(user_num=2) m2.save() post1 = BlogPost(title='post 1', author=m1) post1.save() post2 = BlogPost(title='post 2', author=m2) post2.save() post = BlogPost.objects(author=m1).first() self.assertEqual(post.id, post1.id) post = BlogPost.objects(author=m2).first() self.assertEqual(post.id, post2.id) Member.drop_collection() BlogPost.drop_collection() def test_generic_reference(self): """Ensure that a GenericReferenceField properly dereferences items. """ class Link(Document): title = StringField() meta = {'allow_inheritance': False} class Post(Document): title = StringField() class Bookmark(Document): bookmark_object = GenericReferenceField() Link.drop_collection() Post.drop_collection() Bookmark.drop_collection() link_1 = Link(title="Pitchfork") link_1.save() post_1 = Post(title="Behind the Scenes of the Pavement Reunion") post_1.save() bm = Bookmark(bookmark_object=post_1) bm.save() bm = Bookmark.objects(bookmark_object=post_1).first() self.assertEqual(bm.bookmark_object, post_1) self.assertTrue(isinstance(bm.bookmark_object, Post)) bm.bookmark_object = link_1 bm.save() bm = Bookmark.objects(bookmark_object=link_1).first() self.assertEqual(bm.bookmark_object, link_1) self.assertTrue(isinstance(bm.bookmark_object, Link)) Link.drop_collection() Post.drop_collection() Bookmark.drop_collection() def test_generic_reference_list(self): """Ensure that a ListField properly dereferences generic references. 
""" class Link(Document): title = StringField() class Post(Document): title = StringField() class User(Document): bookmarks = ListField(GenericReferenceField()) Link.drop_collection() Post.drop_collection() User.drop_collection() link_1 = Link(title="Pitchfork") link_1.save() post_1 = Post(title="Behind the Scenes of the Pavement Reunion") post_1.save() user = User(bookmarks=[post_1, link_1]) user.save() user = User.objects(bookmarks__all=[post_1, link_1]).first() self.assertEqual(user.bookmarks[0], post_1) self.assertEqual(user.bookmarks[1], link_1) Link.drop_collection() Post.drop_collection() User.drop_collection() def test_generic_reference_document_not_registered(self): """Ensure dereferencing out of the document registry throws a `NotRegistered` error. """ class Link(Document): title = StringField() class User(Document): bookmarks = ListField(GenericReferenceField()) Link.drop_collection() User.drop_collection() link_1 = Link(title="Pitchfork") link_1.save() user = User(bookmarks=[link_1]) user.save() # Mimic User and Link definitions being in a different file # and the Link model not being imported in the User file. 
del(_document_registry["Link"]) user = User.objects.first() try: user.bookmarks raise AssertionError("Link was removed from the registry") except NotRegistered: pass Link.drop_collection() User.drop_collection() def test_generic_reference_is_none(self): class Person(Document): name = StringField() city = GenericReferenceField() Person.drop_collection() Person(name="Wilson Jr").save() self.assertEquals(repr(Person.objects(city=None)), "[]") def test_generic_reference_choices(self): """Ensure that a GenericReferenceField can handle choices """ class Link(Document): title = StringField() class Post(Document): title = StringField() class Bookmark(Document): bookmark_object = GenericReferenceField(choices=(Post,)) Link.drop_collection() Post.drop_collection() Bookmark.drop_collection() link_1 = Link(title="Pitchfork") link_1.save() post_1 = Post(title="Behind the Scenes of the Pavement Reunion") post_1.save() bm = Bookmark(bookmark_object=link_1) self.assertRaises(ValidationError, bm.validate) bm = Bookmark(bookmark_object=post_1) bm.save() bm = Bookmark.objects.first() self.assertEqual(bm.bookmark_object, post_1) def test_generic_reference_list_choices(self): """Ensure that a ListField properly dereferences generic references and respects choices. """ class Link(Document): title = StringField() class Post(Document): title = StringField() class User(Document): bookmarks = ListField(GenericReferenceField(choices=(Post,))) Link.drop_collection() Post.drop_collection() User.drop_collection() link_1 = Link(title="Pitchfork") link_1.save() post_1 = Post(title="Behind the Scenes of the Pavement Reunion") post_1.save() user = User(bookmarks=[link_1]) self.assertRaises(ValidationError, user.validate) user = User(bookmarks=[post_1]) user.save() user = User.objects.first() self.assertEqual(user.bookmarks, [post_1]) Link.drop_collection() Post.drop_collection() User.drop_collection() def test_binary_fields(self): """Ensure that binary fields can be stored and retrieved. 
""" class Attachment(Document): content_type = StringField() blob = BinaryField() BLOB = '\xe6\x00\xc4\xff\x07' MIME_TYPE = 'application/octet-stream' Attachment.drop_collection() attachment = Attachment(content_type=MIME_TYPE, blob=BLOB) attachment.save() attachment_1 = Attachment.objects().first() self.assertEqual(MIME_TYPE, attachment_1.content_type) self.assertEqual(BLOB, attachment_1.blob) Attachment.drop_collection() def test_binary_validation(self): """Ensure that invalid values cannot be assigned to binary fields. """ class Attachment(Document): blob = BinaryField() class AttachmentRequired(Document): blob = BinaryField(required=True) class AttachmentSizeLimit(Document): blob = BinaryField(max_bytes=4) Attachment.drop_collection() AttachmentRequired.drop_collection() AttachmentSizeLimit.drop_collection() attachment = Attachment() attachment.validate() attachment.blob = 2 self.assertRaises(ValidationError, attachment.validate) attachment_required = AttachmentRequired() self.assertRaises(ValidationError, attachment_required.validate) attachment_required.blob = '\xe6\x00\xc4\xff\x07' attachment_required.validate() attachment_size_limit = AttachmentSizeLimit(blob='\xe6\x00\xc4\xff\x07') self.assertRaises(ValidationError, attachment_size_limit.validate) attachment_size_limit.blob = '\xe6\x00\xc4\xff' attachment_size_limit.validate() Attachment.drop_collection() AttachmentRequired.drop_collection() AttachmentSizeLimit.drop_collection() def test_choices_validation(self): """Ensure that value is in a container of allowed values. 
""" class Shirt(Document): size = StringField(max_length=3, choices=(('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) Shirt.drop_collection() shirt = Shirt() shirt.validate() shirt.size = "S" shirt.validate() shirt.size = "XS" self.assertRaises(ValidationError, shirt.validate) Shirt.drop_collection() def test_choices_get_field_display(self): """Test dynamic helper for returning the display value of a choices field. """ class Shirt(Document): size = StringField(max_length=3, choices=(('S', 'Small'), ('M', 'Medium'), ('L', 'Large'), ('XL', 'Extra Large'), ('XXL', 'Extra Extra Large'))) style = StringField(max_length=3, choices=(('S', 'Small'), ('B', 'Baggy'), ('W', 'wide')), default='S') Shirt.drop_collection() shirt = Shirt() self.assertEqual(shirt.get_size_display(), None) self.assertEqual(shirt.get_style_display(), 'Small') shirt.size = "XXL" shirt.style = "B" self.assertEqual(shirt.get_size_display(), 'Extra Extra Large') self.assertEqual(shirt.get_style_display(), 'Baggy') # Set as Z - an invalid choice shirt.size = "Z" shirt.style = "Z" self.assertEqual(shirt.get_size_display(), 'Z') self.assertEqual(shirt.get_style_display(), 'Z') self.assertRaises(ValidationError, shirt.validate) Shirt.drop_collection() def test_simple_choices_validation(self): """Ensure that value is in a container of allowed values. """ class Shirt(Document): size = StringField(max_length=3, choices=('S', 'M', 'L', 'XL', 'XXL')) Shirt.drop_collection() shirt = Shirt() shirt.validate() shirt.size = "S" shirt.validate() shirt.size = "XS" self.assertRaises(ValidationError, shirt.validate) Shirt.drop_collection() def test_simple_choices_get_field_display(self): """Test dynamic helper for returning the display value of a choices field. 
""" class Shirt(Document): size = StringField(max_length=3, choices=('S', 'M', 'L', 'XL', 'XXL')) style = StringField(max_length=3, choices=('Small', 'Baggy', 'wide'), default='Small') Shirt.drop_collection() shirt = Shirt() self.assertEqual(shirt.get_size_display(), None) self.assertEqual(shirt.get_style_display(), 'Small') shirt.size = "XXL" shirt.style = "Baggy" self.assertEqual(shirt.get_size_display(), 'XXL') self.assertEqual(shirt.get_style_display(), 'Baggy') # Set as Z - an invalid choice shirt.size = "Z" shirt.style = "Z" self.assertEqual(shirt.get_size_display(), 'Z') self.assertEqual(shirt.get_style_display(), 'Z') self.assertRaises(ValidationError, shirt.validate) Shirt.drop_collection() def test_file_fields(self): """Ensure that file fields can be written to and their data retrieved """ class PutFile(Document): file = FileField() class StreamFile(Document): file = FileField() class SetFile(Document): file = FileField() text = 'Hello, World!' more_text = 'Foo Bar' content_type = 'text/plain' PutFile.drop_collection() StreamFile.drop_collection() SetFile.drop_collection() putfile = PutFile() putfile.file.put(text, content_type=content_type) putfile.save() putfile.validate() result = PutFile.objects.first() self.assertTrue(putfile == result) self.assertEquals(result.file.read(), text) self.assertEquals(result.file.content_type, content_type) result.file.delete() # Remove file from GridFS PutFile.objects.delete() # Ensure file-like objects are stored putfile = PutFile() putstring = StringIO.StringIO() putstring.write(text) putstring.seek(0) putfile.file.put(putstring, content_type=content_type) putfile.save() putfile.validate() result = PutFile.objects.first() self.assertTrue(putfile == result) self.assertEquals(result.file.read(), text) self.assertEquals(result.file.content_type, content_type) result.file.delete() streamfile = StreamFile() streamfile.file.new_file(content_type=content_type) streamfile.file.write(text) streamfile.file.write(more_text) 
        streamfile.file.close()
        streamfile.save()
        streamfile.validate()
        result = StreamFile.objects.first()
        self.assertTrue(streamfile == result)
        self.assertEquals(result.file.read(), text + more_text)
        self.assertEquals(result.file.content_type, content_type)
        # The stored file supports random access via seek/tell.
        result.file.seek(0)
        self.assertEquals(result.file.tell(), 0)
        self.assertEquals(result.file.read(len(text)), text)
        self.assertEquals(result.file.tell(), len(text))
        self.assertEquals(result.file.read(len(more_text)), more_text)
        self.assertEquals(result.file.tell(), len(text + more_text))
        result.file.delete()

        # Ensure deleted file returns None
        self.assertTrue(result.file.read() == None)

        # Direct assignment to the field is equivalent to put()
        setfile = SetFile()
        setfile.file = text
        setfile.save()
        setfile.validate()
        result = SetFile.objects.first()
        self.assertTrue(setfile == result)
        self.assertEquals(result.file.read(), text)

        # Try replacing file with new one
        result.file.replace(more_text)
        result.save()
        result.validate()
        result = SetFile.objects.first()
        self.assertTrue(setfile == result)
        self.assertEquals(result.file.read(), more_text)
        result.file.delete()

        PutFile.drop_collection()
        StreamFile.drop_collection()
        SetFile.drop_collection()

        # Make sure FileField is optional and not required
        class DemoFile(Document):
            file = FileField()
        DemoFile.objects.create()

    def test_file_field_no_default(self):

        class GridDocument(Document):
            the_file = FileField()

        GridDocument.drop_collection()

        with tempfile.TemporaryFile() as f:

            f.write("Hello World!")
            f.flush()

            # Test without default
            doc_a = GridDocument()
            doc_a.save()

            doc_b = GridDocument.objects.with_id(doc_a.id)
            doc_b.the_file.replace(f, filename='doc_b')
            doc_b.save()
            self.assertNotEquals(doc_b.the_file.grid_id, None)

            # Test it matches
            doc_c = GridDocument.objects.with_id(doc_b.id)
            self.assertEquals(doc_b.the_file.grid_id, doc_c.the_file.grid_id)

            # Test with default
            doc_d = GridDocument(the_file='')
            doc_d.save()

            doc_e = GridDocument.objects.with_id(doc_d.id)
            self.assertEquals(doc_d.the_file.grid_id, doc_e.the_file.grid_id)
            doc_e.the_file.replace(f, filename='doc_e')
            doc_e.save()

            doc_f = GridDocument.objects.with_id(doc_e.id)
            self.assertEquals(doc_e.the_file.grid_id, doc_f.the_file.grid_id)

        # Only the two explicitly replaced files should exist in GridFS.
        db = GridDocument._get_db()
        grid_fs = gridfs.GridFS(db)
        self.assertEquals(['doc_b', 'doc_e'], grid_fs.list())

    def test_file_uniqueness(self):
        """Ensure that each instance of a FileField is unique
        """
        class TestFile(Document):
            name = StringField()
            file = FileField()

        # First instance
        testfile = TestFile()
        testfile.name = "Hello, World!"
        testfile.file.put('Hello, World!')
        testfile.save()

        # Second instance
        testfiledupe = TestFile()
        data = testfiledupe.file.read()  # Should be None

        self.assertTrue(testfile.name != testfiledupe.name)
        self.assertTrue(testfile.file.read() != data)

        TestFile.drop_collection()

    def test_file_boolean(self):
        """Ensure that a boolean test of a FileField indicates its presence
        """
        class TestFile(Document):
            file = FileField()

        testfile = TestFile()
        self.assertFalse(bool(testfile.file))
        testfile.file = 'Hello, World!'
        testfile.file.content_type = 'text/plain'
        testfile.save()
        self.assertTrue(bool(testfile.file))

        TestFile.drop_collection()

    def test_image_field(self):

        class TestImage(Document):
            image = ImageField()

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'r'))
        t.save()

        t = TestImage.objects.first()

        self.assertEquals(t.image.format, 'PNG')

        # Original dimensions of the test image fixture.
        w, h = t.image.size
        self.assertEquals(w, 371)
        self.assertEquals(h, 76)

        t.image.delete()

    def test_image_field_resize(self):

        class TestImage(Document):
            image = ImageField(size=(185, 37))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'r'))
        t.save()

        t = TestImage.objects.first()

        self.assertEquals(t.image.format, 'PNG')
        # The stored image is resized to the declared size.
        w, h = t.image.size

        self.assertEquals(w, 185)
        self.assertEquals(h, 37)

        t.image.delete()

    def test_image_field_thumbnail(self):

        class TestImage(Document):
            image = ImageField(thumbnail_size=(92, 18))

        TestImage.drop_collection()

        t = TestImage()
        t.image.put(open(TEST_IMAGE_PATH, 'r'))
        t.save()

        t = TestImage.objects.first()

        # A thumbnail is generated alongside the original.
        self.assertEquals(t.image.thumbnail.format, 'PNG')
        self.assertEquals(t.image.thumbnail.width, 92)
        self.assertEquals(t.image.thumbnail.height, 18)

        t.image.delete()

    def test_file_multidb(self):
        register_connection('testfiles', 'testfiles')

        class TestFile(Document):
            name = StringField()
            # Files stored on a secondary connection / custom collection.
            file = FileField(db_alias="testfiles",
                             collection_name="macumba")

        TestFile.drop_collection()

        # delete old filesystem
        get_db("testfiles").macumba.files.drop()
        get_db("testfiles").macumba.chunks.drop()

        # First instance
        testfile = TestFile()
        testfile.name = "Hello, World!"
        testfile.file.put('Hello, World!', name="hello.txt")
        testfile.save()

        data = get_db("testfiles").macumba.files.find_one()
        self.assertEquals(data.get('name'), 'hello.txt')

        testfile = TestFile.objects.first()
        self.assertEquals(testfile.file.read(), 'Hello, World!')

    def test_geo_indexes(self):
        """Ensure that indexes are created automatically for GeoPointFields.
""" class Event(Document): title = StringField() location = GeoPointField() Event.drop_collection() event = Event(title="Coltrane Motion @ Double Door", location=[41.909889, -87.677137]) event.save() info = Event.objects._collection.index_information() self.assertTrue(u'location_2d' in info) self.assertTrue(info[u'location_2d']['key'] == [(u'location', u'2d')]) Event.drop_collection() def test_geo_embedded_indexes(self): """Ensure that indexes are created automatically for GeoPointFields on embedded documents. """ class Venue(EmbeddedDocument): location = GeoPointField() name = StringField() class Event(Document): title = StringField() venue = EmbeddedDocumentField(Venue) Event.drop_collection() venue = Venue(name="Double Door", location=[41.909889, -87.677137]) event = Event(title="Coltrane Motion", venue=venue) event.save() info = Event.objects._collection.index_information() self.assertTrue(u'location_2d' in info) self.assertTrue(info[u'location_2d']['key'] == [(u'location', u'2d')]) def test_ensure_unique_default_instances(self): """Ensure that every field has it's own unique default instance.""" class D(Document): data = DictField() data2 = DictField(default=lambda: {}) d1 = D() d1.data['foo'] = 'bar' d1.data2['foo'] = 'bar' d2 = D() self.assertEqual(d2.data, {}) self.assertEqual(d2.data2, {}) def test_sequence_field(self): class Person(Document): id = SequenceField(primary_key=True) name = StringField() self.db['mongoengine.counters'].drop() Person.drop_collection() for x in xrange(10): p = Person(name="Person %s" % x) p.save() c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) ids = [i.id for i in Person.objects] self.assertEqual(ids, range(1, 11)) c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) def test_multiple_sequence_fields(self): class Person(Document): id = SequenceField(primary_key=True) counter = SequenceField() name = StringField() 
self.db['mongoengine.counters'].drop() Person.drop_collection() for x in xrange(10): p = Person(name="Person %s" % x) p.save() c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) ids = [i.id for i in Person.objects] self.assertEqual(ids, range(1, 11)) counters = [i.counter for i in Person.objects] self.assertEqual(counters, range(1, 11)) c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) def test_sequence_fields_reload(self): class Animal(Document): counter = SequenceField() type = StringField() self.db['mongoengine.counters'].drop() Animal.drop_collection() a = Animal(type="Boi") a.save() self.assertEqual(a.counter, 1) a.reload() self.assertEqual(a.counter, 1) a.counter = None self.assertEqual(a.counter, 2) a.save() self.assertEqual(a.counter, 2) a = Animal.objects.first() self.assertEqual(a.counter, 2) a.reload() self.assertEqual(a.counter, 2) def test_multiple_sequence_fields_on_docs(self): class Animal(Document): id = SequenceField(primary_key=True) class Person(Document): id = SequenceField(primary_key=True) self.db['mongoengine.counters'].drop() Animal.drop_collection() Person.drop_collection() for x in xrange(10): a = Animal(name="Animal %s" % x) a.save() p = Person(name="Person %s" % x) p.save() c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) self.assertEqual(c['next'], 10) ids = [i.id for i in Person.objects] self.assertEqual(ids, range(1, 11)) id = [i.id for i in Animal.objects] self.assertEqual(id, range(1, 11)) c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) self.assertEqual(c['next'], 10) c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) self.assertEqual(c['next'], 10) def test_generic_embedded_document(self): class Car(EmbeddedDocument): name = StringField() class Dish(EmbeddedDocument): food = 
StringField(required=True) number = IntField() class Person(Document): name = StringField() like = GenericEmbeddedDocumentField() Person.drop_collection() person = Person(name='Test User') person.like = Car(name='Fiat') person.save() person = Person.objects.first() self.assertTrue(isinstance(person.like, Car)) person.like = Dish(food="arroz", number=15) person.save() person = Person.objects.first() self.assertTrue(isinstance(person.like, Dish)) def test_generic_embedded_document_choices(self): """Ensure you can limit GenericEmbeddedDocument choices """ class Car(EmbeddedDocument): name = StringField() class Dish(EmbeddedDocument): food = StringField(required=True) number = IntField() class Person(Document): name = StringField() like = GenericEmbeddedDocumentField(choices=(Dish,)) Person.drop_collection() person = Person(name='Test User') person.like = Car(name='Fiat') self.assertRaises(ValidationError, person.validate) person.like = Dish(food="arroz", number=15) person.save() person = Person.objects.first() self.assertTrue(isinstance(person.like, Dish)) def test_generic_list_embedded_document_choices(self): """Ensure you can limit GenericEmbeddedDocument choices inside a list field """ class Car(EmbeddedDocument): name = StringField() class Dish(EmbeddedDocument): food = StringField(required=True) number = IntField() class Person(Document): name = StringField() likes = ListField(GenericEmbeddedDocumentField(choices=(Dish,))) Person.drop_collection() person = Person(name='Test User') person.likes = [Car(name='Fiat')] self.assertRaises(ValidationError, person.validate) person.likes = [Dish(food="arroz", number=15)] person.save() person = Person.objects.first() self.assertTrue(isinstance(person.likes[0], Dish)) def test_recursive_validation(self): """Ensure that a validation result to_dict is available. 
""" class Author(EmbeddedDocument): name = StringField(required=True) class Comment(EmbeddedDocument): author = EmbeddedDocumentField(Author, required=True) content = StringField(required=True) class Post(Document): title = StringField(required=True) comments = ListField(EmbeddedDocumentField(Comment)) bob = Author(name='Bob') post = Post(title='hello world') post.comments.append(Comment(content='hello', author=bob)) post.comments.append(Comment(author=bob)) try: post.validate() except ValidationError, error: pass # ValidationError.errors property self.assertTrue(hasattr(error, 'errors')) self.assertTrue(isinstance(error.errors, dict)) self.assertTrue('comments' in error.errors) self.assertTrue(1 in error.errors['comments']) self.assertTrue(isinstance(error.errors['comments'][1]['content'], ValidationError)) # ValidationError.schema property error_dict = error.to_dict() self.assertTrue(isinstance(error_dict, dict)) self.assertTrue('comments' in error_dict) self.assertTrue(1 in error_dict['comments']) self.assertTrue('content' in error_dict['comments'][1]) self.assertEquals(error_dict['comments'][1]['content'], u'Field is required ("content")') post.comments[1].content = 'here we go' post.validate() if __name__ == '__main__': unittest.main() MongoEngine-mongoengine-7a1b110/tests/test_queryset.py000066400000000000000000003455131177143177100232200ustar00rootroot00000000000000# -*- coding: utf-8 -*- import unittest import pymongo from bson import ObjectId from datetime import datetime, timedelta from mongoengine.queryset import (QuerySet, QuerySetManager, MultipleObjectsReturned, DoesNotExist, QueryFieldList) from mongoengine import * from mongoengine.connection import get_connection from mongoengine.tests import query_counter class QuerySetTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') class Person(Document): name = StringField() age = IntField() meta = {'allow_inheritance': True} self.Person = Person def test_initialisation(self): """Ensure 
        that a QuerySet is correctly initialised by QuerySetManager.
        """
        self.assertTrue(isinstance(self.Person.objects, QuerySet))
        self.assertEqual(self.Person.objects._collection.name,
                         self.Person._get_collection_name())
        self.assertTrue(isinstance(self.Person.objects._collection,
                                   pymongo.collection.Collection))

    def test_transform_query(self):
        """Ensure that the _transform_query function operates correctly.
        """
        # Plain equality, single/combined operators, and precedence of a
        # plain value over an operator on the same field.
        self.assertEqual(QuerySet._transform_query(name='test', age=30),
                         {'name': 'test', 'age': 30})
        self.assertEqual(QuerySet._transform_query(age__lt=30),
                         {'age': {'$lt': 30}})
        self.assertEqual(QuerySet._transform_query(age__gt=20, age__lt=50),
                         {'age': {'$gt': 20, '$lt': 50}})
        self.assertEqual(QuerySet._transform_query(age=20, age__gt=50),
                         {'age': 20})
        self.assertEqual(QuerySet._transform_query(friend__age__gte=30),
                         {'friend.age': {'$gte': 30}})
        self.assertEqual(QuerySet._transform_query(name__exists=True),
                         {'name': {'$exists': True}})

    def test_find(self):
        """Ensure that a query returns a valid set of results.
        """
        person1 = self.Person(name="User A", age=20)
        person1.save()
        person2 = self.Person(name="User B", age=30)
        person2.save()

        # Find all people in the collection
        people = self.Person.objects
        self.assertEqual(len(people), 2)
        results = list(people)
        self.assertTrue(isinstance(results[0], self.Person))
        self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode)))
        self.assertEqual(results[0].name, "User A")
        self.assertEqual(results[0].age, 20)
        self.assertEqual(results[1].name, "User B")
        self.assertEqual(results[1].age, 30)

        # Use a query to filter the people found to just person1
        people = self.Person.objects(age=20)
        self.assertEqual(len(people), 1)
        person = people.next()
        self.assertEqual(person.name, "User A")
        self.assertEqual(person.age, 20)

        # Test limit
        people = list(self.Person.objects.limit(1))
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0].name, 'User A')

        # Test skip
        people = list(self.Person.objects.skip(1))
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0].name, 'User B')

        person3 = self.Person(name="User C", age=40)
        person3.save()

        # Test slice limit
        people = list(self.Person.objects[:2])
        self.assertEqual(len(people), 2)
        self.assertEqual(people[0].name, 'User A')
        self.assertEqual(people[1].name, 'User B')

        # Test slice skip
        people = list(self.Person.objects[1:])
        self.assertEqual(len(people), 2)
        self.assertEqual(people[0].name, 'User B')
        self.assertEqual(people[1].name, 'User C')

        # Test slice limit and skip
        people = list(self.Person.objects[1:2])
        self.assertEqual(len(people), 1)
        self.assertEqual(people[0].name, 'User B')

        people = list(self.Person.objects[1:1])
        self.assertEqual(len(people), 0)

        # Test slice out of range
        people = list(self.Person.objects[80000:80001])
        self.assertEqual(len(people), 0)

        # Test larger slice __repr__
        self.Person.objects.delete()
        for i in xrange(55):
            self.Person(name='A%s' % i, age=i).save()

        self.assertEqual(len(self.Person.objects), 55)
        # NOTE(review): the expected repr strings below look truncated in
        # transit (angle-bracket content stripped); verify against the
        # upstream source before relying on them.
        self.assertEqual("Person object", "%s" % self.Person.objects[0])
        self.assertEqual("[, ]",  "%s" % self.Person.objects[1:3])
        self.assertEqual("[, ]",  "%s" % self.Person.objects[51:53])

    def test_find_one(self):
        """Ensure that a query using find_one returns a valid result.
        """
        person1 = self.Person(name="User A", age=20)
        person1.save()
        person2 = self.Person(name="User B", age=30)
        person2.save()

        # Retrieve the first person from the database
        person = self.Person.objects.first()
        self.assertTrue(isinstance(person, self.Person))
        self.assertEqual(person.name, "User A")
        self.assertEqual(person.age, 20)

        # Use a query to filter the people found to just person2
        person = self.Person.objects(age=30).first()
        self.assertEqual(person.name, "User B")

        person = self.Person.objects(age__lt=30).first()
        self.assertEqual(person.name, "User A")

        # Use array syntax
        person = self.Person.objects[0]
        self.assertEqual(person.name, "User A")

        person = self.Person.objects[1]
        self.assertEqual(person.name, "User B")

        self.assertRaises(IndexError, self.Person.objects.__getitem__, 2)

        # Find a document using just the object id
        person = self.Person.objects.with_id(person1.id)
        self.assertEqual(person.name, "User A")

        # with_id on an already-filtered queryset is invalid.
        self.assertRaises(InvalidQueryError,
                          self.Person.objects(name="User A").with_id,
                          person1.id)

    def test_find_only_one(self):
        """Ensure that a query using ``get`` returns at most one result.
""" # Try retrieving when no objects exists self.assertRaises(DoesNotExist, self.Person.objects.get) self.assertRaises(self.Person.DoesNotExist, self.Person.objects.get) person1 = self.Person(name="User A", age=20) person1.save() person2 = self.Person(name="User B", age=30) person2.save() # Retrieve the first person from the database self.assertRaises(MultipleObjectsReturned, self.Person.objects.get) self.assertRaises(self.Person.MultipleObjectsReturned, self.Person.objects.get) # Use a query to filter the people found to just person2 person = self.Person.objects.get(age=30) self.assertEqual(person.name, "User B") person = self.Person.objects.get(age__lt=30) self.assertEqual(person.name, "User A") def test_find_array_position(self): """Ensure that query by array position works. """ class Comment(EmbeddedDocument): name = StringField() class Post(EmbeddedDocument): comments = ListField(EmbeddedDocumentField(Comment)) class Blog(Document): tags = ListField(StringField()) posts = ListField(EmbeddedDocumentField(Post)) Blog.drop_collection() Blog.objects.create(tags=['a', 'b']) self.assertEqual(len(Blog.objects(tags__0='a')), 1) self.assertEqual(len(Blog.objects(tags__0='b')), 0) self.assertEqual(len(Blog.objects(tags__1='a')), 0) self.assertEqual(len(Blog.objects(tags__1='b')), 1) Blog.drop_collection() comment1 = Comment(name='testa') comment2 = Comment(name='testb') post1 = Post(comments=[comment1, comment2]) post2 = Post(comments=[comment2, comment2]) blog1 = Blog.objects.create(posts=[post1, post2]) blog2 = Blog.objects.create(posts=[post2, post1]) blog = Blog.objects(posts__0__comments__0__name='testa').get() self.assertEqual(blog, blog1) query = Blog.objects(posts__1__comments__1__name='testb') self.assertEqual(len(query), 2) query = Blog.objects(posts__1__comments__1__name='testa') self.assertEqual(len(query), 0) query = Blog.objects(posts__0__comments__1__name='testa') self.assertEqual(len(query), 0) Blog.drop_collection() def test_update_write_options(self): 
"""Test that passing write_options works""" self.Person.drop_collection() write_options = {"fsync": True} author, created = self.Person.objects.get_or_create( name='Test User', write_options=write_options) author.save(write_options=write_options) self.Person.objects.update(set__name='Ross', write_options=write_options) author = self.Person.objects.first() self.assertEquals(author.name, 'Ross') self.Person.objects.update_one(set__name='Test User', write_options=write_options) author = self.Person.objects.first() self.assertEquals(author.name, 'Test User') def test_update_update_has_a_value(self): """Test to ensure that update is passed a value to update to""" self.Person.drop_collection() author = self.Person(name='Test User') author.save() def update_raises(): self.Person.objects(pk=author.pk).update({}) def update_one_raises(): self.Person.objects(pk=author.pk).update_one({}) self.assertRaises(OperationError, update_raises) self.assertRaises(OperationError, update_one_raises) def test_update_array_position(self): """Ensure that updating by array position works. Check update() and update_one() can take syntax like: set__posts__1__comments__1__name="testc" Check that it only works for ListFields. 
""" class Comment(EmbeddedDocument): name = StringField() class Post(EmbeddedDocument): comments = ListField(EmbeddedDocumentField(Comment)) class Blog(Document): tags = ListField(StringField()) posts = ListField(EmbeddedDocumentField(Post)) Blog.drop_collection() comment1 = Comment(name='testa') comment2 = Comment(name='testb') post1 = Post(comments=[comment1, comment2]) post2 = Post(comments=[comment2, comment2]) blog1 = Blog.objects.create(posts=[post1, post2]) blog2 = Blog.objects.create(posts=[post2, post1]) # Update all of the first comments of second posts of all blogs blog = Blog.objects().update(set__posts__1__comments__0__name="testc") testc_blogs = Blog.objects(posts__1__comments__0__name="testc") self.assertEqual(len(testc_blogs), 2) Blog.drop_collection() blog1 = Blog.objects.create(posts=[post1, post2]) blog2 = Blog.objects.create(posts=[post2, post1]) # Update only the first blog returned by the query blog = Blog.objects().update_one( set__posts__1__comments__1__name="testc") testc_blogs = Blog.objects(posts__1__comments__1__name="testc") self.assertEqual(len(testc_blogs), 1) # Check that using this indexing syntax on a non-list fails def non_list_indexing(): Blog.objects().update(set__posts__1__comments__0__name__1="asdf") self.assertRaises(InvalidQueryError, non_list_indexing) Blog.drop_collection() def test_update_using_positional_operator(self): """Ensure that the list fields can be updated using the positional operator.""" class Comment(EmbeddedDocument): by = StringField() votes = IntField() class BlogPost(Document): title = StringField() comments = ListField(EmbeddedDocumentField(Comment)) BlogPost.drop_collection() c1 = Comment(by="joe", votes=3) c2 = Comment(by="jane", votes=7) BlogPost(title="ABC", comments=[c1, c2]).save() BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1) post = BlogPost.objects.first() self.assertEquals(post.comments[1].by, 'jane') self.assertEquals(post.comments[1].votes, 8) # Currently the $ 
operator only applies to the first matched item in # the query class Simple(Document): x = ListField() Simple.drop_collection() Simple(x=[1, 2, 3, 2]).save() Simple.objects(x=2).update(inc__x__S=1) simple = Simple.objects.first() self.assertEquals(simple.x, [1, 3, 3, 2]) Simple.drop_collection() # You can set multiples Simple.drop_collection() Simple(x=[1, 2, 3, 4]).save() Simple(x=[2, 3, 4, 5]).save() Simple(x=[3, 4, 5, 6]).save() Simple(x=[4, 5, 6, 7]).save() Simple.objects(x=3).update(set__x__S=0) s = Simple.objects() self.assertEquals(s[0].x, [1, 2, 0, 4]) self.assertEquals(s[1].x, [2, 0, 4, 5]) self.assertEquals(s[2].x, [0, 4, 5, 6]) self.assertEquals(s[3].x, [4, 5, 6, 7]) # Using "$unset" with an expression like this "array.$" will result in # the array item becoming None, not being removed. Simple.drop_collection() Simple(x=[1, 2, 3, 4, 3, 2, 3, 4]).save() Simple.objects(x=3).update(unset__x__S=1) simple = Simple.objects.first() self.assertEquals(simple.x, [1, 2, None, 4, 3, 2, 3, 4]) # Nested updates arent supported yet.. 
def update_nested(): Simple.drop_collection() Simple(x=[{'test': [1, 2, 3, 4]}]).save() Simple.objects(x__test=2).update(set__x__S__test__S=3) self.assertEquals(simple.x, [1, 2, 3, 4]) self.assertRaises(OperationError, update_nested) Simple.drop_collection() def test_update_using_positional_operator_embedded_document(self): """Ensure that the embedded documents can be updated using the positional operator.""" class Vote(EmbeddedDocument): score = IntField() class Comment(EmbeddedDocument): by = StringField() votes = EmbeddedDocumentField(Vote) class BlogPost(Document): title = StringField() comments = ListField(EmbeddedDocumentField(Comment)) BlogPost.drop_collection() c1 = Comment(by="joe", votes=Vote(score=3)) c2 = Comment(by="jane", votes=Vote(score=7)) BlogPost(title="ABC", comments=[c1, c2]).save() BlogPost.objects(comments__by="joe").update(set__comments__S__votes=Vote(score=4)) post = BlogPost.objects.first() self.assertEquals(post.comments[0].by, 'joe') self.assertEquals(post.comments[0].votes.score, 4) def test_mapfield_update(self): """Ensure that the MapField can be updated.""" class Member(EmbeddedDocument): gender = StringField() age = IntField() class Club(Document): members = MapField(EmbeddedDocumentField(Member)) Club.drop_collection() club = Club() club.members['John'] = Member(gender="M", age=13) club.save() Club.objects().update( set__members={"John": Member(gender="F", age=14)}) club = Club.objects().first() self.assertEqual(club.members['John'].gender, "F") self.assertEqual(club.members['John'].age, 14) def test_dictfield_update(self): """Ensure that the DictField can be updated.""" class Club(Document): members = DictField() club = Club() club.members['John'] = dict(gender="M", age=13) club.save() Club.objects().update( set__members={"John": dict(gender="F", age=14)}) club = Club.objects().first() self.assertEqual(club.members['John']['gender'], "F") self.assertEqual(club.members['John']['age'], 14) def test_get_or_create(self): """Ensure 
        that ``get_or_create`` returns one result or creates a new document.
        """
        person1 = self.Person(name="User A", age=20)
        person1.save()
        person2 = self.Person(name="User B", age=30)
        person2.save()

        # Retrieve the first person from the database — like get(), two
        # matches must raise.
        self.assertRaises(MultipleObjectsReturned,
                          self.Person.objects.get_or_create)
        self.assertRaises(self.Person.MultipleObjectsReturned,
                          self.Person.objects.get_or_create)

        # Use a query to filter the people found to just person2
        person, created = self.Person.objects.get_or_create(age=30)
        self.assertEqual(person.name, "User B")
        self.assertEqual(created, False)

        person, created = self.Person.objects.get_or_create(age__lt=30)
        self.assertEqual(person.name, "User A")
        self.assertEqual(created, False)

        # Try retrieving when no objects exists - new doc should be created
        kwargs = dict(age=50, defaults={'name': 'User C'})
        person, created = self.Person.objects.get_or_create(**kwargs)
        self.assertEqual(created, True)

        person = self.Person.objects.get(age=50)
        self.assertEqual(person.name, "User C")

    def test_bulk_insert(self):
        """Ensure that bulk insert works
        """
        class Comment(EmbeddedDocument):
            name = StringField()

        class Post(EmbeddedDocument):
            comments = ListField(EmbeddedDocumentField(Comment))

        class Blog(Document):
            title = StringField(unique=True)
            tags = ListField(StringField())
            posts = ListField(EmbeddedDocumentField(Post))

        Blog.drop_collection()

        # Recreates the collection
        self.assertEqual(0, Blog.objects.count())

        with query_counter() as q:
            self.assertEqual(q, 0)

            comment1 = Comment(name='testa')
            comment2 = Comment(name='testb')
            post1 = Post(comments=[comment1, comment2])
            post2 = Post(comments=[comment2, comment2])

            blogs = []
            for i in xrange(1, 100):
                blogs.append(Blog(title="post %s" % i, posts=[post1, post2]))

            Blog.objects.insert(blogs, load_bulk=False)
            self.assertEqual(q, 1)  # 1 for the insert

            Blog.objects.insert(blogs)
            self.assertEqual(q, 3)  # 1 for insert, and 1 for in bulk fetch (3 in total)

        Blog.drop_collection()

        comment1 = Comment(name='testa')
        comment2 = Comment(name='testb')
        post1 = Post(comments=[comment1, comment2])
        post2 = Post(comments=[comment2, comment2])
        blog1 = Blog(title="code", posts=[post1, post2])
        blog2 = Blog(title="mongodb", posts=[post2, post1])
        blog1, blog2 = Blog.objects.insert([blog1, blog2])
        self.assertEqual(blog1.title, "code")
        self.assertEqual(blog2.title, "mongodb")
        self.assertEqual(Blog.objects.count(), 2)

        # test handles people trying to upsert
        def throw_operation_error():
            blogs = Blog.objects
            Blog.objects.insert(blogs)

        self.assertRaises(OperationError, throw_operation_error)

        # test handles other classes being inserted
        def throw_operation_error_wrong_doc():
            class Author(Document):
                pass
            Blog.objects.insert(Author())

        self.assertRaises(OperationError, throw_operation_error_wrong_doc)

        def throw_operation_error_not_a_document():
            Blog.objects.insert("HELLO WORLD")

        self.assertRaises(OperationError, throw_operation_error_not_a_document)

        Blog.drop_collection()

        # Inserting a single document returns the document, not a list.
        blog1 = Blog(title="code", posts=[post1, post2])
        blog1 = Blog.objects.insert(blog1)
        self.assertEqual(blog1.title, "code")
        self.assertEqual(Blog.objects.count(), 1)

        Blog.drop_collection()
        # With load_bulk=False a single insert returns just the ObjectId.
        blog1 = Blog(title="code", posts=[post1, post2])
        obj_id = Blog.objects.insert(blog1, load_bulk=False)
        self.assertEquals(obj_id.__class__.__name__, 'ObjectId')

        Blog.drop_collection()
        post3 = Post(comments=[comment1, comment1])
        blog1 = Blog(title="foo", posts=[post1, post2])
        blog2 = Blog(title="bar", posts=[post2, post3])
        blog3 = Blog(title="baz", posts=[post1, post2])
        Blog.objects.insert([blog1, blog2])

        # blog2's title duplicates an existing unique title.
        def throw_operation_error_not_unique():
            Blog.objects.insert([blog2, blog3], safe=True)

        self.assertRaises(OperationError, throw_operation_error_not_unique)
        self.assertEqual(Blog.objects.count(), 2)

        # continue_on_error lets the non-duplicate document through.
        Blog.objects.insert([blog2, blog3],
                            write_options={'continue_on_error': True})
        self.assertEqual(Blog.objects.count(), 3)

    def test_slave_okay(self):
        """Ensures that a query can take slave_okay syntax
        """
        person1 = self.Person(name="User A", age=20)
        person1.save()
        person2 = self.Person(name="User B", age=30)
        person2.save()

        # Retrieve the first person from the database
        person = self.Person.objects.slave_okay(True).first()
        self.assertTrue(isinstance(person, self.Person))
        self.assertEqual(person.name, "User A")
        self.assertEqual(person.age, 20)

    def test_cursor_args(self):
        """Ensures the cursor args can be set as expected
        """
        p = self.Person.objects
        # Check default
        self.assertEqual(p._cursor_args,
                {'snapshot': False, 'slave_okay': False, 'timeout': True})

        # Each chained modifier mutates the same queryset's cursor args.
        p.snapshot(False).slave_okay(False).timeout(False)
        self.assertEqual(p._cursor_args,
                {'snapshot': False, 'slave_okay': False, 'timeout': False})

        p.snapshot(True).slave_okay(False).timeout(False)
        self.assertEqual(p._cursor_args,
                {'snapshot': True, 'slave_okay': False, 'timeout': False})

        p.snapshot(True).slave_okay(True).timeout(False)
        self.assertEqual(p._cursor_args,
                {'snapshot': True, 'slave_okay': True, 'timeout': False})

        p.snapshot(True).slave_okay(True).timeout(True)
        self.assertEqual(p._cursor_args,
                {'snapshot': True, 'slave_okay': True, 'timeout': True})

    def test_repeated_iteration(self):
        """Ensure that QuerySet rewinds itself one iteration finishes.
        """
        self.Person(name='Person 1').save()
        self.Person(name='Person 2').save()

        queryset = self.Person.objects
        people1 = [person for person in queryset]
        people2 = [person for person in queryset]

        # Check that it still works even if iteration is interrupted.
        for person in queryset:
            break
        people3 = [person for person in queryset]

        self.assertEqual(people1, people2)
        self.assertEqual(people1, people3)

    def test_repr(self):
        """Test repr behavior isnt destructive"""

        class Doc(Document):
            number = IntField()

            def __repr__(self):
                # NOTE(review): this repr string (and the expected strings
                # below) look truncated in transit (angle-bracket content
                # stripped); verify against the upstream source.
                return "" % self.number

        Doc.drop_collection()
        for i in xrange(1000):
            Doc(number=i).save()

        docs = Doc.objects.order_by('number')

        self.assertEquals(docs.count(), 1000)
        self.assertEquals(len(docs), 1000)

        docs_string = "%s" % docs
        self.assertTrue("Doc: 0" in docs_string)

        # Printing the queryset must not consume it.
        self.assertEquals(docs.count(), 1000)
        self.assertEquals(len(docs), 1000)

        # Limit and skip
        self.assertEquals('[, , ]', "%s" % docs[1:4])

        self.assertEquals(docs.count(), 3)
        self.assertEquals(len(docs), 3)
        for doc in docs:
            self.assertEqual('.. queryset mid-iteration ..', repr(docs))

    def test_regex_query_shortcuts(self):
        """Ensure that contains, startswith, endswith, etc work.
        """
        person = self.Person(name='Guido van Rossum')
        person.save()

        # Test contains
        obj = self.Person.objects(name__contains='van').first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(name__contains='Van').first()
        self.assertEqual(obj, None)
        obj = self.Person.objects(Q(name__contains='van')).first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(Q(name__contains='Van')).first()
        self.assertEqual(obj, None)

        # Test icontains
        obj = self.Person.objects(name__icontains='Van').first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(Q(name__icontains='Van')).first()
        self.assertEqual(obj, person)

        # Test startswith
        obj = self.Person.objects(name__startswith='Guido').first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(name__startswith='guido').first()
        self.assertEqual(obj, None)
        obj = self.Person.objects(Q(name__startswith='Guido')).first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(Q(name__startswith='guido')).first()
        self.assertEqual(obj, None)

        # Test istartswith
        obj = self.Person.objects(name__istartswith='guido').first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(Q(name__istartswith='guido')).first()
        self.assertEqual(obj, person)

        # Test endswith
        obj = self.Person.objects(name__endswith='Rossum').first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(name__endswith='rossuM').first()
        self.assertEqual(obj, None)
        obj = self.Person.objects(Q(name__endswith='Rossum')).first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(Q(name__endswith='rossuM')).first()
        self.assertEqual(obj, None)

        # Test iendswith
        obj = self.Person.objects(name__iendswith='rossuM').first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(Q(name__iendswith='rossuM')).first()
        self.assertEqual(obj, person)

        # Test exact
        obj = self.Person.objects(name__exact='Guido van Rossum').first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(name__exact='Guido van rossum').first()
        self.assertEqual(obj, None)
        obj = self.Person.objects(name__exact='Guido van Rossu').first()
        self.assertEqual(obj, None)
        obj = self.Person.objects(Q(name__exact='Guido van Rossum')).first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(Q(name__exact='Guido van rossum')).first()
        self.assertEqual(obj, None)
        obj = self.Person.objects(Q(name__exact='Guido van Rossu')).first()
        self.assertEqual(obj, None)

        # Test iexact
        obj = self.Person.objects(name__iexact='gUIDO VAN rOSSUM').first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(name__iexact='gUIDO VAN rOSSU').first()
        self.assertEqual(obj, None)
        obj = self.Person.objects(Q(name__iexact='gUIDO VAN rOSSUM')).first()
        self.assertEqual(obj, person)
        obj = self.Person.objects(Q(name__iexact='gUIDO VAN rOSSU')).first()
        self.assertEqual(obj, None)

        # Test unsafe expressions — regex metacharacters in the value must
        # be escaped, not interpreted.
        person = self.Person(name='Guido van Rossum [.\'Geek\']')
        person.save()

        obj = self.Person.objects(Q(name__icontains='[.\'Geek')).first()
        self.assertEqual(obj, person)

    def test_not(self):
        """Ensure that the __not operator works as expected.
        """
        alice = self.Person(name='Alice', age=25)
        alice.save()

        obj = self.Person.objects(name__iexact='alice').first()
        self.assertEqual(obj, alice)

        obj = self.Person.objects(name__not__iexact='alice').first()
        self.assertEqual(obj, None)

    def test_filter_chaining(self):
        """Ensure filters can be chained together.
        """
        class BlogPost(Document):
            title = StringField()
            is_published = BooleanField()
            published_date = DateTimeField()

            @queryset_manager
            def published(doc_cls, queryset):
                return queryset(is_published=True)

        blog_post_1 = BlogPost(title="Blog Post #1",
                               is_published = True,
                               published_date=datetime(2010, 1, 5, 0, 0 ,0))
        blog_post_2 = BlogPost(title="Blog Post #2",
                               is_published = True,
                               published_date=datetime(2010, 1, 6, 0, 0 ,0))
        blog_post_3 = BlogPost(title="Blog Post #3",
                               is_published = True,
                               published_date=datetime(2010, 1, 7, 0, 0 ,0))

        blog_post_1.save()
        blog_post_2.save()
        blog_post_3.save()

        # find all published blog posts before 2010-01-07
        published_posts = BlogPost.published()
        published_posts = published_posts.filter(
            published_date__lt=datetime(2010, 1, 7, 0, 0 ,0))
        self.assertEqual(published_posts.count(), 2)

        BlogPost.drop_collection()

    def test_ordering(self):
        """Ensure default ordering is applied and can be overridden.
""" class BlogPost(Document): title = StringField() published_date = DateTimeField() meta = { 'ordering': ['-published_date'] } BlogPost.drop_collection() blog_post_1 = BlogPost(title="Blog Post #1", published_date=datetime(2010, 1, 5, 0, 0 ,0)) blog_post_2 = BlogPost(title="Blog Post #2", published_date=datetime(2010, 1, 6, 0, 0 ,0)) blog_post_3 = BlogPost(title="Blog Post #3", published_date=datetime(2010, 1, 7, 0, 0 ,0)) blog_post_1.save() blog_post_2.save() blog_post_3.save() # get the "first" BlogPost using default ordering # from BlogPost.meta.ordering latest_post = BlogPost.objects.first() self.assertEqual(latest_post.title, "Blog Post #3") # override default ordering, order BlogPosts by "published_date" first_post = BlogPost.objects.order_by("+published_date").first() self.assertEqual(first_post.title, "Blog Post #1") BlogPost.drop_collection() def test_only(self): """Ensure that QuerySet.only only returns the requested fields. """ person = self.Person(name='test', age=25) person.save() obj = self.Person.objects.only('name').get() self.assertEqual(obj.name, person.name) self.assertEqual(obj.age, None) obj = self.Person.objects.only('age').get() self.assertEqual(obj.name, None) self.assertEqual(obj.age, person.age) obj = self.Person.objects.only('name', 'age').get() self.assertEqual(obj.name, person.name) self.assertEqual(obj.age, person.age) # Check polymorphism still works class Employee(self.Person): salary = IntField(db_field='wage') employee = Employee(name='test employee', age=40, salary=30000) employee.save() obj = self.Person.objects(id=employee.id).only('age').get() self.assertTrue(isinstance(obj, Employee)) # Check field names are looked up properly obj = Employee.objects(id=employee.id).only('salary').get() self.assertEqual(obj.salary, employee.salary) self.assertEqual(obj.name, None) def test_only_with_subfields(self): class User(EmbeddedDocument): name = StringField() email = StringField() class Comment(EmbeddedDocument): title = StringField() 
text = StringField() class BlogPost(Document): content = StringField() author = EmbeddedDocumentField(User) comments = ListField(EmbeddedDocumentField(Comment)) BlogPost.drop_collection() post = BlogPost(content='Had a good coffee today...') post.author = User(name='Test User') post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] post.save() obj = BlogPost.objects.only('author.name',).get() self.assertEqual(obj.content, None) self.assertEqual(obj.author.email, None) self.assertEqual(obj.author.name, 'Test User') self.assertEqual(obj.comments, []) obj = BlogPost.objects.only('content', 'comments.title',).get() self.assertEqual(obj.content, 'Had a good coffee today...') self.assertEqual(obj.author, None) self.assertEqual(obj.comments[0].title, 'I aggree') self.assertEqual(obj.comments[1].title, 'Coffee') self.assertEqual(obj.comments[0].text, None) self.assertEqual(obj.comments[1].text, None) obj = BlogPost.objects.only('comments',).get() self.assertEqual(obj.content, None) self.assertEqual(obj.author, None) self.assertEqual(obj.comments[0].title, 'I aggree') self.assertEqual(obj.comments[1].title, 'Coffee') self.assertEqual(obj.comments[0].text, 'Great post!') self.assertEqual(obj.comments[1].text, 'I hate coffee') BlogPost.drop_collection() def test_exclude(self): class User(EmbeddedDocument): name = StringField() email = StringField() class Comment(EmbeddedDocument): title = StringField() text = StringField() class BlogPost(Document): content = StringField() author = EmbeddedDocumentField(User) comments = ListField(EmbeddedDocumentField(Comment)) BlogPost.drop_collection() post = BlogPost(content='Had a good coffee today...') post.author = User(name='Test User') post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] post.save() obj = BlogPost.objects.exclude('author', 'comments.text').get() self.assertEqual(obj.author, None) 
self.assertEqual(obj.content, 'Had a good coffee today...') self.assertEqual(obj.comments[0].title, 'I aggree') self.assertEqual(obj.comments[0].text, None) BlogPost.drop_collection() def test_exclude_only_combining(self): class Attachment(EmbeddedDocument): name = StringField() content = StringField() class Email(Document): sender = StringField() to = StringField() subject = StringField() body = StringField() content_type = StringField() attachments = ListField(EmbeddedDocumentField(Attachment)) Email.drop_collection() email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') email.attachments = [ Attachment(name='file1.doc', content='ABC'), Attachment(name='file2.doc', content='XYZ'), ] email.save() obj = Email.objects.exclude('content_type').exclude('body').get() self.assertEqual(obj.sender, 'me') self.assertEqual(obj.to, 'you') self.assertEqual(obj.subject, 'From Russia with Love') self.assertEqual(obj.body, None) self.assertEqual(obj.content_type, None) obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get() self.assertEqual(obj.sender, None) self.assertEqual(obj.to, 'you') self.assertEqual(obj.subject, None) self.assertEqual(obj.body, None) self.assertEqual(obj.content_type, None) obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get() self.assertEqual(obj.attachments[0].name, 'file1.doc') self.assertEqual(obj.attachments[0].content, None) self.assertEqual(obj.sender, None) self.assertEqual(obj.to, 'you') self.assertEqual(obj.subject, None) self.assertEqual(obj.body, None) self.assertEqual(obj.content_type, None) Email.drop_collection() def test_all_fields(self): class Email(Document): sender = StringField() to = StringField() subject = StringField() body = StringField() content_type = StringField() Email.drop_collection() email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') 
email.save() obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get() self.assertEqual(obj.sender, 'me') self.assertEqual(obj.to, 'you') self.assertEqual(obj.subject, 'From Russia with Love') self.assertEqual(obj.body, 'Hello!') self.assertEqual(obj.content_type, 'text/plain') Email.drop_collection() def test_slicing_fields(self): """Ensure that query slicing an array works. """ class Numbers(Document): n = ListField(IntField()) Numbers.drop_collection() numbers = Numbers(n=[0,1,2,3,4,5,-5,-4,-3,-2,-1]) numbers.save() # first three numbers = Numbers.objects.fields(slice__n=3).get() self.assertEquals(numbers.n, [0, 1, 2]) # last three numbers = Numbers.objects.fields(slice__n=-3).get() self.assertEquals(numbers.n, [-3, -2, -1]) # skip 2, limit 3 numbers = Numbers.objects.fields(slice__n=[2, 3]).get() self.assertEquals(numbers.n, [2, 3, 4]) # skip to fifth from last, limit 4 numbers = Numbers.objects.fields(slice__n=[-5, 4]).get() self.assertEquals(numbers.n, [-5, -4, -3, -2]) # skip to fifth from last, limit 10 numbers = Numbers.objects.fields(slice__n=[-5, 10]).get() self.assertEquals(numbers.n, [-5, -4, -3, -2, -1]) # skip to fifth from last, limit 10 dict method numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get() self.assertEquals(numbers.n, [-5, -4, -3, -2, -1]) def test_slicing_nested_fields(self): """Ensure that query slicing an embedded array works. 
""" class EmbeddedNumber(EmbeddedDocument): n = ListField(IntField()) class Numbers(Document): embedded = EmbeddedDocumentField(EmbeddedNumber) Numbers.drop_collection() numbers = Numbers() numbers.embedded = EmbeddedNumber(n=[0,1,2,3,4,5,-5,-4,-3,-2,-1]) numbers.save() # first three numbers = Numbers.objects.fields(slice__embedded__n=3).get() self.assertEquals(numbers.embedded.n, [0, 1, 2]) # last three numbers = Numbers.objects.fields(slice__embedded__n=-3).get() self.assertEquals(numbers.embedded.n, [-3, -2, -1]) # skip 2, limit 3 numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get() self.assertEquals(numbers.embedded.n, [2, 3, 4]) # skip to fifth from last, limit 4 numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get() self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2]) # skip to fifth from last, limit 10 numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get() self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2, -1]) # skip to fifth from last, limit 10 dict method numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2, -1]) def test_find_embedded(self): """Ensure that an embedded document is properly returned from a query. """ class User(EmbeddedDocument): name = StringField() class BlogPost(Document): content = StringField() author = EmbeddedDocumentField(User) BlogPost.drop_collection() post = BlogPost(content='Had a good coffee today...') post.author = User(name='Test User') post.save() result = BlogPost.objects.first() self.assertTrue(isinstance(result.author, User)) self.assertEqual(result.author.name, 'Test User') BlogPost.drop_collection() def test_find_dict_item(self): """Ensure that DictField items may be found. 
""" class BlogPost(Document): info = DictField() BlogPost.drop_collection() post = BlogPost(info={'title': 'test'}) post.save() post_obj = BlogPost.objects(info__title='test').first() self.assertEqual(post_obj.id, post.id) BlogPost.drop_collection() def test_q(self): """Ensure that Q objects may be used to query for documents. """ class BlogPost(Document): title = StringField() publish_date = DateTimeField() published = BooleanField() BlogPost.drop_collection() post1 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 8), published=False) post1.save() post2 = BlogPost(title='Test 2', publish_date=datetime(2010, 1, 15), published=True) post2.save() post3 = BlogPost(title='Test 3', published=True) post3.save() post4 = BlogPost(title='Test 4', publish_date=datetime(2010, 1, 8)) post4.save() post5 = BlogPost(title='Test 1', publish_date=datetime(2010, 1, 15)) post5.save() post6 = BlogPost(title='Test 1', published=False) post6.save() # Check ObjectId lookup works obj = BlogPost.objects(id=post1.id).first() self.assertEqual(obj, post1) # Check Q object combination with one does not exist q = BlogPost.objects(Q(title='Test 5') | Q(published=True)) posts = [post.id for post in q] published_posts = (post2, post3) self.assertTrue(all(obj.id in posts for obj in published_posts)) q = BlogPost.objects(Q(title='Test 1') | Q(published=True)) posts = [post.id for post in q] published_posts = (post1, post2, post3, post5, post6) self.assertTrue(all(obj.id in posts for obj in published_posts)) # Check Q object combination date = datetime(2010, 1, 10) q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True)) posts = [post.id for post in q] published_posts = (post1, post2, post3, post4) self.assertTrue(all(obj.id in posts for obj in published_posts)) self.assertFalse(any(obj.id in posts for obj in [post5, post6])) BlogPost.drop_collection() # Check the 'in' operator self.Person(name='user1', age=20).save() self.Person(name='user2', age=20).save() 
self.Person(name='user3', age=30).save() self.Person(name='user4', age=40).save() self.assertEqual(len(self.Person.objects(Q(age__in=[20]))), 2) self.assertEqual(len(self.Person.objects(Q(age__in=[20, 30]))), 3) def test_q_regex(self): """Ensure that Q objects can be queried using regexes. """ person = self.Person(name='Guido van Rossum') person.save() import re obj = self.Person.objects(Q(name=re.compile('^Gui'))).first() self.assertEqual(obj, person) obj = self.Person.objects(Q(name=re.compile('^gui'))).first() self.assertEqual(obj, None) obj = self.Person.objects(Q(name=re.compile('^gui', re.I))).first() self.assertEqual(obj, person) obj = self.Person.objects(Q(name__not=re.compile('^bob'))).first() self.assertEqual(obj, person) obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first() self.assertEqual(obj, None) def test_q_lists(self): """Ensure that Q objects query ListFields correctly. """ class BlogPost(Document): tags = ListField(StringField()) BlogPost.drop_collection() BlogPost(tags=['python', 'mongo']).save() BlogPost(tags=['python']).save() self.assertEqual(len(BlogPost.objects(Q(tags='mongo'))), 1) self.assertEqual(len(BlogPost.objects(Q(tags='python'))), 2) BlogPost.drop_collection() def test_exec_js_query(self): """Ensure that queries are properly formed for use in exec_js. 
""" class BlogPost(Document): hits = IntField() published = BooleanField() BlogPost.drop_collection() post1 = BlogPost(hits=1, published=False) post1.save() post2 = BlogPost(hits=1, published=True) post2.save() post3 = BlogPost(hits=1, published=True) post3.save() js_func = """ function(hitsField) { var count = 0; db[collection].find(query).forEach(function(doc) { count += doc[hitsField]; }); return count; } """ # Ensure that normal queries work c = BlogPost.objects(published=True).exec_js(js_func, 'hits') self.assertEqual(c, 2) c = BlogPost.objects(published=False).exec_js(js_func, 'hits') self.assertEqual(c, 1) # Ensure that Q object queries work c = BlogPost.objects(Q(published=True)).exec_js(js_func, 'hits') self.assertEqual(c, 2) c = BlogPost.objects(Q(published=False)).exec_js(js_func, 'hits') self.assertEqual(c, 1) BlogPost.drop_collection() def test_exec_js_field_sub(self): """Ensure that field substitutions occur properly in exec_js functions. """ class Comment(EmbeddedDocument): content = StringField(db_field='body') class BlogPost(Document): name = StringField(db_field='doc-name') comments = ListField(EmbeddedDocumentField(Comment), db_field='cmnts') BlogPost.drop_collection() comments1 = [Comment(content='cool'), Comment(content='yay')] post1 = BlogPost(name='post1', comments=comments1) post1.save() comments2 = [Comment(content='nice stuff')] post2 = BlogPost(name='post2', comments=comments2) post2.save() code = """ function getComments() { var comments = []; db[collection].find(query).forEach(function(doc) { var docComments = doc[~comments]; for (var i = 0; i < docComments.length; i++) { comments.push({ 'document': doc[~name], 'comment': doc[~comments][i][~comments.content] }); } }); return comments; } """ sub_code = BlogPost.objects._sub_js_fields(code) code_chunks = ['doc["cmnts"];', 'doc["doc-name"],', 'doc["cmnts"][i]["body"]'] for chunk in code_chunks: self.assertTrue(chunk in sub_code) results = BlogPost.objects.exec_js(code) expected_results = [ 
{u'comment': u'cool', u'document': u'post1'}, {u'comment': u'yay', u'document': u'post1'}, {u'comment': u'nice stuff', u'document': u'post2'}, ] self.assertEqual(results, expected_results) # Test template style code = "{{~comments.content}}" sub_code = BlogPost.objects._sub_js_fields(code) self.assertEquals("cmnts.body", sub_code) BlogPost.drop_collection() def test_delete(self): """Ensure that documents are properly deleted from the database. """ self.Person(name="User A", age=20).save() self.Person(name="User B", age=30).save() self.Person(name="User C", age=40).save() self.assertEqual(len(self.Person.objects), 3) self.Person.objects(age__lt=30).delete() self.assertEqual(len(self.Person.objects), 2) self.Person.objects.delete() self.assertEqual(len(self.Person.objects), 0) def test_reverse_delete_rule_cascade(self): """Ensure cascading deletion of referring documents from the database. """ class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) BlogPost.drop_collection() me = self.Person(name='Test User') me.save() someoneelse = self.Person(name='Some-one Else') someoneelse.save() BlogPost(content='Watching TV', author=me).save() BlogPost(content='Chilling out', author=me).save() BlogPost(content='Pro Testing', author=someoneelse).save() self.assertEqual(3, BlogPost.objects.count()) self.Person.objects(name='Test User').delete() self.assertEqual(1, BlogPost.objects.count()) def test_reverse_delete_rule_cascade_self_referencing(self): """Ensure self-referencing CASCADE deletes do not result in infinite loop """ class Category(Document): name = StringField() parent = ReferenceField('self', reverse_delete_rule=CASCADE) num_children = 3 base = Category(name='Root') base.save() # Create a simple parent-child tree for i in range(num_children): child_name = 'Child-%i' % i child = Category(name=child_name, parent=base) child.save() for i in range(num_children): child_child_name = 'Child-Child-%i' % i child_child 
= Category(name=child_child_name, parent=child) child_child.save() tree_size = 1 + num_children + (num_children * num_children) self.assertEquals(tree_size, Category.objects.count()) self.assertEquals(num_children, Category.objects(parent=base).count()) # The delete should effectively wipe out the Category collection # without resulting in infinite parent-child cascade recursion base.delete() self.assertEquals(0, Category.objects.count()) def test_reverse_delete_rule_nullify(self): """Ensure nullification of references to deleted documents. """ class Category(Document): name = StringField() class BlogPost(Document): content = StringField() category = ReferenceField(Category, reverse_delete_rule=NULLIFY) BlogPost.drop_collection() Category.drop_collection() lameness = Category(name='Lameness') lameness.save() post = BlogPost(content='Watching TV', category=lameness) post.save() self.assertEqual(1, BlogPost.objects.count()) self.assertEqual('Lameness', BlogPost.objects.first().category.name) Category.objects.delete() self.assertEqual(1, BlogPost.objects.count()) self.assertEqual(None, BlogPost.objects.first().category) def test_reverse_delete_rule_deny(self): """Ensure deletion gets denied on documents that still have references to them. """ class BlogPost(Document): content = StringField() author = ReferenceField(self.Person, reverse_delete_rule=DENY) BlogPost.drop_collection() self.Person.drop_collection() me = self.Person(name='Test User') me.save() post = BlogPost(content='Watching TV', author=me) post.save() self.assertRaises(OperationError, self.Person.objects.delete) def test_reverse_delete_rule_pull(self): """Ensure pulling of references to deleted documents. 
""" class BlogPost(Document): content = StringField() authors = ListField(ReferenceField(self.Person, reverse_delete_rule=PULL)) BlogPost.drop_collection() self.Person.drop_collection() me = self.Person(name='Test User') me.save() someoneelse = self.Person(name='Some-one Else') someoneelse.save() post = BlogPost(content='Watching TV', authors=[me, someoneelse]) post.save() another = BlogPost(content='Chilling Out', authors=[someoneelse]) another.save() someoneelse.delete() post.reload() another.reload() self.assertEqual(post.authors, [me]) self.assertEqual(another.authors, []) def test_update(self): """Ensure that atomic updates work properly. """ class BlogPost(Document): title = StringField() hits = IntField() tags = ListField(StringField()) BlogPost.drop_collection() post = BlogPost(name="Test Post", hits=5, tags=['test']) post.save() BlogPost.objects.update(set__hits=10) post.reload() self.assertEqual(post.hits, 10) BlogPost.objects.update_one(inc__hits=1) post.reload() self.assertEqual(post.hits, 11) BlogPost.objects.update_one(dec__hits=1) post.reload() self.assertEqual(post.hits, 10) BlogPost.objects.update(push__tags='mongo') post.reload() self.assertTrue('mongo' in post.tags) BlogPost.objects.update_one(push_all__tags=['db', 'nosql']) post.reload() self.assertTrue('db' in post.tags and 'nosql' in post.tags) tags = post.tags[:-1] BlogPost.objects.update(pop__tags=1) post.reload() self.assertEqual(post.tags, tags) BlogPost.objects.update_one(add_to_set__tags='unique') BlogPost.objects.update_one(add_to_set__tags='unique') post.reload() self.assertEqual(post.tags.count('unique'), 1) self.assertNotEqual(post.hits, None) BlogPost.objects.update_one(unset__hits=1) post.reload() self.assertEqual(post.hits, None) BlogPost.drop_collection() def test_update_push_and_pull(self): """Ensure that the 'pull' update operation works correctly. 
""" class BlogPost(Document): slug = StringField() tags = ListField(StringField()) BlogPost.drop_collection() post = BlogPost(slug="test") post.save() BlogPost.objects.filter(id=post.id).update(push__tags="code") post.reload() self.assertEqual(post.tags, ["code"]) BlogPost.objects.filter(id=post.id).update(push_all__tags=["mongodb", "code"]) post.reload() self.assertEqual(post.tags, ["code", "mongodb", "code"]) BlogPost.objects(slug="test").update(pull__tags="code") post.reload() self.assertEqual(post.tags, ["mongodb"]) BlogPost.objects(slug="test").update(pull_all__tags=["mongodb", "code"]) post.reload() self.assertEqual(post.tags, []) BlogPost.objects(slug="test").update(__raw__={"$addToSet": {"tags": {"$each": ["code", "mongodb", "code"]}}}) post.reload() self.assertEqual(post.tags, ["code", "mongodb"]) def test_pull_nested(self): class User(Document): name = StringField() class Collaborator(EmbeddedDocument): user = StringField() def __unicode__(self): return '%s' % self.user class Site(Document): name = StringField(max_length=75, unique=True, required=True) collaborators = ListField(EmbeddedDocumentField(Collaborator)) Site.drop_collection() c = Collaborator(user='Esteban') s = Site(name="test", collaborators=[c]) s.save() Site.objects(id=s.id).update_one(pull__collaborators__user='Esteban') self.assertEqual(Site.objects.first().collaborators, []) def pull_all(): Site.objects(id=s.id).update_one(pull_all__collaborators__user=['Ross']) self.assertRaises(InvalidQueryError, pull_all) def test_update_one_pop_generic_reference(self): class BlogTag(Document): name = StringField(required=True) class BlogPost(Document): slug = StringField() tags = ListField(ReferenceField(BlogTag), required=True) BlogPost.drop_collection() BlogTag.drop_collection() tag_1 = BlogTag(name='code') tag_1.save() tag_2 = BlogTag(name='mongodb') tag_2.save() post = BlogPost(slug="test", tags=[tag_1]) post.save() post = BlogPost(slug="test-2", tags=[tag_1, tag_2]) post.save() 
self.assertEqual(len(post.tags), 2) BlogPost.objects(slug="test-2").update_one(pop__tags=-1) post.reload() self.assertEqual(len(post.tags), 1) BlogPost.drop_collection() BlogTag.drop_collection() def test_editting_embedded_objects(self): class BlogTag(EmbeddedDocument): name = StringField(required=True) class BlogPost(Document): slug = StringField() tags = ListField(EmbeddedDocumentField(BlogTag), required=True) BlogPost.drop_collection() tag_1 = BlogTag(name='code') tag_2 = BlogTag(name='mongodb') post = BlogPost(slug="test", tags=[tag_1]) post.save() post = BlogPost(slug="test-2", tags=[tag_1, tag_2]) post.save() self.assertEqual(len(post.tags), 2) BlogPost.objects(slug="test-2").update_one(set__tags__0__name="python") post.reload() self.assertEquals(post.tags[0].name, 'python') BlogPost.objects(slug="test-2").update_one(pop__tags=-1) post.reload() self.assertEqual(len(post.tags), 1) BlogPost.drop_collection() def test_set_list_embedded_documents(self): class Author(EmbeddedDocument): name = StringField() class Message(Document): title = StringField() authors = ListField(EmbeddedDocumentField('Author')) Message.drop_collection() message = Message(title="hello", authors=[Author(name="Harry")]) message.save() Message.objects(authors__name="Harry").update_one( set__authors__S=Author(name="Ross")) message = message.reload() self.assertEquals(message.authors[0].name, "Ross") Message.objects(authors__name="Ross").update_one( set__authors=[Author(name="Harry"), Author(name="Ross"), Author(name="Adam")]) message = message.reload() self.assertEquals(message.authors[0].name, "Harry") self.assertEquals(message.authors[1].name, "Ross") self.assertEquals(message.authors[2].name, "Adam") def test_order_by(self): """Ensure that QuerySets may be ordered. 
""" self.Person(name="User A", age=20).save() self.Person(name="User B", age=40).save() self.Person(name="User C", age=30).save() names = [p.name for p in self.Person.objects.order_by('-age')] self.assertEqual(names, ['User B', 'User C', 'User A']) names = [p.name for p in self.Person.objects.order_by('+age')] self.assertEqual(names, ['User A', 'User C', 'User B']) names = [p.name for p in self.Person.objects.order_by('age')] self.assertEqual(names, ['User A', 'User C', 'User B']) ages = [p.age for p in self.Person.objects.order_by('-name')] self.assertEqual(ages, [30, 40, 20]) def test_confirm_order_by_reference_wont_work(self): """Ordering by reference is not possible. Use map / reduce.. or denormalise""" class Author(Document): author = ReferenceField(self.Person) Author.drop_collection() person_a = self.Person(name="User A", age=20) person_a.save() person_b = self.Person(name="User B", age=40) person_b.save() person_c = self.Person(name="User C", age=30) person_c.save() Author(author=person_a).save() Author(author=person_b).save() Author(author=person_c).save() names = [a.author.name for a in Author.objects.order_by('-author__age')] self.assertEqual(names, ['User A', 'User B', 'User C']) def test_map_reduce(self): """Ensure map/reduce is both mapping and reducing. 
""" class BlogPost(Document): title = StringField() tags = ListField(StringField(), db_field='post-tag-list') BlogPost.drop_collection() BlogPost(title="Post #1", tags=['music', 'film', 'print']).save() BlogPost(title="Post #2", tags=['music', 'film']).save() BlogPost(title="Post #3", tags=['film', 'photography']).save() map_f = """ function() { this[~tags].forEach(function(tag) { emit(tag, 1); }); } """ reduce_f = """ function(key, values) { var total = 0; for(var i=0; i 0) { y = 1; } else if (x = 0) { y = 0; } else { y = -1; } // calculate 'Z', the maximal value if(Math.abs(x) >= 1) { z = Math.abs(x); } else { z = 1; } return {x: x, y: y, z: z, t_s: sec_since_epoch}; } """ finalize_f = """ function(key, value) { // f(sec_since_epoch,y,z) = // log10(z) + ((y*sec_since_epoch) / 45000) z_10 = Math.log(value.z) / Math.log(10); weight = z_10 + ((value.y * value.t_s) / 45000); return weight; } """ # provide the reddit epoch (used for ranking) as a variable available # to all phases of the map/reduce operation: map, reduce, and finalize. reddit_epoch = mktime(datetime(2005, 12, 8, 7, 46, 43).timetuple()) scope = {'reddit_epoch': reddit_epoch} # run a map/reduce operation across all links. ordering is set # to "-value", which orders the "weight" value returned from # "finalize_f" in descending order. results = Link.objects.order_by("-value") results = results.map_reduce(map_f, reduce_f, "myresults", finalize_f=finalize_f, scope=scope) results = list(results) # assert troublesome Buzz article is ranked 1st self.assertTrue(results[0].object.title.startswith("Google Buzz")) # assert laser vision is ranked last self.assertTrue(results[-1].object.title.startswith("How to see")) Link.drop_collection() def test_item_frequencies(self): """Ensure that item frequencies are properly generated from lists. 
""" class BlogPost(Document): hits = IntField() tags = ListField(StringField(), db_field='blogTags') BlogPost.drop_collection() BlogPost(hits=1, tags=['music', 'film', 'actors', 'watch']).save() BlogPost(hits=2, tags=['music', 'watch']).save() BlogPost(hits=2, tags=['music', 'actors']).save() def test_assertions(f): f = dict((key, int(val)) for key, val in f.items()) self.assertEqual(set(['music', 'film', 'actors', 'watch']), set(f.keys())) self.assertEqual(f['music'], 3) self.assertEqual(f['actors'], 2) self.assertEqual(f['watch'], 2) self.assertEqual(f['film'], 1) exec_js = BlogPost.objects.item_frequencies('tags') map_reduce = BlogPost.objects.item_frequencies('tags', map_reduce=True) test_assertions(exec_js) test_assertions(map_reduce) # Ensure query is taken into account def test_assertions(f): f = dict((key, int(val)) for key, val in f.items()) self.assertEqual(set(['music', 'actors', 'watch']), set(f.keys())) self.assertEqual(f['music'], 2) self.assertEqual(f['actors'], 1) self.assertEqual(f['watch'], 1) exec_js = BlogPost.objects(hits__gt=1).item_frequencies('tags') map_reduce = BlogPost.objects(hits__gt=1).item_frequencies('tags', map_reduce=True) test_assertions(exec_js) test_assertions(map_reduce) # Check that normalization works def test_assertions(f): self.assertAlmostEqual(f['music'], 3.0/8.0) self.assertAlmostEqual(f['actors'], 2.0/8.0) self.assertAlmostEqual(f['watch'], 2.0/8.0) self.assertAlmostEqual(f['film'], 1.0/8.0) exec_js = BlogPost.objects.item_frequencies('tags', normalize=True) map_reduce = BlogPost.objects.item_frequencies('tags', normalize=True, map_reduce=True) test_assertions(exec_js) test_assertions(map_reduce) # Check item_frequencies works for non-list fields def test_assertions(f): self.assertEqual(set(['1', '2']), set(f.keys())) self.assertEqual(f['1'], 1) self.assertEqual(f['2'], 2) exec_js = BlogPost.objects.item_frequencies('hits') map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True) test_assertions(exec_js) 
test_assertions(map_reduce) BlogPost.drop_collection() def test_item_frequencies_on_embedded(self): """Ensure that item frequencies are properly generated from lists. """ class Phone(EmbeddedDocument): number = StringField() class Person(Document): name = StringField() phone = EmbeddedDocumentField(Phone) Person.drop_collection() doc = Person(name="Guido") doc.phone = Phone(number='62-3331-1656') doc.save() doc = Person(name="Marr") doc.phone = Phone(number='62-3331-1656') doc.save() doc = Person(name="WP Junior") doc.phone = Phone(number='62-3332-1656') doc.save() def test_assertions(f): f = dict((key, int(val)) for key, val in f.items()) self.assertEqual(set(['62-3331-1656', '62-3332-1656']), set(f.keys())) self.assertEqual(f['62-3331-1656'], 2) self.assertEqual(f['62-3332-1656'], 1) exec_js = Person.objects.item_frequencies('phone.number') map_reduce = Person.objects.item_frequencies('phone.number', map_reduce=True) test_assertions(exec_js) test_assertions(map_reduce) # Ensure query is taken into account def test_assertions(f): f = dict((key, int(val)) for key, val in f.items()) self.assertEqual(set(['62-3331-1656']), set(f.keys())) self.assertEqual(f['62-3331-1656'], 2) exec_js = Person.objects(phone__number='62-3331-1656').item_frequencies('phone.number') map_reduce = Person.objects(phone__number='62-3331-1656').item_frequencies('phone.number', map_reduce=True) test_assertions(exec_js) test_assertions(map_reduce) # Check that normalization works def test_assertions(f): self.assertEqual(f['62-3331-1656'], 2.0/3.0) self.assertEqual(f['62-3332-1656'], 1.0/3.0) exec_js = Person.objects.item_frequencies('phone.number', normalize=True) map_reduce = Person.objects.item_frequencies('phone.number', normalize=True, map_reduce=True) test_assertions(exec_js) test_assertions(map_reduce) def test_item_frequencies_null_values(self): class Person(Document): name = StringField() city = StringField() Person.drop_collection() Person(name="Wilson Snr", city="CRB").save() 
Person(name="Wilson Jr").save() freq = Person.objects.item_frequencies('city') self.assertEquals(freq, {'CRB': 1.0, None: 1.0}) freq = Person.objects.item_frequencies('city', normalize=True) self.assertEquals(freq, {'CRB': 0.5, None: 0.5}) freq = Person.objects.item_frequencies('city', map_reduce=True) self.assertEquals(freq, {'CRB': 1.0, None: 1.0}) freq = Person.objects.item_frequencies('city', normalize=True, map_reduce=True) self.assertEquals(freq, {'CRB': 0.5, None: 0.5}) def test_item_frequencies_with_null_embedded(self): class Data(EmbeddedDocument): name = StringField() class Extra(EmbeddedDocument): tag = StringField() class Person(Document): data = EmbeddedDocumentField(Data, required=True) extra = EmbeddedDocumentField(Extra) Person.drop_collection() p = Person() p.data = Data(name="Wilson Jr") p.save() p = Person() p.data = Data(name="Wesley") p.extra = Extra(tag="friend") p.save() ot = Person.objects.item_frequencies('extra.tag', map_reduce=False) self.assertEquals(ot, {None: 1.0, u'friend': 1.0}) ot = Person.objects.item_frequencies('extra.tag', map_reduce=True) self.assertEquals(ot, {None: 1.0, u'friend': 1.0}) def test_average(self): """Ensure that field can be averaged correctly. """ self.Person(name='person', age=0).save() self.assertEqual(int(self.Person.objects.average('age')), 0) ages = [23, 54, 12, 94, 27] for i, age in enumerate(ages): self.Person(name='test%s' % i, age=age).save() avg = float(sum(ages)) / (len(ages) + 1) # take into account the 0 self.assertAlmostEqual(int(self.Person.objects.average('age')), avg) self.Person(name='ageless person').save() self.assertEqual(int(self.Person.objects.average('age')), avg) def test_sum(self): """Ensure that field can be summed over correctly. 
""" ages = [23, 54, 12, 94, 27] for i, age in enumerate(ages): self.Person(name='test%s' % i, age=age).save() self.assertEqual(int(self.Person.objects.sum('age')), sum(ages)) self.Person(name='ageless person').save() self.assertEqual(int(self.Person.objects.sum('age')), sum(ages)) def test_distinct(self): """Ensure that the QuerySet.distinct method works. """ self.Person(name='Mr Orange', age=20).save() self.Person(name='Mr White', age=20).save() self.Person(name='Mr Orange', age=30).save() self.Person(name='Mr Pink', age=30).save() self.assertEqual(set(self.Person.objects.distinct('name')), set(['Mr Orange', 'Mr White', 'Mr Pink'])) self.assertEqual(set(self.Person.objects.distinct('age')), set([20, 30])) self.assertEqual(set(self.Person.objects(age=30).distinct('name')), set(['Mr Orange', 'Mr Pink'])) def test_distinct_handles_references(self): class Foo(Document): bar = ReferenceField("Bar") class Bar(Document): text = StringField() Bar.drop_collection() Foo.drop_collection() bar = Bar(text="hi") bar.save() foo = Foo(bar=bar) foo.save() self.assertEquals(Foo.objects.distinct("bar"), [bar]) def test_custom_manager(self): """Ensure that custom QuerySetManager instances work as expected. 
""" class BlogPost(Document): tags = ListField(StringField()) deleted = BooleanField(default=False) date = DateTimeField(default=datetime.now) @queryset_manager def objects(doc_cls, queryset): return queryset(deleted=False) @queryset_manager def music_posts(doc_cls, queryset): return queryset(tags='music', deleted=False).order_by('-date') BlogPost.drop_collection() post1 = BlogPost(tags=['music', 'film']) post1.save() post2 = BlogPost(tags=['music']) post2.save() post3 = BlogPost(tags=['film', 'actors']) post3.save() post4 = BlogPost(tags=['film', 'actors'], deleted=True) post4.save() self.assertEqual([p.id for p in BlogPost.objects], [post1.id, post2.id, post3.id]) self.assertEqual([p.id for p in BlogPost.music_posts], [post2.id, post1.id]) BlogPost.drop_collection() def test_query_field_name(self): """Ensure that the correct field name is used when querying. """ class Comment(EmbeddedDocument): content = StringField(db_field='commentContent') class BlogPost(Document): title = StringField(db_field='postTitle') comments = ListField(EmbeddedDocumentField(Comment), db_field='postComments') BlogPost.drop_collection() data = {'title': 'Post 1', 'comments': [Comment(content='test')]} post = BlogPost(**data) post.save() self.assertTrue('postTitle' in BlogPost.objects(title=data['title'])._query) self.assertFalse('title' in BlogPost.objects(title=data['title'])._query) self.assertEqual(len(BlogPost.objects(title=data['title'])), 1) self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query) self.assertEqual(len(BlogPost.objects(pk=post.id)), 1) self.assertTrue('postComments.commentContent' in BlogPost.objects(comments__content='test')._query) self.assertEqual(len(BlogPost.objects(comments__content='test')), 1) BlogPost.drop_collection() def test_query_pk_field_name(self): """Ensure that the correct "primary key" field name is used when querying """ class BlogPost(Document): title = StringField(primary_key=True, db_field='postTitle') BlogPost.drop_collection() data = { 
'title':'Post 1' } post = BlogPost(**data) post.save() self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query) self.assertTrue('_id' in BlogPost.objects(title=data['title'])._query) self.assertEqual(len(BlogPost.objects(pk=data['title'])), 1) BlogPost.drop_collection() def test_query_value_conversion(self): """Ensure that query values are properly converted when necessary. """ class BlogPost(Document): author = ReferenceField(self.Person) BlogPost.drop_collection() person = self.Person(name='test', age=30) person.save() post = BlogPost(author=person) post.save() # Test that query may be performed by providing a document as a value # while using a ReferenceField's name - the document should be # converted to an DBRef, which is legal, unlike a Document object post_obj = BlogPost.objects(author=person).first() self.assertEqual(post.id, post_obj.id) # Test that lists of values work when using the 'in', 'nin' and 'all' post_obj = BlogPost.objects(author__in=[person]).first() self.assertEqual(post.id, post_obj.id) BlogPost.drop_collection() def test_update_value_conversion(self): """Ensure that values used in updates are converted before use. """ class Group(Document): members = ListField(ReferenceField(self.Person)) Group.drop_collection() user1 = self.Person(name='user1') user1.save() user2 = self.Person(name='user2') user2.save() group = Group() group.save() Group.objects(id=group.id).update(set__members=[user1, user2]) group.reload() self.assertTrue(len(group.members) == 2) self.assertEqual(group.members[0].name, user1.name) self.assertEqual(group.members[1].name, user2.name) Group.drop_collection() def test_types_index(self): """Ensure that and index is used when '_types' is being used in a query. 
""" class BlogPost(Document): date = DateTimeField() meta = {'indexes': ['-date']} # Indexes are lazy so use list() to perform query list(BlogPost.objects) info = BlogPost.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] self.assertTrue([('_types', 1)] in info) self.assertTrue([('_types', 1), ('date', -1)] in info) def test_dont_index_types(self): """Ensure that index_types will, when disabled, prevent _types being added to all indices. """ class BlogPost(Document): date = DateTimeField() meta = {'index_types': False, 'indexes': ['-date']} # Indexes are lazy so use list() to perform query list(BlogPost.objects) info = BlogPost.objects._collection.index_information() info = [value['key'] for key, value in info.iteritems()] self.assertTrue([('_types', 1)] not in info) self.assertTrue([('date', -1)] in info) BlogPost.drop_collection() class BlogPost(Document): title = StringField() meta = {'allow_inheritance': False} # _types is not used on objects where allow_inheritance is False list(BlogPost.objects) info = BlogPost.objects._collection.index_information() self.assertFalse([('_types', 1)] in info.values()) BlogPost.drop_collection() def test_dict_with_custom_baseclass(self): """Ensure DictField working with custom base clases. """ class Test(Document): testdict = DictField() Test.drop_collection() t = Test(testdict={'f': 'Value'}) t.save() self.assertEqual(len(Test.objects(testdict__f__startswith='Val')), 1) self.assertEqual(len(Test.objects(testdict__f='Value')), 1) Test.drop_collection() class Test(Document): testdict = DictField(basecls=StringField) t = Test(testdict={'f': 'Value'}) t.save() self.assertEqual(len(Test.objects(testdict__f='Value')), 1) self.assertEqual(len(Test.objects(testdict__f__startswith='Val')), 1) Test.drop_collection() def test_bulk(self): """Ensure bulk querying by object id returns a proper dict. 
""" class BlogPost(Document): title = StringField() BlogPost.drop_collection() post_1 = BlogPost(title="Post #1") post_2 = BlogPost(title="Post #2") post_3 = BlogPost(title="Post #3") post_4 = BlogPost(title="Post #4") post_5 = BlogPost(title="Post #5") post_1.save() post_2.save() post_3.save() post_4.save() post_5.save() ids = [post_1.id, post_2.id, post_5.id] objects = BlogPost.objects.in_bulk(ids) self.assertEqual(len(objects), 3) self.assertTrue(post_1.id in objects) self.assertTrue(post_2.id in objects) self.assertTrue(post_5.id in objects) self.assertTrue(objects[post_1.id].title == post_1.title) self.assertTrue(objects[post_2.id].title == post_2.title) self.assertTrue(objects[post_5.id].title == post_5.title) BlogPost.drop_collection() def tearDown(self): self.Person.drop_collection() def test_geospatial_operators(self): """Ensure that geospatial queries are working. """ class Event(Document): title = StringField() date = DateTimeField() location = GeoPointField() def __unicode__(self): return self.title Event.drop_collection() event1 = Event(title="Coltrane Motion @ Double Door", date=datetime.now() - timedelta(days=1), location=[41.909889, -87.677137]) event2 = Event(title="Coltrane Motion @ Bottom of the Hill", date=datetime.now() - timedelta(days=10), location=[37.7749295, -122.4194155]) event3 = Event(title="Coltrane Motion @ Empty Bottle", date=datetime.now(), location=[41.900474, -87.686638]) event1.save() event2.save() event3.save() # find all events "near" pitchfork office, chicago. # note that "near" will show the san francisco event, too, # although it sorts to last. 
events = Event.objects(location__near=[41.9120459, -87.67892]) self.assertEqual(events.count(), 3) self.assertEqual(list(events), [event1, event3, event2]) # find events within 5 degrees of pitchfork office, chicago point_and_distance = [[41.9120459, -87.67892], 5] events = Event.objects(location__within_distance=point_and_distance) self.assertEqual(events.count(), 2) events = list(events) self.assertTrue(event2 not in events) self.assertTrue(event1 in events) self.assertTrue(event3 in events) # ensure ordering is respected by "near" events = Event.objects(location__near=[41.9120459, -87.67892]) events = events.order_by("-date") self.assertEqual(events.count(), 3) self.assertEqual(list(events), [event3, event1, event2]) # find events within 10 degrees of san francisco point_and_distance = [[37.7566023, -122.415579], 10] events = Event.objects(location__within_distance=point_and_distance) self.assertEqual(events.count(), 1) self.assertEqual(events[0], event2) # find events within 1 degree of greenpoint, broolyn, nyc, ny point_and_distance = [[40.7237134, -73.9509714], 1] events = Event.objects(location__within_distance=point_and_distance) self.assertEqual(events.count(), 0) # ensure ordering is respected by "within_distance" point_and_distance = [[41.9120459, -87.67892], 10] events = Event.objects(location__within_distance=point_and_distance) events = events.order_by("-date") self.assertEqual(events.count(), 2) self.assertEqual(events[0], event3) # check that within_box works box = [(35.0, -125.0), (40.0, -100.0)] events = Event.objects(location__within_box=box) self.assertEqual(events.count(), 1) self.assertEqual(events[0].id, event2.id) # check that polygon works for users who have a server >= 1.9 server_version = tuple( get_connection().server_info()['version'].split('.') ) required_version = tuple("1.9.0".split(".")) if server_version >= required_version: polygon = [ (41.912114,-87.694445), (41.919395,-87.69084), (41.927186,-87.681742), (41.911731,-87.654276), 
(41.898061,-87.656164), ] events = Event.objects(location__within_polygon=polygon) self.assertEqual(events.count(), 1) self.assertEqual(events[0].id, event1.id) polygon2 = [ (54.033586,-1.742249), (52.792797,-1.225891), (53.389881,-4.40094) ] events = Event.objects(location__within_polygon=polygon2) self.assertEqual(events.count(), 0) Event.drop_collection() def test_spherical_geospatial_operators(self): """Ensure that spherical geospatial queries are working """ class Point(Document): location = GeoPointField() Point.drop_collection() # These points are one degree apart, which (according to Google Maps) # is about 110 km apart at this place on the Earth. north_point = Point(location=[-122, 38]) # Near Concord, CA south_point = Point(location=[-122, 37]) # Near Santa Cruz, CA north_point.save() south_point.save() earth_radius = 6378.009; # in km (needs to be a float for dividing by) # Finds both points because they are within 60 km of the reference # point equidistant between them. points = Point.objects(location__near_sphere=[-122, 37.5]) self.assertEqual(points.count(), 2) # Same behavior for _within_spherical_distance points = Point.objects( location__within_spherical_distance=[[-122, 37.5], 60/earth_radius] ); self.assertEqual(points.count(), 2) # Finds both points, but orders the north point first because it's # closer to the reference point to the north. points = Point.objects(location__near_sphere=[-122, 38.5]) self.assertEqual(points.count(), 2) self.assertEqual(points[0].id, north_point.id) self.assertEqual(points[1].id, south_point.id) # Finds both points, but orders the south point first because it's # closer to the reference point to the south. points = Point.objects(location__near_sphere=[-122, 36.5]) self.assertEqual(points.count(), 2) self.assertEqual(points[0].id, south_point.id) self.assertEqual(points[1].id, north_point.id) # Finds only one point because only the first point is within 60km of # the reference point to the south. 
points = Point.objects( location__within_spherical_distance=[[-122, 36.5], 60/earth_radius] ); self.assertEqual(points.count(), 1) self.assertEqual(points[0].id, south_point.id) Point.drop_collection() def test_custom_querysets(self): """Ensure that custom QuerySet classes may be used. """ class CustomQuerySet(QuerySet): def not_empty(self): return len(self) > 0 class Post(Document): meta = {'queryset_class': CustomQuerySet} Post.drop_collection() self.assertTrue(isinstance(Post.objects, CustomQuerySet)) self.assertFalse(Post.objects.not_empty()) Post().save() self.assertTrue(Post.objects.not_empty()) Post.drop_collection() def test_custom_querysets_set_manager_directly(self): """Ensure that custom QuerySet classes may be used. """ class CustomQuerySet(QuerySet): def not_empty(self): return len(self) > 0 class CustomQuerySetManager(QuerySetManager): queryset_class = CustomQuerySet class Post(Document): objects = CustomQuerySetManager() Post.drop_collection() self.assertTrue(isinstance(Post.objects, CustomQuerySet)) self.assertFalse(Post.objects.not_empty()) Post().save() self.assertTrue(Post.objects.not_empty()) Post.drop_collection() def test_custom_querysets_managers_directly(self): """Ensure that custom QuerySet classes may be used. """ class CustomQuerySetManager(QuerySetManager): @staticmethod def get_queryset(doc_cls, queryset): return queryset(is_published=True) class Post(Document): is_published = BooleanField(default=False) published = CustomQuerySetManager() Post.drop_collection() Post().save() Post(is_published=True).save() self.assertEquals(Post.objects.count(), 2) self.assertEquals(Post.published.count(), 1) Post.drop_collection() def test_custom_querysets_inherited(self): """Ensure that custom QuerySet classes may be used. 
""" class CustomQuerySet(QuerySet): def not_empty(self): return len(self) > 0 class Base(Document): meta = {'abstract': True, 'queryset_class': CustomQuerySet} class Post(Base): pass Post.drop_collection() self.assertTrue(isinstance(Post.objects, CustomQuerySet)) self.assertFalse(Post.objects.not_empty()) Post().save() self.assertTrue(Post.objects.not_empty()) Post.drop_collection() def test_custom_querysets_inherited_direct(self): """Ensure that custom QuerySet classes may be used. """ class CustomQuerySet(QuerySet): def not_empty(self): return len(self) > 0 class CustomQuerySetManager(QuerySetManager): queryset_class = CustomQuerySet class Base(Document): meta = {'abstract': True} objects = CustomQuerySetManager() class Post(Base): pass Post.drop_collection() self.assertTrue(isinstance(Post.objects, CustomQuerySet)) self.assertFalse(Post.objects.not_empty()) Post().save() self.assertTrue(Post.objects.not_empty()) Post.drop_collection() def test_call_after_limits_set(self): """Ensure that re-filtering after slicing works """ class Post(Document): title = StringField() Post.drop_collection() post1 = Post(title="Post 1") post1.save() post2 = Post(title="Post 2") post2.save() posts = Post.objects.all()[0:1] self.assertEqual(len(list(posts())), 1) Post.drop_collection() def test_order_then_filter(self): """Ensure that ordering still works after filtering. 
""" class Number(Document): n = IntField() Number.drop_collection() n2 = Number.objects.create(n=2) n1 = Number.objects.create(n=1) self.assertEqual(list(Number.objects), [n2, n1]) self.assertEqual(list(Number.objects.order_by('n')), [n1, n2]) self.assertEqual(list(Number.objects.order_by('n').filter()), [n1, n2]) Number.drop_collection() def test_clone(self): """Ensure that cloning clones complex querysets """ class Number(Document): n = IntField() Number.drop_collection() for i in xrange(1, 101): t = Number(n=i) t.save() test = Number.objects test2 = test.clone() self.assertFalse(test == test2) self.assertEqual(test.count(), test2.count()) test = test.filter(n__gt=11) test2 = test.clone() self.assertFalse(test == test2) self.assertEqual(test.count(), test2.count()) test = test.limit(10) test2 = test.clone() self.assertFalse(test == test2) self.assertEqual(test.count(), test2.count()) Number.drop_collection() def test_unset_reference(self): class Comment(Document): text = StringField() class Post(Document): comment = ReferenceField(Comment) Comment.drop_collection() Post.drop_collection() comment = Comment.objects.create(text='test') post = Post.objects.create(comment=comment) self.assertEqual(post.comment, comment) Post.objects.update(unset__comment=1) post.reload() self.assertEqual(post.comment, None) Comment.drop_collection() Post.drop_collection() def test_order_works_with_custom_db_field_names(self): class Number(Document): n = IntField(db_field='number') Number.drop_collection() n2 = Number.objects.create(n=2) n1 = Number.objects.create(n=1) self.assertEqual(list(Number.objects), [n2,n1]) self.assertEqual(list(Number.objects.order_by('n')), [n1,n2]) Number.drop_collection() def test_order_works_with_primary(self): """Ensure that order_by and primary work. 
""" class Number(Document): n = IntField(primary_key=True) Number.drop_collection() Number(n=1).save() Number(n=2).save() Number(n=3).save() numbers = [n.n for n in Number.objects.order_by('-n')] self.assertEquals([3, 2, 1], numbers) numbers = [n.n for n in Number.objects.order_by('+n')] self.assertEquals([1, 2, 3], numbers) Number.drop_collection() def test_ensure_index(self): """Ensure that manual creation of indexes works. """ class Comment(Document): message = StringField() Comment.objects.ensure_index('message') info = Comment.objects._collection.index_information() info = [(value['key'], value.get('unique', False), value.get('sparse', False)) for key, value in info.iteritems()] self.assertTrue(([('_types', 1), ('message', 1)], False, False) in info) def test_where(self): """Ensure that where clauses work. """ class IntPair(Document): fielda = IntField() fieldb = IntField() IntPair.objects._collection.remove() a = IntPair(fielda=1, fieldb=1) b = IntPair(fielda=1, fieldb=2) c = IntPair(fielda=2, fieldb=1) a.save() b.save() c.save() query = IntPair.objects.where('this[~fielda] >= this[~fieldb]') self.assertEqual('this["fielda"] >= this["fieldb"]', query._where_clause) results = list(query) self.assertEqual(2, len(results)) self.assertTrue(a in results) self.assertTrue(c in results) query = IntPair.objects.where('this[~fielda] == this[~fieldb]') results = list(query) self.assertEqual(1, len(results)) self.assertTrue(a in results) query = IntPair.objects.where('function() { return this[~fielda] >= this[~fieldb] }') self.assertEqual('function() { return this["fielda"] >= this["fieldb"] }', query._where_clause) results = list(query) self.assertEqual(2, len(results)) self.assertTrue(a in results) self.assertTrue(c in results) def invalid_where(): list(IntPair.objects.where(fielda__gte=3)) self.assertRaises(TypeError, invalid_where) def test_scalar(self): class Organization(Document): id = ObjectIdField('_id') name = StringField() class User(Document): id = 
ObjectIdField('_id') name = StringField() organization = ObjectIdField() User.drop_collection() Organization.drop_collection() whitehouse = Organization(name="White House") whitehouse.save() User(name="Bob Dole", organization=whitehouse.id).save() # Efficient way to get all unique organization names for a given # set of users (Pretend this has additional filtering.) user_orgs = set(User.objects.scalar('organization')) orgs = Organization.objects(id__in=user_orgs).scalar('name') self.assertEqual(list(orgs), ['White House']) # Efficient for generating listings, too. orgs = Organization.objects.scalar('name').in_bulk(list(user_orgs)) user_map = User.objects.scalar('name', 'organization') user_listing = [(user, orgs[org]) for user, org in user_map] self.assertEqual([("Bob Dole", "White House")], user_listing) def test_scalar_simple(self): class TestDoc(Document): x = IntField() y = BooleanField() TestDoc.drop_collection() TestDoc(x=10, y=True).save() TestDoc(x=20, y=False).save() TestDoc(x=30, y=True).save() plist = list(TestDoc.objects.scalar('x', 'y')) self.assertEqual(len(plist), 3) self.assertEqual(plist[0], (10, True)) self.assertEqual(plist[1], (20, False)) self.assertEqual(plist[2], (30, True)) class UserDoc(Document): name = StringField() age = IntField() UserDoc.drop_collection() UserDoc(name="Wilson Jr", age=19).save() UserDoc(name="Wilson", age=43).save() UserDoc(name="Eliana", age=37).save() UserDoc(name="Tayza", age=15).save() ulist = list(UserDoc.objects.scalar('name', 'age')) self.assertEqual(ulist, [ (u'Wilson Jr', 19), (u'Wilson', 43), (u'Eliana', 37), (u'Tayza', 15)]) ulist = list(UserDoc.objects.scalar('name').order_by('age')) self.assertEqual(ulist, [ (u'Tayza'), (u'Wilson Jr'), (u'Eliana'), (u'Wilson')]) def test_scalar_embedded(self): class Profile(EmbeddedDocument): name = StringField() age = IntField() class Locale(EmbeddedDocument): city = StringField() country = StringField() class Person(Document): profile = EmbeddedDocumentField(Profile) 
locale = EmbeddedDocumentField(Locale) Person.drop_collection() Person(profile=Profile(name="Wilson Jr", age=19), locale=Locale(city="Corumba-GO", country="Brazil")).save() Person(profile=Profile(name="Gabriel Falcao", age=23), locale=Locale(city="New York", country="USA")).save() Person(profile=Profile(name="Lincoln de souza", age=28), locale=Locale(city="Belo Horizonte", country="Brazil")).save() Person(profile=Profile(name="Walter cruz", age=30), locale=Locale(city="Brasilia", country="Brazil")).save() self.assertEqual( list(Person.objects.order_by('profile__age').scalar('profile__name')), [u'Wilson Jr', u'Gabriel Falcao', u'Lincoln de souza', u'Walter cruz']) ulist = list(Person.objects.order_by('locale.city') .scalar('profile__name', 'profile__age', 'locale__city')) self.assertEqual(ulist, [(u'Lincoln de souza', 28, u'Belo Horizonte'), (u'Walter cruz', 30, u'Brasilia'), (u'Wilson Jr', 19, u'Corumba-GO'), (u'Gabriel Falcao', 23, u'New York')]) def test_scalar_decimal(self): from decimal import Decimal class Person(Document): name = StringField() rating = DecimalField() Person.drop_collection() Person(name="Wilson Jr", rating=Decimal('1.0')).save() ulist = list(Person.objects.scalar('name', 'rating')) self.assertEqual(ulist, [(u'Wilson Jr', Decimal('1.0'))]) def test_scalar_reference_field(self): class State(Document): name = StringField() class Person(Document): name = StringField() state = ReferenceField(State) State.drop_collection() Person.drop_collection() s1 = State(name="Goias") s1.save() Person(name="Wilson JR", state=s1).save() plist = list(Person.objects.scalar('name', 'state')) self.assertEqual(plist, [(u'Wilson JR', s1)]) def test_scalar_generic_reference_field(self): class State(Document): name = StringField() class Person(Document): name = StringField() state = GenericReferenceField() State.drop_collection() Person.drop_collection() s1 = State(name="Goias") s1.save() Person(name="Wilson JR", state=s1).save() plist = 
list(Person.objects.scalar('name', 'state')) self.assertEqual(plist, [(u'Wilson JR', s1)]) def test_scalar_db_field(self): class TestDoc(Document): x = IntField() y = BooleanField() TestDoc.drop_collection() TestDoc(x=10, y=True).save() TestDoc(x=20, y=False).save() TestDoc(x=30, y=True).save() plist = list(TestDoc.objects.scalar('x', 'y')) self.assertEqual(len(plist), 3) self.assertEqual(plist[0], (10, True)) self.assertEqual(plist[1], (20, False)) self.assertEqual(plist[2], (30, True)) def test_scalar_primary_key(self): class SettingValue(Document): key = StringField(primary_key=True) value = StringField() SettingValue.drop_collection() s = SettingValue(key="test", value="test value") s.save() val = SettingValue.objects.scalar('key', 'value') self.assertEqual(list(val), [('test', 'test value')]) def test_scalar_cursor_behaviour(self): """Ensure that a query returns a valid set of results. """ person1 = self.Person(name="User A", age=20) person1.save() person2 = self.Person(name="User B", age=30) person2.save() # Find all people in the collection people = self.Person.objects.scalar('name') self.assertEqual(len(people), 2) results = list(people) self.assertEqual(results[0], "User A") self.assertEqual(results[1], "User B") # Use a query to filter the people found to just person1 people = self.Person.objects(age=20).scalar('name') self.assertEqual(len(people), 1) person = people.next() self.assertEqual(person, "User A") # Test limit people = list(self.Person.objects.limit(1).scalar('name')) self.assertEqual(len(people), 1) self.assertEqual(people[0], 'User A') # Test skip people = list(self.Person.objects.skip(1).scalar('name')) self.assertEqual(len(people), 1) self.assertEqual(people[0], 'User B') person3 = self.Person(name="User C", age=40) person3.save() # Test slice limit people = list(self.Person.objects[:2].scalar('name')) self.assertEqual(len(people), 2) self.assertEqual(people[0], 'User A') self.assertEqual(people[1], 'User B') # Test slice skip people = 
list(self.Person.objects[1:].scalar('name')) self.assertEqual(len(people), 2) self.assertEqual(people[0], 'User B') self.assertEqual(people[1], 'User C') # Test slice limit and skip people = list(self.Person.objects[1:2].scalar('name')) self.assertEqual(len(people), 1) self.assertEqual(people[0], 'User B') people = list(self.Person.objects[1:1].scalar('name')) self.assertEqual(len(people), 0) # Test slice out of range people = list(self.Person.objects.scalar('name')[80000:80001]) self.assertEqual(len(people), 0) # Test larger slice __repr__ self.Person.objects.delete() for i in xrange(55): self.Person(name='A%s' % i, age=i).save() self.assertEqual(len(self.Person.objects.scalar('name')), 55) self.assertEqual("A0", "%s" % self.Person.objects.order_by('name').scalar('name').first()) self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0]) self.assertEqual("[u'A1', u'A2']", "%s" % self.Person.objects.order_by('age').scalar('name')[1:3]) self.assertEqual("[u'A51', u'A52']", "%s" % self.Person.objects.order_by('age').scalar('name')[51:53]) # with_id and in_bulk person = self.Person.objects.order_by('name').first() self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').with_id(person.id)) pks = self.Person.objects.order_by('age').scalar('pk')[1:3] self.assertEqual("[u'A1', u'A2']", "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values())) class QTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') def test_empty_q(self): """Ensure that empty Q objects won't hurt. 
""" q1 = Q() q2 = Q(age__gte=18) q3 = Q() q4 = Q(name='test') q5 = Q() class Person(Document): name = StringField() age = IntField() query = {'$or': [{'age': {'$gte': 18}}, {'name': 'test'}]} self.assertEqual((q1 | q2 | q3 | q4 | q5).to_query(Person), query) query = {'age': {'$gte': 18}, 'name': 'test'} self.assertEqual((q1 & q2 & q3 & q4 & q5).to_query(Person), query) def test_q_with_dbref(self): """Ensure Q objects handle DBRefs correctly""" connect(db='mongoenginetest') class User(Document): pass class Post(Document): created_user = ReferenceField(User) user = User.objects.create() Post.objects.create(created_user=user) self.assertEqual(Post.objects.filter(created_user=user).count(), 1) self.assertEqual(Post.objects.filter(Q(created_user=user)).count(), 1) def test_and_combination(self): """Ensure that Q-objects correctly AND together. """ class TestDoc(Document): x = IntField() y = StringField() # Check than an error is raised when conflicting queries are anded def invalid_combination(): query = Q(x__lt=7) & Q(x__lt=3) query.to_query(TestDoc) self.assertRaises(InvalidQueryError, invalid_combination) # Check normal cases work without an error query = Q(x__lt=7) & Q(x__gt=3) q1 = Q(x__lt=7) q2 = Q(x__gt=3) query = (q1 & q2).to_query(TestDoc) self.assertEqual(query, {'x': {'$lt': 7, '$gt': 3}}) # More complex nested example query = Q(x__lt=100) & Q(y__ne='NotMyString') query &= Q(y__in=['a', 'b', 'c']) & Q(x__gt=-100) mongo_query = { 'x': {'$lt': 100, '$gt': -100}, 'y': {'$ne': 'NotMyString', '$in': ['a', 'b', 'c']}, } self.assertEqual(query.to_query(TestDoc), mongo_query) def test_or_combination(self): """Ensure that Q-objects correctly OR together. """ class TestDoc(Document): x = IntField() q1 = Q(x__lt=3) q2 = Q(x__gt=7) query = (q1 | q2).to_query(TestDoc) self.assertEqual(query, { '$or': [ {'x': {'$lt': 3}}, {'x': {'$gt': 7}}, ] }) def test_and_or_combination(self): """Ensure that Q-objects handle ANDing ORed components. 
""" class TestDoc(Document): x = IntField() y = BooleanField() query = (Q(x__gt=0) | Q(x__exists=False)) query &= Q(x__lt=100) self.assertEqual(query.to_query(TestDoc), { '$or': [ {'x': {'$lt': 100, '$gt': 0}}, {'x': {'$lt': 100, '$exists': False}}, ] }) q1 = (Q(x__gt=0) | Q(x__exists=False)) q2 = (Q(x__lt=100) | Q(y=True)) query = (q1 & q2).to_query(TestDoc) self.assertEqual(['$or'], query.keys()) conditions = [ {'x': {'$lt': 100, '$gt': 0}}, {'x': {'$lt': 100, '$exists': False}}, {'x': {'$gt': 0}, 'y': True}, {'x': {'$exists': False}, 'y': True}, ] self.assertEqual(len(conditions), len(query['$or'])) for condition in conditions: self.assertTrue(condition in query['$or']) def test_or_and_or_combination(self): """Ensure that Q-objects handle ORing ANDed ORed components. :) """ class TestDoc(Document): x = IntField() y = BooleanField() q1 = (Q(x__gt=0) & (Q(y=True) | Q(y__exists=False))) q2 = (Q(x__lt=100) & (Q(y=False) | Q(y__exists=False))) query = (q1 | q2).to_query(TestDoc) self.assertEqual(['$or'], query.keys()) conditions = [ {'x': {'$gt': 0}, 'y': True}, {'x': {'$gt': 0}, 'y': {'$exists': False}}, {'x': {'$lt': 100}, 'y':False}, {'x': {'$lt': 100}, 'y': {'$exists': False}}, ] self.assertEqual(len(conditions), len(query['$or'])) for condition in conditions: self.assertTrue(condition in query['$or']) def test_q_clone(self): class TestDoc(Document): x = IntField() TestDoc.drop_collection() for i in xrange(1, 101): t = TestDoc(x=i) t.save() # Check normal cases work without an error test = TestDoc.objects(Q(x__lt=7) & Q(x__gt=3)) self.assertEqual(test.count(), 3) test2 = test.clone() self.assertEqual(test2.count(), 3) self.assertFalse(test2 == test) test2.filter(x=6) self.assertEqual(test2.count(), 1) self.assertEqual(test.count(), 3) class QueryFieldListTest(unittest.TestCase): def test_empty(self): q = QueryFieldList() self.assertFalse(q) q = QueryFieldList(always_include=['_cls']) self.assertFalse(q) def test_include_include(self): q = QueryFieldList() q += 
QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'a': True, 'b': True}) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'b': True}) def test_include_exclude(self): q = QueryFieldList() q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'a': True, 'b': True}) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) self.assertEqual(q.as_dict(), {'a': True}) def test_exclude_exclude(self): q = QueryFieldList() q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) self.assertEqual(q.as_dict(), {'a': False, 'b': False}) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) self.assertEqual(q.as_dict(), {'a': False, 'b': False, 'c': False}) def test_exclude_include(self): q = QueryFieldList() q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) self.assertEqual(q.as_dict(), {'a': False, 'b': False}) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'c': True}) def test_always_include(self): q = QueryFieldList(always_include=['x', 'y']) q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True}) def test_reset(self): q = QueryFieldList(always_include=['x', 'y']) q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'c': True}) q.reset() self.assertFalse(q) q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) self.assertEqual(q.as_dict(), {'x': True, 'y': True, 'b': True, 'c': True}) def test_using_a_slice(self): q = QueryFieldList() q += QueryFieldList(fields=['a'], value={"$slice": 5}) self.assertEqual(q.as_dict(), {'a': {"$slice": 5}}) def 
test_elem_match(self): class Foo(EmbeddedDocument): shape = StringField() color = StringField() trick = BooleanField() meta = {'allow_inheritance': False} class Bar(Document): foo = ListField(EmbeddedDocumentField(Foo)) meta = {'allow_inheritance': False} Bar.drop_collection() b1 = Bar(foo=[Foo(shape= "square", color ="purple", thick = False), Foo(shape= "circle", color ="red", thick = True)]) b1.save() b2 = Bar(foo=[Foo(shape= "square", color ="red", thick = True), Foo(shape= "circle", color ="purple", thick = False)]) b2.save() ak = list(Bar.objects(foo__match={'shape': "square", "color": "purple"})) self.assertEqual([b1], ak) if __name__ == '__main__': unittest.main() MongoEngine-mongoengine-7a1b110/tests/test_replicaset_connection.py000066400000000000000000000017241177143177100257020ustar00rootroot00000000000000import unittest import pymongo from pymongo import ReadPreference, ReplicaSetConnection import mongoengine from mongoengine import * from mongoengine.connection import get_db, get_connection, ConnectionError class ConnectionTest(unittest.TestCase): def tearDown(self): mongoengine.connection._connection_settings = {} mongoengine.connection._connections = {} mongoengine.connection._dbs = {} def test_replicaset_uri_passes_read_preference(self): """Requires a replica set called "rs" on port 27017 """ try: conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY) except ConnectionError, e: return if not isinstance(conn, ReplicaSetConnection): return self.assertEquals(conn.read_preference, ReadPreference.SECONDARY_ONLY) if __name__ == '__main__': unittest.main() MongoEngine-mongoengine-7a1b110/tests/test_signals.py000066400000000000000000000217741177143177100227770ustar00rootroot00000000000000# -*- coding: utf-8 -*- import unittest from mongoengine import * from mongoengine import signals signal_output = [] class SignalTests(unittest.TestCase): """ Testing signals before/after 
saving and deleting. """ def get_signal_output(self, fn, *args, **kwargs): # Flush any existing signal output global signal_output signal_output = [] fn(*args, **kwargs) return signal_output def setUp(self): connect(db='mongoenginetest') class Author(Document): name = StringField() def __unicode__(self): return self.name @classmethod def pre_init(cls, sender, document, *args, **kwargs): signal_output.append('pre_init signal, %s' % cls.__name__) signal_output.append(str(kwargs['values'])) @classmethod def post_init(cls, sender, document, **kwargs): signal_output.append('post_init signal, %s' % document) @classmethod def pre_save(cls, sender, document, **kwargs): signal_output.append('pre_save signal, %s' % document) @classmethod def post_save(cls, sender, document, **kwargs): signal_output.append('post_save signal, %s' % document) if 'created' in kwargs: if kwargs['created']: signal_output.append('Is created') else: signal_output.append('Is updated') @classmethod def pre_delete(cls, sender, document, **kwargs): signal_output.append('pre_delete signal, %s' % document) @classmethod def post_delete(cls, sender, document, **kwargs): signal_output.append('post_delete signal, %s' % document) @classmethod def pre_bulk_insert(cls, sender, documents, **kwargs): signal_output.append('pre_bulk_insert signal, %s' % documents) @classmethod def post_bulk_insert(cls, sender, documents, **kwargs): signal_output.append('post_bulk_insert signal, %s' % documents) if kwargs.get('loaded', False): signal_output.append('Is loaded') else: signal_output.append('Not loaded') self.Author = Author class Another(Document): name = StringField() def __unicode__(self): return self.name @classmethod def pre_init(cls, sender, document, **kwargs): signal_output.append('pre_init Another signal, %s' % cls.__name__) signal_output.append(str(kwargs['values'])) @classmethod def post_init(cls, sender, document, **kwargs): signal_output.append('post_init Another signal, %s' % document) @classmethod def 
pre_save(cls, sender, document, **kwargs): signal_output.append('pre_save Another signal, %s' % document) @classmethod def post_save(cls, sender, document, **kwargs): signal_output.append('post_save Another signal, %s' % document) if 'created' in kwargs: if kwargs['created']: signal_output.append('Is created') else: signal_output.append('Is updated') @classmethod def pre_delete(cls, sender, document, **kwargs): signal_output.append('pre_delete Another signal, %s' % document) @classmethod def post_delete(cls, sender, document, **kwargs): signal_output.append('post_delete Another signal, %s' % document) self.Another = Another # Save up the number of connected signals so that we can check at the end # that all the signals we register get properly unregistered self.pre_signals = ( len(signals.pre_init.receivers), len(signals.post_init.receivers), len(signals.pre_save.receivers), len(signals.post_save.receivers), len(signals.pre_delete.receivers), len(signals.post_delete.receivers), len(signals.pre_bulk_insert.receivers), len(signals.post_bulk_insert.receivers), ) signals.pre_init.connect(Author.pre_init, sender=Author) signals.post_init.connect(Author.post_init, sender=Author) signals.pre_save.connect(Author.pre_save, sender=Author) signals.post_save.connect(Author.post_save, sender=Author) signals.pre_delete.connect(Author.pre_delete, sender=Author) signals.post_delete.connect(Author.post_delete, sender=Author) signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author) signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author) signals.pre_init.connect(Another.pre_init, sender=Another) signals.post_init.connect(Another.post_init, sender=Another) signals.pre_save.connect(Another.pre_save, sender=Another) signals.post_save.connect(Another.post_save, sender=Another) signals.pre_delete.connect(Another.pre_delete, sender=Another) signals.post_delete.connect(Another.post_delete, sender=Another) def tearDown(self): 
signals.pre_init.disconnect(self.Author.pre_init) signals.post_init.disconnect(self.Author.post_init) signals.post_delete.disconnect(self.Author.post_delete) signals.pre_delete.disconnect(self.Author.pre_delete) signals.post_save.disconnect(self.Author.post_save) signals.pre_save.disconnect(self.Author.pre_save) signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert) signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert) signals.pre_init.disconnect(self.Another.pre_init) signals.post_init.disconnect(self.Another.post_init) signals.post_delete.disconnect(self.Another.post_delete) signals.pre_delete.disconnect(self.Another.pre_delete) signals.post_save.disconnect(self.Another.post_save) signals.pre_save.disconnect(self.Another.pre_save) # Check that all our signals got disconnected properly. post_signals = ( len(signals.pre_init.receivers), len(signals.post_init.receivers), len(signals.pre_save.receivers), len(signals.post_save.receivers), len(signals.pre_delete.receivers), len(signals.post_delete.receivers), len(signals.pre_bulk_insert.receivers), len(signals.post_bulk_insert.receivers), ) self.assertEqual(self.pre_signals, post_signals) def test_model_signals(self): """ Model saves should throw some signals. 
""" def create_author(): a1 = self.Author(name='Bill Shakespeare') def bulk_create_author_with_load(): a1 = self.Author(name='Bill Shakespeare') self.Author.objects.insert([a1], load_bulk=True) def bulk_create_author_without_load(): a1 = self.Author(name='Bill Shakespeare') self.Author.objects.insert([a1], load_bulk=False) self.assertEqual(self.get_signal_output(create_author), [ "pre_init signal, Author", "{'name': 'Bill Shakespeare'}", "post_init signal, Bill Shakespeare", ]) a1 = self.Author(name='Bill Shakespeare') self.assertEqual(self.get_signal_output(a1.save), [ "pre_save signal, Bill Shakespeare", "post_save signal, Bill Shakespeare", "Is created" ]) a1.reload() a1.name='William Shakespeare' self.assertEqual(self.get_signal_output(a1.save), [ "pre_save signal, William Shakespeare", "post_save signal, William Shakespeare", "Is updated" ]) self.assertEqual(self.get_signal_output(a1.delete), [ 'pre_delete signal, William Shakespeare', 'post_delete signal, William Shakespeare', ]) signal_output = self.get_signal_output(bulk_create_author_with_load) # The output of this signal is not entirely deterministic. The reloaded # object will have an object ID. Hence, we only check part of the output self.assertEquals(signal_output[3], "pre_bulk_insert signal, []") self.assertEquals(signal_output[-2:], ["post_bulk_insert signal, []", "Is loaded",]) self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [ "pre_init signal, Author", "{'name': 'Bill Shakespeare'}", "post_init signal, Bill Shakespeare", "pre_bulk_insert signal, []", "post_bulk_insert signal, []", "Not loaded", ]) self.Author.objects.delete()