diff --git a/.github/workflows/github-actions.yml b/.github/workflows/github-actions.yml index b6820a33e..bed3ea878 100644 --- a/.github/workflows/github-actions.yml +++ b/.github/workflows/github-actions.yml @@ -6,20 +6,29 @@ on: push: branches: - master + # Manual trigger from Action page + workflow_dispatch: # release tags create: tags: - 'v[0-9]+\.[0-9]+\.[0-9]+*' env: - MONGODB_3_6: 3.6.14 - MONGODB_4_0: 4.0.23 - MONGODB_4_2: 4.2 + MONGODB_3_6: 3.6.23 + MONGODB_4_0: 4.0.28 MONGODB_4_4: 4.4 + MONGODB_5_0: "5.0" + MONGODB_6_0: "6.0" + MONGODB_7_0: "7.0" PYMONGO_3_4: 3.4 PYMONGO_3_6: 3.6 PYMONGO_3_9: 3.9 PYMONGO_3_11: 3.11 + PYMONGO_3_12: 3.12 + PYMONGO_4_0: 4.0 + PYMONGO_4_3: 4.3.2 + PYMONGO_4_4: 4.4.1 + PYMONGO_4_6: 4.6.0 MAIN_PYTHON_VERSION: 3.7 @@ -29,40 +38,53 @@ jobs: # which runs pre-configured linter & autoformatter runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.7 - uses: actions/setup-python@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: '3.9' + check-latest: true - run: bash .github/workflows/install_ci_python_dep.sh - run: pre-commit run -a test: # Test suite run against recent python versions # and against a few combination of MongoDB and pymongo - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 strategy: fail-fast: false matrix: - python-version: [3.6, 3.7, 3.8, 3.9, pypy3] + python-version: [3.7, 3.8, 3.9, "3.10", 3.11, pypy3.9] MONGODB: [$MONGODB_4_0] PYMONGO: [$PYMONGO_3_11] include: - python-version: 3.7 MONGODB: $MONGODB_3_6 PYMONGO: $PYMONGO_3_9 - - python-version: 3.7 - MONGODB: $MONGODB_4_2 - PYMONGO: $PYMONGO_3_6 - - python-version: 3.7 + - python-version: 3.8 MONGODB: $MONGODB_4_4 PYMONGO: $PYMONGO_3_11 + - python-version: 3.9 + MONGODB: $MONGODB_4_4 + PYMONGO: $PYMONGO_3_12 + - python-version: "3.10" + MONGODB: $MONGODB_4_4 + PYMONGO: $PYMONGO_4_0 + - python-version: "3.11" + MONGODB: $MONGODB_5_0 + PYMONGO: $PYMONGO_4_3 + - 
python-version: "3.11" + MONGODB: $MONGODB_6_0 + PYMONGO: $PYMONGO_4_4 + - python-version: "3.11" + MONGODB: $MONGODB_7_0 + PYMONGO: $PYMONGO_4_6 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} + check-latest: true - name: install mongo and ci dependencies run: | bash .github/workflows/install_mongo.sh ${{ matrix.MONGODB }} @@ -84,11 +106,12 @@ jobs: # to avoid that it breaks when new releases are being created runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.7 + check-latest: true - name: install python dep run: | pip install -e . @@ -98,39 +121,31 @@ jobs: cd docs make html-readthedocs - build-n-publish-dummy: + build-dryrun: runs-on: ubuntu-latest needs: [linting, test, build_doc_dryrun] - if: github.event_name != 'pull_request' steps: - - uses: actions/checkout@master - - name: Set up Python 3.7 - uses: actions/setup-python@v1 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.9 + check-latest: true - name: build dummy wheel for test-pypi run: | pip install wheel - python setup.py egg_info -b ".dev`date '+%Y%m%d%H%M%S'`" build sdist bdist_wheel -# - name: publish test-pypi -# # Although working and recommended, test-pypi has a limit -# # in the size of projects so it's better to avoid publishing -# # until there is a way to garbage collect these dummy releases -# uses: pypa/gh-action-pypi-publish@master -# with: -# password: ${{ secrets.test_pypi_token }} -# repository_url: https://test.pypi.org/legacy/ + python setup.py sdist bdist_wheel build-n-publish: runs-on: ubuntu-latest - needs: [linting, test, build_doc_dryrun, build-n-publish-dummy] 
+ needs: [linting, test, build_doc_dryrun, build-dryrun] if: github.event_name == 'create' && startsWith(github.ref, 'refs/tags/v') steps: - - uses: actions/checkout@master - - name: Set up Python 3.7 - uses: actions/setup-python@v1 + - uses: actions/checkout@v3 + - name: Set up Python 3.9 + uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.9 + check-latest: true # todo separate build from publish # https://stackoverflow.com/questions/59349905/which-properties-does-github-event-in-a-github-workflow-have - name: build dummy wheel for test-pypi @@ -138,6 +153,6 @@ jobs: pip install wheel python setup.py sdist bdist_wheel - name: publish pypi - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: password: ${{ secrets.pypi_token }} diff --git a/.github/workflows/install_mongo.sh b/.github/workflows/install_mongo.sh index 136f5c203..aece5f1e2 100644 --- a/.github/workflows/install_mongo.sh +++ b/.github/workflows/install_mongo.sh @@ -9,6 +9,12 @@ if [[ "$MONGODB" == *"4.2"* ]]; then mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest elif [[ "$MONGODB" == *"4.4"* ]]; then mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest +elif [[ "$MONGODB" == *"5.0"* ]]; then + mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest +elif [[ "$MONGODB" == *"6.0"* ]]; then + mongo_build=mongodb-linux-x86_64-ubuntu1804-v${MONGODB}-latest +elif [[ "$MONGODB" == *"7.0"* ]]; then + mongo_build=mongodb-linux-x86_64-ubuntu2004-v${MONGODB}-latest fi wget http://fastdl.mongodb.org/linux/$mongo_build.tgz diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5131c8c78..b5e342679 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,26 +1,28 @@ fail_fast: false repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 + rev: v4.4.0 hooks: - id: check-merge-conflict - id: debug-statements - id: trailing-whitespace - id: end-of-file-fixer - repo: 
https://github.com/ambv/black - rev: 21.4b2 + rev: 23.9.1 hooks: - id: black - - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.1 + - repo: https://github.com/pycqa/flake8 + rev: 6.1.0 hooks: - id: flake8 + additional_dependencies: + - importlib_metadata<5 - repo: https://github.com/asottile/pyupgrade - rev: v2.14.0 + rev: v3.14.0 hooks: - id: pyupgrade args: [--py36-plus] - repo: https://github.com/pycqa/isort - rev: 5.8.0 + rev: 5.12.0 hooks: - id: isort diff --git a/AUTHORS b/AUTHORS index 0c7f6b46d..40508b532 100644 --- a/AUTHORS +++ b/AUTHORS @@ -260,3 +260,7 @@ that much better: * Stankiewicz Mateusz (https://github.com/mas15) * Felix Schultheiß (https://github.com/felix-smashdocs) * Jan Stein (https://github.com/janste63) + * Timothé Perez (https://github.com/AchilleAsh) + * oleksandr-l5 (https://github.com/oleksandr-l5) + * Ido Shraga (https://github.com/idoshr) + * Terence Honles (https://github.com/terencehonles) diff --git a/README.rst b/README.rst index a74c26aa8..64e43b6bc 100644 --- a/README.rst +++ b/README.rst @@ -15,6 +15,12 @@ MongoEngine .. image:: https://img.shields.io/badge/code%20style-black-000000.svg :target: https://github.com/ambv/black +.. image:: https://pepy.tech/badge/mongoengine/month + :target: https://pepy.tech/project/mongoengine + +.. image:: https://img.shields.io/pypi/v/mongoengine.svg + :target: https://pypi.python.org/pypi/mongoengine + About ===== MongoEngine is a Python Object-Document Mapper for working with MongoDB. @@ -25,10 +31,10 @@ an `API reference `_. Supported MongoDB Versions ========================== -MongoEngine is currently tested against MongoDB v3.4, v3.6 and v4.0. Future versions +MongoEngine is currently tested against MongoDB v3.6, v4.0, v4.4, v5.0, v6.0 and v7.0. Future versions should be supported as well, but aren't actively tested at the moment. Make sure to open an issue or submit a pull request if you experience any problems -with MongoDB version > 4.0. +with a more recent MongoDB versions. 
Installation ============ diff --git a/benchmarks/test_basic_doc_ops.py b/benchmarks/test_basic_doc_ops.py index 91efc7e32..8b8bf4aaf 100644 --- a/benchmarks/test_basic_doc_ops.py +++ b/benchmarks/test_basic_doc_ops.py @@ -12,7 +12,7 @@ StringField, ) -mongoengine.connect(db="mongoengine_benchmark_test") +mongoengine.connect(db="mongoengine_benchmark_test", w=1) def timeit(f, n=10000): @@ -38,34 +38,34 @@ def init_book(): author_email="alec@example.com", ) - print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10 ** 6)) + print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10**6)) b = init_book() - print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10 ** 6)) + print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10**6)) print( "Doc setattr: %.3fus" - % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6) # noqa B010 + % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10**6) # noqa B010 ) - print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6)) + print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10**6)) - print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10 ** 6)) + print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10**6)) def save_book(): b._mark_as_changed("name") b._mark_as_changed("tags") b.save() - print("Save to database: %.3fus" % (timeit(save_book, 100) * 10 ** 6)) + print("Save to database: %.3fus" % (timeit(save_book, 100) * 10**6)) son = b.to_mongo() print( - "Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10 ** 6) + "Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10**6) ) print( - "Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10 ** 6) + "Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10**6) ) def create_and_delete_book(): @@ -75,7 +75,7 @@ def create_and_delete_book(): print( "Init + save to database + delete: %.3fms" - % 
(timeit(create_and_delete_book, 10) * 10 ** 3) + % (timeit(create_and_delete_book, 10) * 10**3) ) @@ -101,9 +101,9 @@ def init_company(): ) company = init_company() - print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10 ** 3)) + print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10**3)) - print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10 ** 3)) + print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10**3)) company.save() @@ -112,17 +112,17 @@ def save_company(): company._mark_as_changed("contacts") company.save() - print("Save to database: %.3fms" % (timeit(save_company, 100) * 10 ** 3)) + print("Save to database: %.3fms" % (timeit(save_company, 100) * 10**3)) son = company.to_mongo() print( "Load from SON: %.3fms" - % (timeit(lambda: Company._from_son(son), 100) * 10 ** 3) + % (timeit(lambda: Company._from_son(son), 100) * 10**3) ) print( "Load from database: %.3fms" - % (timeit(lambda: Company.objects[0], 100) * 10 ** 3) + % (timeit(lambda: Company.objects[0], 100) * 10**3) ) def create_and_delete_company(): @@ -132,7 +132,7 @@ def create_and_delete_company(): print( "Init + save to database + delete: %.3fms" - % (timeit(create_and_delete_company, 10) * 10 ** 3) + % (timeit(create_and_delete_company, 10) * 10**3) ) diff --git a/benchmarks/test_inserts.py b/benchmarks/test_inserts.py index dcd18ff88..8e8419933 100644 --- a/benchmarks/test_inserts.py +++ b/benchmarks/test_inserts.py @@ -5,15 +5,11 @@ def main(): setup = """ from pymongo import MongoClient -connection = MongoClient() +connection = MongoClient(w=1) connection.drop_database('mongoengine_benchmark_test') """ stmt = """ -from pymongo import MongoClient - -connection = MongoClient() - db = connection.mongoengine_benchmark_test noddy = db.noddy @@ -29,13 +25,12 @@ def main(): """ print("-" * 100) - print("PyMongo: Creating 10000 dictionaries.") + print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 1}).') t = 
timeit.Timer(stmt=stmt, setup=setup) print(f"{t.timeit(1)}s") stmt = """ -from pymongo import MongoClient, WriteConcern -connection = MongoClient() +from pymongo import WriteConcern db = connection.mongoengine_benchmark_test noddy = db.noddy.with_options(write_concern=WriteConcern(w=0)) @@ -64,7 +59,7 @@ def main(): connection.close() from mongoengine import Document, DictField, connect -connect("mongoengine_benchmark_test") +connect("mongoengine_benchmark_test", w=1) class Noddy(Document): fields = DictField() @@ -82,7 +77,7 @@ class Noddy(Document): """ print("-" * 100) - print("MongoEngine: Creating 10000 dictionaries.") + print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 1}).') t = timeit.Timer(stmt=stmt, setup=setup) print(f"{t.timeit(1)}s") diff --git a/benchmarks/test_save_with_indexes.py b/benchmarks/test_save_with_indexes.py new file mode 100644 index 000000000..86e281cb3 --- /dev/null +++ b/benchmarks/test_save_with_indexes.py @@ -0,0 +1,87 @@ +import timeit + + +def main(): + setup = """ +from pymongo import MongoClient + +connection = MongoClient() +connection.drop_database("mongoengine_benchmark_test") +connection.close() + +from mongoengine import connect, Document, IntField, StringField +connect("mongoengine_benchmark_test", w=1) + +class User0(Document): + name = StringField() + age = IntField() + +class User1(Document): + name = StringField() + age = IntField() + meta = {"indexes": [["name"]]} + +class User2(Document): + name = StringField() + age = IntField() + meta = {"indexes": [["name", "age"]]} + +class User3(Document): + name = StringField() + age = IntField() + meta = {"indexes": [["name"]], "auto_create_index_on_save": True} + +class User4(Document): + name = StringField() + age = IntField() + meta = {"indexes": [["name", "age"]], "auto_create_index_on_save": True} +""" + + stmt = """ +for i in range(10000): + User0(name="Nunu", age=9).save() +""" + print("-" * 80) + print("Save 10000 documents with 0 indexes.") + t = 
timeit.Timer(stmt=stmt, setup=setup) + print(f"{min(t.repeat(repeat=3, number=1))}s") + + stmt = """ +for i in range(10000): + User1(name="Nunu", age=9).save() +""" + print("-" * 80) + print("Save 10000 documents with 1 index.") + t = timeit.Timer(stmt=stmt, setup=setup) + print(f"{min(t.repeat(repeat=3, number=1))}s") + + stmt = """ +for i in range(10000): + User2(name="Nunu", age=9).save() +""" + print("-" * 80) + print("Save 10000 documents with 2 indexes.") + t = timeit.Timer(stmt=stmt, setup=setup) + print(f"{min(t.repeat(repeat=3, number=1))}s") + + stmt = """ +for i in range(10000): + User3(name="Nunu", age=9).save() +""" + print("-" * 80) + print("Save 10000 documents with 1 index (auto_create_index_on_save=True).") + t = timeit.Timer(stmt=stmt, setup=setup) + print(f"{min(t.repeat(repeat=3, number=1))}s") + + stmt = """ +for i in range(10000): + User4(name="Nunu", age=9).save() +""" + print("-" * 80) + print("Save 10000 documents with 2 indexes (auto_create_index_on_save=True).") + t = timeit.Timer(stmt=stmt, setup=setup) + print(f"{min(t.repeat(repeat=3, number=1))}s") + + +if __name__ == "__main__": + main() diff --git a/docs/Makefile b/docs/Makefile index 0bf353561..301b44fa8 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -27,6 +27,9 @@ help: @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" +install-deps: + -pip install -r requirements.txt + clean: -rm -rf $(BUILDDIR)/* diff --git a/docs/apireference.rst b/docs/apireference.rst index 218946115..dbcb3b84e 100644 --- a/docs/apireference.rst +++ b/docs/apireference.rst @@ -75,6 +75,7 @@ Fields .. autoclass:: mongoengine.fields.StringField .. autoclass:: mongoengine.fields.URLField .. autoclass:: mongoengine.fields.EmailField +.. autoclass:: mongoengine.fields.EnumField .. autoclass:: mongoengine.fields.IntField .. autoclass:: mongoengine.fields.LongField .. 
autoclass:: mongoengine.fields.FloatField diff --git a/docs/changelog.rst b/docs/changelog.rst index 3f4252a55..2a724094b 100644 --- a/docs/changelog.rst +++ b/docs/changelog.rst @@ -7,10 +7,105 @@ Changelog Development =========== - (Fill this out as you fix issues and develop your features). -- EnumField improvements: now `choices` limits the values of an enum to allow +- Fix for uuidRepresentation not read when provided in URI #2741 +- Add tests against MongoDB 6.0 and MongoDB 7.0 in the pipeline +- Fix validate() not being called when inheritance is used in EmbeddedDocument and validate is overriden #2784 +- Add support for readPreferenceTags in connection parameters #2644 + +Changes in 0.27.0 +================= +- Update uuidRepresentation warnings with "unspecified" as the future default (instead of 'standard' previously advertised) #2739 +- Added `mongo_client_class` optional parameter to connect() to allow to use an alternative mongo client than pymongo.MongoClient. + Typically to support mock mongo libraries like mongomock, montydb, mongita #2729 +- BREAKING CHANGE: connecting MongoEngine with mongomock should now use the new `mongo_client_class` + For more info, check https://docs.mongoengine.org/guide/mongomock.html +- Fix DictField that always gets marked as changed #2606 +- fix for Queryset.none() that has no effect on update/aggregate / first #2669 + +Changes in 0.26.0 +================= +- BREAKING CHANGE: Improved the performance of :meth:`~mongoengine.Document.save()` + by removing the call to :meth:`~mongoengine.Document.ensure_indexes` unless + ``meta['auto_create_index_on_save']`` is set to True. With the default settings, Document indexes + will still be created on the fly, during the first usage of the collection (query, insert, etc), + they will just not be re-created whenever .save() is called. +- Added meta ``auto_create_index_on_save`` so you can enable index creation + on :meth:`~mongoengine.Document.save()` (as it was < 0.26.0). 
+- BREAKING CHANGE: remove deprecated method ``ensure_index`` (replaced by ``create_index`` long time ago). +- Addition of Decimal128Field: :class:`~mongoengine.fields.Decimal128Field` for accurate representation of Decimals (much better than the legacy field DecimalField). + Although it could work to switch an existing DecimalField to Decimal128Field without applying a migration script, + it is not recommended to do so (DecimalField uses float/str to store the value, Decimal128Field uses Decimal128). +- BREAKING CHANGE: When using ListField(EnumField) or DictField(EnumField), the values weren't always cast into the Enum (#2531) +- BREAKING CHANGE (bugfix) Querying ObjectIdField or ComplexDateTimeField with None no longer raise a ValidationError (#2681) +- Allow updating a field that has an operator name e.g. "type" with .update(set__type="foo"). It was raising an error previously. #2595 + +Changes in 0.25.0 +================= +- Support MONGODB-AWS authentication mechanism (with `authmechanismproperties`) #2507 +- Bug Fix - distinct query doesn't obey the ``no_dereference()``. #2663 +- Add tests against Mongo 5.0 in pipeline +- Drop support for Python 3.6 (EOL) +- Bug fix support for PyMongo>=4 to fix "pymongo.errors.InvalidOperation: Cannot use MongoClient after close" + errors. #2627 + +Changes in 0.24.2 +================= +- Bug fix regarding uuidRepresentation that was case sensitive #2650 + +Changes in 0.24.1 +================= +- Allow pymongo<5.0 to be pulled +- Don't use deprecated property for emptiness check in queryset base #2633 + +Changes in 0.24.0 +================= +- EnumField improvements: now ``choices`` limits the values of an enum to allow +- Fix bug that prevented instance queryset from using custom queryset_class #2589 +- Fix deepcopy of EmbeddedDocument #2202 +- Introduce a base exception class for MongoEngine exceptions (MongoEngineException). 
+ Note that this doesn't concern the pymongo errors #2515 +- Fix error when using precision=0 with DecimalField #2535 +- Add support for regex and whole word text search query #2568 +- Add support for update aggregation pipeline #2578 +- BREAKING CHANGE: Updates to support pymongo 4.0. Where possible deprecated + functionality has been migrated, but additional care should be taken when + migrating to pymongo 4.0 as existing code may have been using deprecated + features which have now been removed #2614. + + For the pymongo migration guide see: + https://pymongo.readthedocs.io/en/stable/migrate-to-pymongo4.html. + + In addition to the changes in the migration guide, the following is a high + level overview of the changes made to MongoEngine when using pymongo 4.0: + + - limited support of geohaystack indexes has been removed + - ``QuerySet.map_reduce`` has been migrated from ``Collection.map_reduce`` + and ``Collection.inline_map_reduce`` to use + ``db.command({mapReduce: ..., ...})`` and support between the two may need + additional verification. + - UUIDs are encoded with the ``pythonLegacy`` encoding by default instead of + the newer and cross platform ``standard`` encoding. Existing UUIDs will + need to be migrated before changing the encoding, and this should be done + explicitly by the user rather than switching to a new default by + MongoEngine. This default will change at a later date, but to allow + specifying and then migrating to the new format a default ``json_options`` + has been provided. + - ``Queryset.count`` has been using ``Collection.count_documents`` and + transparently falling back to ``Collection.count`` when using features that + are not supported by ``Collection.count_documents``. ``Collection.count`` + has been removed and no automatic fallback is possible. The migration guide + documents the extended functionality which is no longer supported. Rewrite + the unsupported queries or fetch the whole result set and perform the count + locally. 
+ - Pymongo 4 removed db.authenticate(), on which we were relying for authenticating + with username/password. The migration involved switching to providing credentials to + MongoClient BUT in case the authSource isn't provided, db.authenticate used to default to + authSource=current-database and MongoClient defaults to authSource="admin". Long story short, + if you observe authentication issue after migrating, make sure you provide the authSource + explicitly. (see #2626) Changes in 0.23.1 -=========== +================= - Bug fix: ignore LazyReferenceFields when clearing _changed_fields #2484 - Improve connection doc #2481 diff --git a/docs/guide/connecting.rst b/docs/guide/connecting.rst index 387151dfe..2cbdb4453 100644 --- a/docs/guide/connecting.rst +++ b/docs/guide/connecting.rst @@ -186,6 +186,10 @@ access to the same User document across databases:: with switch_db(User, 'archive-user-db') as User: User(name='Ross').save() # Saves the 'archive-user-db' +.. note:: :func:`~mongoengine.context_managers.switch_db` when used on + a class that allow inheritance will change the database alias + for instances of a given class only - instances of subclasses will still use + the default database. Switch Collection ----------------- diff --git a/docs/guide/defining-documents.rst b/docs/guide/defining-documents.rst index a457de1f0..df749ee1e 100644 --- a/docs/guide/defining-documents.rst +++ b/docs/guide/defining-documents.rst @@ -27,6 +27,8 @@ objects** as class attributes to the document class:: As BSON (the binary format for storing data in mongodb) is order dependent, documents are serialized based on their field order. +.. _dynamic-document-schemas: + Dynamic document schemas ======================== One of the benefits of MongoDB is dynamic schemas for a collection, whilst data @@ -111,6 +113,33 @@ arguments can be set on all fields: :attr:`db_field` (Default: None) The MongoDB field name. 
+ If set, operations in MongoDB will be performed with this value instead of the class attribute. + + This allows you to use a different attribute than the name of the field used in MongoDB. :: + + from mongoengine import * + + class Page(Document): + page_number = IntField(db_field="pageNumber") + + # Create a Page and save it + Page(page_number=1).save() + + # How 'pageNumber' is stored in MongoDB + Page.objects.as_pymongo() # [{'_id': ObjectId('629dfc45ee4cc407b1586b1f'), 'pageNumber': 1}] + + # Retrieve the object + page: Page = Page.objects.first() + + print(page.page_number) # prints 1 + + print(page.pageNumber) # raises AttributeError + + .. note:: If set, use the name of the attribute when defining indexes in the :attr:`meta` + dictionary rather than the :attr:`db_field` otherwise, :class:`~mongoengine.LookUpError` + will be raised. + + :attr:`required` (Default: False) If set to True and the field is not set on the document instance, a :class:`~mongoengine.ValidationError` will be raised when the document is @@ -231,6 +260,9 @@ document class as the first argument:: comment2 = Comment(content='Nice article!') page = Page(comments=[comment1, comment2]) +Embedded documents can also leverage the flexibility of :ref:`dynamic-document-schemas:` +by inheriting :class:`~mongoengine.DynamicEmbeddedDocument`. + Dictionary Fields ----------------- Often, an embedded document may be used instead of a dictionary – generally @@ -336,7 +368,6 @@ supplying the :attr:`reverse_delete_rule` attributes on the :class:`ReferenceField` definition, like this:: class ProfilePage(Document): - ... employee = ReferenceField('Employee', reverse_delete_rule=mongoengine.CASCADE) The declaration in this example means that when an :class:`Employee` object is @@ -473,7 +504,7 @@ dictionary containing a full index definition. A direction may be specified on fields by prefixing the field name with a **+** (for ascending) or a **-** sign (for descending). 
Note that direction -only matters on multi-field indexes. Text indexes may be specified by prefixing +only matters on compound indexes. Text indexes may be specified by prefixing the field name with a **$**. Hashed indexes may be specified by prefixing the field name with a **#**:: @@ -484,14 +515,14 @@ the field name with a **#**:: created = DateTimeField() meta = { 'indexes': [ - 'title', + 'title', # single-field index '$title', # text index '#title', # hashed index - ('title', '-rating'), - ('category', '_cls'), + ('title', '-rating'), # compound index + ('category', '_cls'), # compound index { 'fields': ['created'], - 'expireAfterSeconds': 3600 + 'expireAfterSeconds': 3600 # ttl index } ] } @@ -543,6 +574,7 @@ There are a few top level defaults for all indexes that can be set:: 'index_background': True, 'index_cls': False, 'auto_create_index': True, + 'auto_create_index_on_save': False, } @@ -557,10 +589,15 @@ There are a few top level defaults for all indexes that can be set:: :attr:`auto_create_index` (Optional) When this is True (default), MongoEngine will ensure that the correct - indexes exist in MongoDB each time a command is run. This can be disabled + indexes exist in MongoDB when the Document is first used. This can be disabled in systems where indexes are managed separately. Disabling this will improve performance. +:attr:`auto_create_index_on_save` (Optional) + When this is True, MongoEngine will ensure that the correct + indexes exist in MongoDB each time :meth:`~mongoengine.document.Document.save` + is run. Enabling this will degrade performance. The default is False. This + option was added in version 0.25. Compound Indexes and Indexing sub documents ------------------------------------------- @@ -624,8 +661,8 @@ point. 
To create a geospatial index you must prefix the field with the ], } -Time To Live indexes --------------------- +Time To Live (TTL) indexes +-------------------------- A special index type that allows you to automatically expire data from a collection after a given period. See the official diff --git a/docs/guide/migration.rst b/docs/guide/migration.rst index ae4bb7c8b..ed982efc3 100644 --- a/docs/guide/migration.rst +++ b/docs/guide/migration.rst @@ -223,6 +223,47 @@ it is often useful for complex migrations of Document models. .. warning:: Be aware of this `flaw `_ if you modify documents while iterating +Example 4: Index removal +======================== + +If you remove an index from your Document class, or remove an indexed Field from your Document class, +you'll need to manually drop the corresponding index. MongoEngine will not do that for you. + +The way to deal with this case is to identify the name of the index to drop with `index_information()`, and then drop +it with `drop_index()` + +Let's for instance assume that you start with the following Document class + +.. code-block:: python + + class User(Document): + name = StringField(index=True) + + meta = {"indexes": ["name"]} + + User(name="John Doe").save() + +As soon as you start interacting with the Document collection (when `.save()` is called in this case), +it would create the following indexes: + +.. code-block:: python + + print(User._get_collection().index_information()) + # { + # '_id_': {'key': [('_id', 1)], 'v': 2}, + # 'name_1': {'background': False, 'key': [('name', 1)], 'v': 2}, + # } + +Thus: '_id' which is the default index and 'name_1' which is our custom index. +If you would remove the 'name' field or its index, you would have to call: + +.. code-block:: python + + User._get_collection().drop_index('name_1') + +.. 
note:: When adding new fields or new indexes, MongoEngine will take care of creating them + (unless `auto_create_index` is disabled) :: + Recommendations =============== diff --git a/docs/guide/mongomock.rst b/docs/guide/mongomock.rst index 141d7b69d..d9232055b 100644 --- a/docs/guide/mongomock.rst +++ b/docs/guide/mongomock.rst @@ -1,23 +1,28 @@ -============================== +========================= Use mongomock for testing -============================== +========================= -`mongomock `_ is a package to do just -what the name implies, mocking a mongo database. +Although we recommend running your tests against a regular MongoDB server, it is sometimes useful to plug +MongoEngine to alternative implementations (mongomock, montydb, mongita, etc). + +`mongomock `_ is historically the one suggested for MongoEngine and is +a package to do just what the name implies, mocking a mongo database. To use with mongoengine, simply specify mongomock when connecting with mongoengine: .. code-block:: python - connect('mongoenginetest', host='mongomock://localhost') + import mongomock + + connect('mongoenginetest', host='mongodb://localhost', mongo_client_class=mongomock.MongoClient) conn = get_connection() or with an alias: .. 
code-block:: python - connect('mongoenginetest', host='mongomock://localhost', alias='testdb') + connect('mongoenginetest', host='mongodb://localhost', mongo_client_class=mongomock.MongoClient, alias='testdb') conn = get_connection('testdb') Example of test file: @@ -34,7 +39,7 @@ Example of test file: @classmethod def setUpClass(cls): - connect('mongoenginetest', host='mongomock://localhost') + connect('mongoenginetest', host='mongodb://localhost', mongo_client_class=mongomock.MongoClient) @classmethod def tearDownClass(cls): diff --git a/docs/guide/querying.rst b/docs/guide/querying.rst index 7307b003e..b9afb60e1 100644 --- a/docs/guide/querying.rst +++ b/docs/guide/querying.rst @@ -86,6 +86,10 @@ expressions: * ``istartswith`` -- string field starts with value (case insensitive) * ``endswith`` -- string field ends with value * ``iendswith`` -- string field ends with value (case insensitive) +* ``wholeword`` -- string field contains whole word +* ``iwholeword`` -- string field contains whole word (case insensitive) +* ``regex`` -- string field match by regex +* ``iregex`` -- string field match by regex (case insensitive) * ``match`` -- performs an $elemMatch so you can match an entire document within an array @@ -192,6 +196,10 @@ you could use the following query:: Page.objects(tags__0='db') +The string queries operators can be used as well for querying a list field, e.g.:: + + Page.objects(tags__iexact='db') + If you only want to fetch part of a list eg: you want to paginate a list, then the `slice` operator is required:: @@ -215,12 +223,34 @@ However, this doesn't map well to the syntax so you can also use a capital S ins Raw queries ----------- It is possible to provide a raw :mod:`PyMongo` query as a query parameter, which will -be integrated directly into the query. This is done using the ``__raw__`` -keyword argument:: +be integrated directly into the query. This is done using the ``__raw__`` keyword argument:: Page.objects(__raw__={'tags': 'coding'}) -.. 
versionadded:: 0.4 +Similarly, a raw update can be provided to the :meth:`~mongoengine.queryset.QuerySet.update` method:: + + Page.objects(tags='coding').update(__raw__={'$set': {'tags': 'coding'}}) + +And the two can also be combined:: + + Page.objects(__raw__={'tags': 'coding'}).update(__raw__={'$set': {'tags': 'coding'}}) + + +Update with Aggregation Pipeline +-------------------------------- +It is possible to provide a raw :mod:`PyMongo` aggregation update parameter, which will +be integrated directly into the update. This is done by using the ``__raw__`` keyword argument to the update method +and providing the pipeline as a list +`Update with Aggregation Pipeline `_ +:: + + # 'tags' field is set to 'coding is fun' + Page.objects(tags='coding').update(__raw__=[ + {"$set": {"tags": {"$concat": ["$tags", " is fun"]}}} + ], + ) + +.. versionadded:: 0.23.2 Sorting/Ordering results ======================== @@ -239,7 +269,7 @@ Limiting and skipping results Just as with traditional ORMs, you may limit the number of results returned or skip a number or results in you query.
:meth:`~mongoengine.queryset.QuerySet.limit` and -:meth:`~mongoengine.queryset.QuerySet.skip` and methods are available on +:meth:`~mongoengine.queryset.QuerySet.skip` methods are available on :class:`~mongoengine.queryset.QuerySet` objects, but the `array-slicing` syntax is preferred for achieving this:: @@ -543,7 +573,10 @@ Documents may be updated atomically by using the There are several different "modifiers" that you may use with these methods: * ``set`` -- set a particular value +* ``set_on_insert`` -- set only if this is a new document (requires ``upsert=True``) * ``unset`` -- delete a particular value (since MongoDB v1.3) +* ``max`` -- update only if value is bigger +* ``min`` -- update only if value is smaller * ``inc`` -- increment a value by a given amount * ``dec`` -- decrement a value by a given amount * ``push`` -- append a value to a list @@ -552,6 +585,7 @@ There are several different "modifiers" that you may use with these methods: * ``pull`` -- remove a value from a list * ``pull_all`` -- remove several values from a list * ``add_to_set`` -- add value to a list only if its not in the list already +* ``rename`` -- rename the key name .. _depending on the value: http://docs.mongodb.org/manual/reference/operator/update/pop/ diff --git a/docs/guide/validation.rst b/docs/guide/validation.rst index e5b9ed662..866adc95f 100644 --- a/docs/guide/validation.rst +++ b/docs/guide/validation.rst @@ -19,7 +19,7 @@ out of the box. Validation runs when calling `.validate()` or `.save()` ..
code-block:: python - from mongoengine import Document, EmailField + from mongoengine import Document, EmailField, IntField class User(Document): email = EmailField() diff --git a/docs/requirements.txt b/docs/requirements.txt index dfda6a70b..4ecb0127d 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,3 +1,5 @@ Sphinx==3.3.0 sphinx-rtd-theme==0.5.0 readthedocs-sphinx-ext==2.1.1 +docutils==0.17.1 +Jinja2<3.1 diff --git a/docs/upgrade.rst b/docs/upgrade.rst deleted file mode 100644 index 4e798dd4b..000000000 --- a/docs/upgrade.rst +++ /dev/null @@ -1,628 +0,0 @@ -######### -Upgrading -######### - -Development -*********** -(Fill this out whenever you introduce breaking changes to MongoEngine) - -URLField's constructor no longer takes `verify_exists` - -0.15.0 -****** - -0.14.0 -****** -This release includes a few bug fixes and a significant code cleanup. The most -important change is that `QuerySet.as_pymongo` no longer supports a -`coerce_types` mode. If you used it in the past, a) please let us know of your -use case, b) you'll need to override `as_pymongo` to get the desired outcome. - -This release also makes the EmbeddedDocument not hashable by default. If you -use embedded documents in sets or dictionaries, you might have to override -`__hash__` and implement a hashing logic specific to your use case. See #1528 -for the reason behind this change. - -0.13.0 -****** -This release adds Unicode support to the `EmailField` and changes its -structure significantly. Previously, email addresses containing Unicode -characters didn't work at all. Starting with v0.13.0, domains with Unicode -characters are supported out of the box, meaning some emails that previously -didn't pass validation now do. Make sure the rest of your application can -accept such email addresses. 
Additionally, if you subclassed the `EmailField` -in your application and overrode `EmailField.EMAIL_REGEX`, you will have to -adjust your code to override `EmailField.USER_REGEX`, `EmailField.DOMAIN_REGEX`, -and potentially `EmailField.UTF8_USER_REGEX`. - -0.12.0 -****** -This release includes various fixes for the `BaseQuerySet` methods and how they -are chained together. Since version 0.10.1 applying limit/skip/hint/batch_size -to an already-existing queryset wouldn't modify the underlying PyMongo cursor. -This has been fixed now, so you'll need to make sure that your code didn't rely -on the broken implementation. - -Additionally, a public `BaseQuerySet.clone_into` has been renamed to a private -`_clone_into`. If you directly used that method in your code, you'll need to -rename its occurrences. - -0.11.0 -****** -This release includes a major rehaul of MongoEngine's code quality and -introduces a few breaking changes. It also touches many different parts of -the package and although all the changes have been tested and scrutinized, -you're encouraged to thoroughly test the upgrade. - -First breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`. -If you import or catch this exception, you'll need to rename it in your code. - -Second breaking change drops Python v2.6 support. If you run MongoEngine on -that Python version, you'll need to upgrade it first. - -Third breaking change drops an old backward compatibility measure where -`from mongoengine.base import ErrorClass` would work on top of -`from mongoengine.errors import ErrorClass` (where `ErrorClass` is e.g. -`ValidationError`). If you import any exceptions from `mongoengine.base`, -change it to `mongoengine.errors`. - -0.10.8 -****** -This version fixed an issue where specifying a MongoDB URI host would override -more information than it should. 
These changes are minor, but they still -subtly modify the connection logic and thus you're encouraged to test your -MongoDB connection before shipping v0.10.8 in production. - -0.10.7 -****** - -`QuerySet.aggregate_sum` and `QuerySet.aggregate_average` are dropped. Use -`QuerySet.sum` and `QuerySet.average` instead which use the aggreation framework -by default from now on. - -0.9.0 -***** - -The 0.8.7 package on pypi was corrupted. If upgrading from 0.8.7 to 0.9.0 please follow: :: - - python -m pip uninstall pymongo - python -m pip uninstall mongoengine - python -m pip install pymongo==2.8 - python -m pip install mongoengine - -0.8.7 -***** - -Calling reload on deleted / nonexistent documents now raises a DoesNotExist -exception. - - -0.8.2 to 0.8.3 -************** - -Minor change that may impact users: - -DynamicDocument fields are now stored in creation order after any declared -fields. Previously they were stored alphabetically. - - -0.7 to 0.8 -********** - -There have been numerous backwards breaking changes in 0.8. The reasons for -these are to ensure that MongoEngine has sane defaults going forward and that it -performs the best it can out of the box. Where possible there have been -FutureWarnings to help get you ready for the change, but that hasn't been -possible for the whole of the release. - -.. warning:: Breaking changes - test upgrading on a test system before putting - live. There maybe multiple manual steps in migrating and these are best honed - on a staging / test system. - -Python and PyMongo -================== - -MongoEngine requires python 2.6 (or above) and pymongo 2.5 (or above) - -Data Model -========== - -Inheritance ------------ - -The inheritance model has changed, we no longer need to store an array of -:attr:`types` with the model we can just use the classname in :attr:`_cls`. -This means that you will have to update your indexes for each of your -inherited classes like so: :: - - # 1. 
Declaration of the class - class Animal(Document): - name = StringField() - meta = { - 'allow_inheritance': True, - 'indexes': ['name'] - } - - # 2. Remove _types - collection = Animal._get_collection() - collection.update({}, {"$unset": {"_types": 1}}, multi=True) - - # 3. Confirm extra data is removed - count = collection.find({'_types': {"$exists": True}}).count() - assert count == 0 - - # 4. Remove indexes - info = collection.index_information() - indexes_to_drop = [key for key, value in info.items() - if '_types' in dict(value['key'])] - for index in indexes_to_drop: - collection.drop_index(index) - - # 5. Recreate indexes - Animal.ensure_indexes() - - -Document Definition -------------------- - -The default for inheritance has changed - it is now off by default and -:attr:`_cls` will not be stored automatically with the class. So if you extend -your :class:`~mongoengine.Document` or :class:`~mongoengine.EmbeddedDocuments` -you will need to declare :attr:`allow_inheritance` in the meta data like so: :: - - class Animal(Document): - name = StringField() - - meta = {'allow_inheritance': True} - -Previously, if you had data in the database that wasn't defined in the Document -definition, it would set it as an attribute on the document. This is no longer -the case and the data is set only in the ``document._data`` dictionary: :: - - >>> from mongoengine import * - >>> class Animal(Document): - ... name = StringField() - ... - >>> cat = Animal(name="kit", size="small") - - # 0.7 - >>> cat.size - u'small' - - # 0.8 - >>> cat.size - Traceback (most recent call last): - File "", line 1, in - AttributeError: 'Animal' object has no attribute 'size' - -The Document class has introduced a reserved function `clean()`, which will be -called before saving the document. If your document class happens to have a method -with the same name, please try to rename it. 
- - def clean(self): - pass - -ReferenceField --------------- - -ReferenceFields now store ObjectIds by default - this is more efficient than -DBRefs as we already know what Document types they reference:: - - # Old code - class Animal(Document): - name = ReferenceField('self') - - # New code to keep dbrefs - class Animal(Document): - name = ReferenceField('self', dbref=True) - -To migrate all the references you need to touch each object and mark it as dirty -eg:: - - # Doc definition - class Person(Document): - name = StringField() - parent = ReferenceField('self') - friends = ListField(ReferenceField('self')) - - # Mark all ReferenceFields as dirty and save - for p in Person.objects: - p._mark_as_changed('parent') - p._mark_as_changed('friends') - p.save() - -`An example test migration for ReferenceFields is available on github -`_. - -.. Note:: Internally mongoengine handles ReferenceFields the same, so they are - converted to DBRef on loading and ObjectIds or DBRefs depending on settings - on storage. - -UUIDField ---------- - -UUIDFields now default to storing binary values:: - - # Old code - class Animal(Document): - uuid = UUIDField() - - # New code - class Animal(Document): - uuid = UUIDField(binary=False) - -To migrate all the uuids you need to touch each object and mark it as dirty -eg:: - - # Doc definition - class Animal(Document): - uuid = UUIDField() - - # Mark all UUIDFields as dirty and save - for a in Animal.objects: - a._mark_as_changed('uuid') - a.save() - -`An example test migration for UUIDFields is available on github -`_. 
- -DecimalField ------------- - -DecimalFields now store floats - previously it was storing strings and that -made it impossible to do comparisons when querying correctly.:: - - # Old code - class Person(Document): - balance = DecimalField() - - # New code - class Person(Document): - balance = DecimalField(force_string=True) - -To migrate all the DecimalFields you need to touch each object and mark it as dirty -eg:: - - # Doc definition - class Person(Document): - balance = DecimalField() - - # Mark all DecimalField's as dirty and save - for p in Person.objects: - p._mark_as_changed('balance') - p.save() - -.. note:: DecimalFields have also been improved with the addition of precision - and rounding. See :class:`~mongoengine.fields.DecimalField` for more information. - -`An example test migration for DecimalFields is available on github -`_. - -Cascading Saves ---------------- -To improve performance document saves will no longer automatically cascade. -Any changes to a Document's references will either have to be saved manually or -you will have to explicitly tell it to cascade on save:: - - # At the class level: - class Person(Document): - meta = {'cascade': True} - - # Or on save: - my_document.save(cascade=True) - -Storage -------- - -Document and Embedded Documents are now serialized based on declared field order. -Previously, the data was passed to mongodb as a dictionary and which meant that -order wasn't guaranteed - so things like ``$addToSet`` operations on -:class:`~mongoengine.EmbeddedDocument` could potentially fail in unexpected -ways. - -If this impacts you, you may want to rewrite the objects using the -``doc.mark_as_dirty('field')`` pattern described above. If you are using a -compound primary key then you will need to ensure the order is fixed and match -your EmbeddedDocument to that order. - -Querysets -========= - -Attack of the clones --------------------- - -Querysets now return clones and should no longer be considered editable in -place. 
This brings us in line with how Django's querysets work and removes a -long running gotcha. If you edit your querysets inplace you will have to -update your code like so: :: - - # Old code: - mammals = Animal.objects(type="mammal") - mammals.filter(order="Carnivora") # Returns a cloned queryset that isn't assigned to anything - so this will break in 0.8 - [m for m in mammals] # This will return all mammals in 0.8 as the 2nd filter returned a new queryset - - # Update example a) assign queryset after a change: - mammals = Animal.objects(type="mammal") - carnivores = mammals.filter(order="Carnivora") # Reassign the new queryset so filter can be applied - [m for m in carnivores] # This will return all carnivores - - # Update example b) chain the queryset: - mammals = Animal.objects(type="mammal").filter(order="Carnivora") # The final queryset is assgined to mammals - [m for m in mammals] # This will return all carnivores - -Len iterates the queryset -------------------------- - -If you ever did `len(queryset)` it previously did a `count()` under the covers, -this caused some unusual issues. As `len(queryset)` is most often used by -`list(queryset)` we now cache the queryset results and use that for the length. - -This isn't as performant as a `count()` and if you aren't iterating the -queryset you should upgrade to use count:: - - # Old code - len(Animal.objects(type="mammal")) - - # New code - Animal.objects(type="mammal").count() - - -.only() now inline with .exclude() ----------------------------------- - -The behaviour of `.only()` was highly ambiguous, now it works in mirror fashion -to `.exclude()`. 
Chaining `.only()` calls will increase the fields required:: - - # Old code - Animal.objects().only(['type', 'name']).only('name', 'order') # Would have returned just `name` - - # New code - Animal.objects().only('name') - - # Note: - Animal.objects().only(['name']).only('order') # Now returns `name` *and* `order` - - -Client -====== -PyMongo 2.4 came with a new connection client; MongoClient_ and started the -depreciation of the old :class:`~pymongo.connection.Connection`. MongoEngine -now uses the latest `MongoClient` for connections. By default operations were -`safe` but if you turned them off or used the connection directly this will -impact your queries. - -Querysets ---------- - -Safe -^^^^ - -`safe` has been depreciated in the new MongoClient connection. Please use -`write_concern` instead. As `safe` always defaulted as `True` normally no code -change is required. To disable confirmation of the write just pass `{"w": 0}` -eg: :: - - # Old - Animal(name="Dinasour").save(safe=False) - - # new code: - Animal(name="Dinasour").save(write_concern={"w": 0}) - -Write Concern -^^^^^^^^^^^^^ - -`write_options` has been replaced with `write_concern` to bring it inline with -pymongo. To upgrade simply rename any instances where you used the `write_option` -keyword to `write_concern` like so:: - - # Old code: - Animal(name="Dinasour").save(write_options={"w": 2}) - - # new code: - Animal(name="Dinasour").save(write_concern={"w": 2}) - - -Indexes -======= - -Index methods are no longer tied to querysets but rather to the document class. -Although `QuerySet._ensure_indexes` and `QuerySet.ensure_index` still exist. -They should be replaced with :func:`~mongoengine.Document.ensure_indexes` / -:func:`~mongoengine.Document.ensure_index`. - -SequenceFields -============== - -:class:`~mongoengine.fields.SequenceField` now inherits from `BaseField` to -allow flexible storage of the calculated value. As such MIN and MAX settings -are no longer handled. - -.. 
_MongoClient: http://blog.mongodb.org/post/36666163412/introducing-mongoclient - -0.6 to 0.7 -********** - -Cascade saves -============= - -Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set -to True. This is because in 0.8 it will default to False. If you require -cascading saves then either set it in the `meta` or pass -via `save` eg :: - - # At the class level: - class Person(Document): - meta = {'cascade': True} - - # Or in code: - my_document.save(cascade=True) - -.. note:: - Remember: cascading saves **do not** cascade through lists. - -ReferenceFields -=============== - -ReferenceFields now can store references as ObjectId strings instead of DBRefs. -This will become the default in 0.8 and if `dbref` is not set a `FutureWarning` -will be raised. - - -To explicitly continue to use DBRefs change the `dbref` flag -to True :: - - class Person(Document): - groups = ListField(ReferenceField(Group, dbref=True)) - -To migrate to using strings instead of DBRefs you will have to manually -migrate :: - - # Step 1 - Migrate the model definition - class Group(Document): - author = ReferenceField(User, dbref=False) - members = ListField(ReferenceField(User, dbref=False)) - - # Step 2 - Migrate the data - for g in Group.objects(): - g.author = g.author - g.members = g.members - g.save() - - -item_frequencies -================ - -In the 0.6 series we added support for null / zero / false values in -item_frequencies. A side effect was to return keys in the value they are -stored in rather than as string representations. Your code may need to be -updated to handle native types rather than strings keys for the results of -item frequency queries. - -BinaryFields -============ - -Binary fields have been updated so that they are native binary types. If you -previously were doing `str` comparisons with binary field values you will have -to update and wrap the value in a `str`. 
- -0.5 to 0.6 -********** - -Embedded Documents - if you had a `pk` field you will have to rename it from -`_id` to `pk` as pk is no longer a property of Embedded Documents. - -Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw -an InvalidDocument error as they aren't currently supported. - -Document._get_subclasses - Is no longer used and the class method has been -removed. - -Document.objects.with_id - now raises an InvalidQueryError if used with a -filter. - -FutureWarning - A future warning has been added to all inherited classes that -don't define :attr:`allow_inheritance` in their meta. - -You may need to update pyMongo to 2.0 for use with Sharding. - -0.4 to 0.5 -********** - -There have been the following backwards incompatibilities from 0.4 to 0.5. The -main areas of changed are: choices in fields, map_reduce and collection names. - -Choice options: -=============== - -Are now expected to be an iterable of tuples, with the first element in each -tuple being the actual value to be stored. The second element is the -human-readable name for the option. - - -PyMongo / MongoDB -================= - -map reduce now requires pymongo 1.11+- The pymongo `merge_output` and -`reduce_output` parameters, have been depreciated. 
- -More methods now use map_reduce as db.eval is not supported for sharding as -such the following have been changed: - - * :meth:`~mongoengine.queryset.QuerySet.sum` - * :meth:`~mongoengine.queryset.QuerySet.average` - * :meth:`~mongoengine.queryset.QuerySet.item_frequencies` - - -Default collection naming -========================= - -Previously it was just lowercase, it's now much more pythonic and readable as -it's lowercase and underscores, previously :: - - class MyAceDocument(Document): - pass - - MyAceDocument._meta['collection'] == myacedocument - -In 0.5 this will change to :: - - class MyAceDocument(Document): - pass - - MyAceDocument._get_collection_name() == my_ace_document - -To upgrade use a Mixin class to set meta like so :: - - class BaseMixin(object): - meta = { - 'collection': lambda c: c.__name__.lower() - } - - class MyAceDocument(Document, BaseMixin): - pass - - MyAceDocument._get_collection_name() == "myacedocument" - -Alternatively, you can rename your collections eg :: - - from mongoengine.connection import _get_db - from mongoengine.base import _document_registry - - def rename_collections(): - db = _get_db() - - failure = False - - collection_names = [d._get_collection_name() - for d in _document_registry.values()] - - for new_style_name in collection_names: - if not new_style_name: # embedded documents don't have collections - continue - old_style_name = new_style_name.replace('_', '') - - if old_style_name == new_style_name: - continue # Nothing to do - - existing = db.collection_names() - if old_style_name in existing: - if new_style_name in existing: - failure = True - print "FAILED to rename: %s to %s (already exists)" % ( - old_style_name, new_style_name) - else: - db[old_style_name].rename(new_style_name) - print "Renamed: %s to %s" % (old_style_name, - new_style_name) - - if failure: - print "Upgrading collection names failed" - else: - print "Upgraded collection names" - - -mongodb 1.8 > 2.0 + -=================== - -It's been 
reported that indexes may need to be recreated to the newer version of indexes. -To do this drop indexes and call ``ensure_indexes`` on each model. diff --git a/mongoengine/__init__.py b/mongoengine/__init__.py index d316b0f14..a42755bb4 100644 --- a/mongoengine/__init__.py +++ b/mongoengine/__init__.py @@ -29,7 +29,7 @@ ) -VERSION = (0, 23, 1) +VERSION = (0, 27, 0) def get_version(): diff --git a/mongoengine/base/datastructures.py b/mongoengine/base/datastructures.py index a32f6040a..a3561b8b1 100644 --- a/mongoengine/base/datastructures.py +++ b/mongoengine/base/datastructures.py @@ -31,9 +31,9 @@ def mark_key_as_changed_wrapper(parent_method): def wrapper(self, key, *args, **kwargs): # Can't use super() in the decorator. - result = parent_method(self, key, *args, **kwargs) - self._mark_as_changed(key) - return result + if not args or key not in self or self[key] != args[0]: + self._mark_as_changed(key) + return parent_method(self, key, *args, **kwargs) return wrapper diff --git a/mongoengine/base/document.py b/mongoengine/base/document.py index 46935c1b8..e5311c5dd 100644 --- a/mongoengine/base/document.py +++ b/mongoengine/base/document.py @@ -1,5 +1,6 @@ import copy import numbers +import warnings from functools import partial import pymongo @@ -23,11 +24,17 @@ OperationError, ValidationError, ) +from mongoengine.pymongo_support import LEGACY_JSON_OPTIONS __all__ = ("BaseDocument", "NON_FIELD_ERRORS") NON_FIELD_ERRORS = "__all__" +try: + GEOHAYSTACK = pymongo.GEOHAYSTACK +except AttributeError: + GEOHAYSTACK = None + class BaseDocument: # TODO simplify how `_changed_fields` is used. 
@@ -154,7 +161,6 @@ def __delattr__(self, *args, **kwargs): def __setattr__(self, name, value): # Handle dynamic data only if an initialised dynamic document if self._dynamic and not self._dynamic_lock: - if name not in self._fields_ordered and not name.startswith("_"): DynamicField = _import_class("DynamicField") field = DynamicField(db_field=name, null=True) @@ -365,7 +371,7 @@ def to_mongo(self, use_db_field=True, fields=None): value = field.generate() self._data[field_name] = value - if (value is not None) or (field.null): + if value is not None or field.null: if use_db_field: data[field.db_field] = value else: @@ -439,10 +445,20 @@ def to_json(self, *args, **kwargs): Defaults to True. """ use_db_field = kwargs.pop("use_db_field", True) + if "json_options" not in kwargs: + warnings.warn( + "No 'json_options' are specified! Falling back to " + "LEGACY_JSON_OPTIONS with uuid_representation=PYTHON_LEGACY. " + "For use with other MongoDB drivers specify the UUID " + "representation to use. This will be changed to " + "uuid_representation=UNSPECIFIED in a future release.", + DeprecationWarning, + ) + kwargs["json_options"] = LEGACY_JSON_OPTIONS return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs) @classmethod - def from_json(cls, json_data, created=False): + def from_json(cls, json_data, created=False, **kwargs): """Converts json data to a Document instance :param str json_data: The json data to load into the Document @@ -460,7 +476,17 @@ def from_json(cls, json_data, created=False): # TODO should `created` default to False? If the object already exists # in the DB, you would likely retrieve it from MongoDB itself through # a query, not load it from JSON data. - return cls._from_son(json_util.loads(json_data), created=created) + if "json_options" not in kwargs: + warnings.warn( + "No 'json_options' are specified! Falling back to " + "LEGACY_JSON_OPTIONS with uuid_representation=PYTHON_LEGACY. 
" + "For use with other MongoDB drivers specify the UUID " + "representation to use. This will be changed to " + "uuid_representation=UNSPECIFIED in a future release.", + DeprecationWarning, + ) + kwargs["json_options"] = LEGACY_JSON_OPTIONS + return cls._from_son(json_util.loads(json_data, **kwargs), created=created) def __expand_dynamic_values(self, name, value): """Expand any dynamic values to their correct types / values.""" @@ -492,9 +518,6 @@ def __expand_dynamic_values(self, name, value): def _mark_as_changed(self, key): """Mark a key as explicitly changed by the user.""" - if not key: - return - if not hasattr(self, "_changed_fields"): return @@ -898,7 +921,10 @@ def _build_index_spec(cls, spec): elif key.startswith("("): direction = pymongo.GEOSPHERE elif key.startswith(")"): - direction = pymongo.GEOHAYSTACK + try: + direction = pymongo.GEOHAYSTACK + except AttributeError: + raise NotImplementedError elif key.startswith("*"): direction = pymongo.GEO2D if key.startswith(("+", "-", "*", "$", "#", "(", ")")): @@ -923,10 +949,10 @@ def _build_index_spec(cls, spec): index_list.append((key, direction)) # Don't add cls to a geo index - if include_cls and direction not in ( - pymongo.GEO2D, - pymongo.GEOHAYSTACK, - pymongo.GEOSPHERE, + if ( + include_cls + and direction not in (pymongo.GEO2D, pymongo.GEOSPHERE) + and (GEOHAYSTACK is None or direction != GEOHAYSTACK) ): index_list.insert(0, ("_cls", 1)) diff --git a/mongoengine/base/fields.py b/mongoengine/base/fields.py index a68035274..037e916ff 100644 --- a/mongoengine/base/fields.py +++ b/mongoengine/base/fields.py @@ -52,20 +52,20 @@ def __init__( :param required: If the field is required. Whether it has to have a value or not. Defaults to False. :param default: (optional) The default value for this field if no value - has been set (or if the value has been unset). It can be a + has been set, if the value is set to None or has been unset. It can be a callable. 
- :param unique: Is the field value unique or not. Defaults to False. + :param unique: Is the field value unique or not (Creates an index). Defaults to False. :param unique_with: (optional) The other field this field should be - unique with. - :param primary_key: Mark this field as the primary key. Defaults to False. + unique with (Creates an index). + :param primary_key: Mark this field as the primary key (Creates an index). Defaults to False. :param validation: (optional) A callable to validate the value of the - field. The callable takes the value as parameter and should raise + field. The callable takes the value as parameter and should raise a ValidationError if validation fails :param choices: (optional) The valid choices - :param null: (optional) If the field value can be null. If no and there is a default value - then the default value is set + :param null: (optional) If the field value can be null when a default exists. If not set, the default value + will be used in case a field with a default value is set to None. Defaults to False. :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` - means that uniqueness won't be enforced for `None` values + means that uniqueness won't be enforced for `None` values (Creates an index). Defaults to False. :param **kwargs: (optional) Arbitrary indirection-free metadata for this field can be supplied as additional keyword arguments and accessed as attributes of the field.
Must not conflict with any @@ -282,6 +282,18 @@ def _lazy_load_refs(instance, name, ref_values, *, max_depth): ) return documents + def __set__(self, instance, value): + # Some fields e.g EnumField are converted upon __set__ + # So it is fair to mimic the same behavior when using e.g ListField(EnumField) + EnumField = _import_class("EnumField") + if self.field and isinstance(self.field, EnumField): + if isinstance(value, (list, tuple)): + value = [self.field.to_python(sub_val) for sub_val in value] + elif isinstance(value, dict): + value = {key: self.field.to_python(sub) for key, sub in value.items()} + + return super().__set__(instance, value) + def __get__(self, instance, owner): """Descriptor to automatically dereference references.""" if instance is None: @@ -434,12 +446,12 @@ def to_mongo(self, value, use_db_field=True, fields=None): " have been saved to the database" ) - # If its a document that is not inheritable it won't have + # If it's a document that is not inheritable it won't have # any _cls data so make it a generic reference allows # us to dereference meta = getattr(v, "_meta", {}) allow_inheritance = meta.get("allow_inheritance") - if not allow_inheritance and not self.field: + if not allow_inheritance: value_dict[k] = GenericReferenceField().to_mongo(v) else: collection = v._get_collection_name() @@ -509,14 +521,17 @@ def to_python(self, value): return value def to_mongo(self, value): - if not isinstance(value, ObjectId): - try: - return ObjectId(str(value)) - except Exception as e: - self.error(str(e)) - return value + if isinstance(value, ObjectId): + return value + + try: + return ObjectId(str(value)) + except Exception as e: + self.error(str(e)) def prepare_query_value(self, op, value): + if value is None: + return value return self.to_mongo(value) def validate(self, value): diff --git a/mongoengine/base/metaclasses.py b/mongoengine/base/metaclasses.py index 072b3aada..36ce47c3b 100644 --- a/mongoengine/base/metaclasses.py +++ 
b/mongoengine/base/metaclasses.py @@ -178,7 +178,6 @@ def __new__(mcs, name, bases, attrs): f.owner_document = new_class delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING) if isinstance(f, CachedReferenceField): - if issubclass(new_class, EmbeddedDocument): raise InvalidDocumentError( "CachedReferenceFields is not allowed in EmbeddedDocuments" diff --git a/mongoengine/connection.py b/mongoengine/connection.py index 11cd93083..27a2970f7 100644 --- a/mongoengine/connection.py +++ b/mongoengine/connection.py @@ -1,6 +1,21 @@ +import warnings + from pymongo import MongoClient, ReadPreference, uri_parser +from pymongo.common import ( + _UUID_REPRESENTATIONS, + _CaseInsensitiveDictionary, +) from pymongo.database import _check_name +# DriverInfo was added in PyMongo 3.7. +try: + from pymongo.driver_info import DriverInfo +except ImportError: + DriverInfo = None + +import mongoengine +from mongoengine.pymongo_support import PYMONGO_VERSION + __all__ = [ "DEFAULT_CONNECTION_NAME", "DEFAULT_DATABASE_NAME", @@ -54,24 +69,26 @@ def _get_connection_settings( password=None, authentication_source=None, authentication_mechanism=None, + authmechanismproperties=None, **kwargs, ): """Get the connection settings as a dict - : param db: the name of the database to use, for compatibility with connect - : param name: the name of the specific database to use - : param host: the host name of the: program: `mongod` instance to connect to - : param port: the port that the: program: `mongod` instance is running on - : param read_preference: The read preference for the collection - : param username: username to authenticate with - : param password: password to authenticate with - : param authentication_source: database to authenticate against - : param authentication_mechanism: database authentication mechanisms. 
+ :param db: the name of the database to use, for compatibility with connect + :param name: the name of the specific database to use + :param host: the host name of the: program: `mongod` instance to connect to + :param port: the port that the: program: `mongod` instance is running on + :param read_preference: The read preference for the collection + :param username: username to authenticate with + :param password: password to authenticate with + :param authentication_source: database to authenticate against + :param authentication_mechanism: database authentication mechanisms. By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, MONGODB-CR (MongoDB Challenge Response protocol) for older servers. - : param is_mock: explicitly use mongomock for this connection - (can also be done by using `mongomock: // ` as db host prefix) - : param kwargs: ad-hoc parameters to be passed into the pymongo driver, + :param mongo_client_class: using alternative connection client other than + pymongo.MongoClient, e.g. mongomock, montydb, that provides pymongo alike + interface but not necessarily for connecting to a real mongo instance. + :param kwargs: ad-hoc parameters to be passed into the pymongo driver, for example maxpoolsize, tz_aware, etc. See the documentation for pymongo's `MongoClient` for a full list. 
""" @@ -84,6 +101,7 @@ def _get_connection_settings( "password": password, "authentication_source": authentication_source, "authentication_mechanism": authentication_mechanism, + "authmechanismproperties": authmechanismproperties, } _check_db_name(conn_settings["name"]) @@ -95,23 +113,17 @@ def _get_connection_settings( resolved_hosts = [] for entity in conn_host: - - # Handle Mongomock - if entity.startswith("mongomock://"): - conn_settings["is_mock"] = True - # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://` - new_entity = entity.replace("mongomock://", "mongodb://", 1) - resolved_hosts.append(new_entity) - - uri_dict = uri_parser.parse_uri(new_entity) - - database = uri_dict.get("database") - if database: - conn_settings["name"] = database + # Reject old mongomock integration + # To be removed in a few versions after 0.27.0 + if entity.startswith("mongomock://") or kwargs.get("is_mock"): + raise Exception( + "Use of mongomock:// URI or 'is_mock' were removed in favor of 'mongo_client_class=mongomock.MongoClient'. " + "Check the CHANGELOG for more info" + ) # Handle URI style connections, only updating connection params which # were explicitly specified in the URI. 
- elif "://" in entity: + if "://" in entity: uri_dict = uri_parser.parse_uri(entity) resolved_hosts.append(entity) @@ -123,7 +135,7 @@ def _get_connection_settings( if uri_dict.get(param): conn_settings[param] = uri_dict[param] - uri_options = uri_dict["options"] + uri_options: _CaseInsensitiveDictionary = uri_dict["options"] if "replicaset" in uri_options: conn_settings["replicaSet"] = uri_options["replicaset"] if "authsource" in uri_options: @@ -152,8 +164,27 @@ def _get_connection_settings( preference.name.lower() == read_pf_mode or preference.mode == read_pf_mode ): - conn_settings["read_preference"] = preference + ReadPrefClass = preference.__class__ break + + if "readpreferencetags" in uri_options: + conn_settings["read_preference"] = ReadPrefClass( + tag_sets=uri_options["readpreferencetags"] + ) + else: + conn_settings["read_preference"] = ReadPrefClass() + + if "authmechanismproperties" in uri_options: + conn_settings["authmechanismproperties"] = uri_options[ + "authmechanismproperties" + ] + if "uuidrepresentation" in uri_options: + REV_UUID_REPRESENTATIONS = { + v: k for k, v in _UUID_REPRESENTATIONS.items() + } + conn_settings["uuidrepresentation"] = REV_UUID_REPRESENTATIONS[ + uri_options["uuidrepresentation"] + ] else: resolved_hosts.append(entity) conn_settings["host"] = resolved_hosts @@ -162,6 +193,21 @@ def _get_connection_settings( kwargs.pop("slaves", None) kwargs.pop("is_slave", None) + keys = { + key.lower() for key in kwargs.keys() + } # pymongo options are case insensitive + if "uuidrepresentation" not in keys and "uuidrepresentation" not in conn_settings: + warnings.warn( + "No uuidRepresentation is specified! Falling back to " + "'pythonLegacy' which is the default for pymongo 3.x. " + "For compatibility with other MongoDB drivers this should be " + "specified as 'standard' or '{java,csharp}Legacy' to work with " + "older drivers in those languages. 
This will be changed to " + "'unspecified' in a future release.", + DeprecationWarning, + ) + kwargs["uuidRepresentation"] = "pythonLegacy" + conn_settings.update(kwargs) return conn_settings @@ -177,26 +223,27 @@ def register_connection( password=None, authentication_source=None, authentication_mechanism=None, + authmechanismproperties=None, **kwargs, ): """Register the connection settings. - : param alias: the name that will be used to refer to this connection - throughout MongoEngine - : param db: the name of the database to use, for compatibility with connect - : param name: the name of the specific database to use - : param host: the host name of the: program: `mongod` instance to connect to - : param port: the port that the: program: `mongod` instance is running on - : param read_preference: The read preference for the collection - : param username: username to authenticate with - : param password: password to authenticate with - : param authentication_source: database to authenticate against - : param authentication_mechanism: database authentication mechanisms. + :param alias: the name that will be used to refer to this connection throughout MongoEngine + :param db: the name of the database to use, for compatibility with connect + :param name: the name of the specific database to use + :param host: the host name of the: program: `mongod` instance to connect to + :param port: the port that the: program: `mongod` instance is running on + :param read_preference: The read preference for the collection + :param username: username to authenticate with + :param password: password to authenticate with + :param authentication_source: database to authenticate against + :param authentication_mechanism: database authentication mechanisms. By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, MONGODB-CR (MongoDB Challenge Response protocol) for older servers. 
- : param is_mock: explicitly use mongomock for this connection - (can also be done by using `mongomock: // ` as db host prefix) - : param kwargs: ad-hoc parameters to be passed into the pymongo driver, + :param mongo_client_class: using alternative connection client other than + pymongo.MongoClient, e.g. mongomock, montydb, that provides pymongo alike + interface but not necessarily for connecting to a real mongo instance. + :param kwargs: ad-hoc parameters to be passed into the pymongo driver, for example maxpoolsize, tz_aware, etc. See the documentation for pymongo's `MongoClient` for a full list. """ @@ -210,6 +257,7 @@ def register_connection( password=password, authentication_source=authentication_source, authentication_mechanism=authentication_mechanism, + authmechanismproperties=authmechanismproperties, **kwargs, ) _connection_settings[alias] = conn_settings @@ -220,9 +268,15 @@ def disconnect(alias=DEFAULT_CONNECTION_NAME): from mongoengine import Document from mongoengine.base.common import _get_documents_by_db - if alias in _connections: - get_connection(alias=alias).close() - del _connections[alias] + connection = _connections.pop(alias, None) + if connection: + # MongoEngine may share the same MongoClient across multiple aliases + # if connection settings are the same so we only close + # the client if we're removing the final reference. 
+ # Important to use 'is' instead of '==' because clients connected to the same cluster + # will compare equal even with different options + if all(connection is not c for c in _connections.values()): + connection.close() if alias in _dbs: # Detach all cached collections in Documents @@ -264,15 +318,26 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): raise ConnectionFailure(msg) def _clean_settings(settings_dict): - irrelevant_fields_set = { - "name", - "username", - "password", - "authentication_source", - "authentication_mechanism", - } + if PYMONGO_VERSION < (4,): + irrelevant_fields_set = { + "name", + "username", + "password", + "authentication_source", + "authentication_mechanism", + "authmechanismproperties", + } + rename_fields = {} + else: + irrelevant_fields_set = {"name"} + rename_fields = { + "authentication_source": "authSource", + "authentication_mechanism": "authMechanism", + } return { - k: v for k, v in settings_dict.items() if k not in irrelevant_fields_set + rename_fields.get(k, k): v + for k, v in settings_dict.items() + if k not in irrelevant_fields_set and v is not None } raw_conn_settings = _connection_settings[alias].copy() @@ -281,17 +346,16 @@ def _clean_settings(settings_dict): # alias and remove the database name and authentication info (we don't # care about them at this point). conn_settings = _clean_settings(raw_conn_settings) + if DriverInfo is not None: + conn_settings.setdefault( + "driver", DriverInfo("MongoEngine", mongoengine.__version__) + ) # Determine if we should use PyMongo's or mongomock's MongoClient. 
- is_mock = conn_settings.pop("is_mock", False) - if is_mock: - try: - import mongomock - except ImportError: - raise RuntimeError("You need mongomock installed to mock MongoEngine.") - connection_class = mongomock.MongoClient + if "mongo_client_class" in conn_settings: + mongo_client_class = conn_settings.pop("mongo_client_class") else: - connection_class = MongoClient + mongo_client_class = MongoClient # Re-use existing connection if one is suitable. existing_connection = _find_existing_connection(raw_conn_settings) @@ -299,19 +363,19 @@ def _clean_settings(settings_dict): connection = existing_connection else: connection = _create_connection( - alias=alias, connection_class=connection_class, **conn_settings + alias=alias, mongo_client_class=mongo_client_class, **conn_settings ) _connections[alias] = connection return _connections[alias] -def _create_connection(alias, connection_class, **connection_settings): +def _create_connection(alias, mongo_client_class, **connection_settings): """ Create the new connection for this alias. Raise ConnectionFailure if it can't be established. 
""" try: - return connection_class(**connection_settings) + return mongo_client_class(**connection_settings) except Exception as e: raise ConnectionFailure(f"Cannot connect to database {alias} :\n{e}") @@ -352,14 +416,19 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): conn = get_connection(alias) conn_settings = _connection_settings[alias] db = conn[conn_settings["name"]] - auth_kwargs = {"source": conn_settings["authentication_source"]} - if conn_settings["authentication_mechanism"] is not None: - auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"] # Authenticate if necessary - if conn_settings["username"] and ( - conn_settings["password"] - or conn_settings["authentication_mechanism"] == "MONGODB-X509" + if ( + PYMONGO_VERSION < (4,) + and conn_settings["username"] + and ( + conn_settings["password"] + or conn_settings["authentication_mechanism"] == "MONGODB-X509" + ) + and conn_settings["authmechanismproperties"] is None ): + auth_kwargs = {"source": conn_settings["authentication_source"]} + if conn_settings["authentication_mechanism"] is not None: + auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"] db.authenticate( conn_settings["username"], conn_settings["password"], **auth_kwargs ) diff --git a/mongoengine/context_managers.py b/mongoengine/context_managers.py index 257d27f82..eb9c99622 100644 --- a/mongoengine/context_managers.py +++ b/mongoengine/context_managers.py @@ -177,14 +177,28 @@ class query_counter: This was designed for debugging purpose. In fact it is a global counter so queries issued by other threads/processes can interfere with it + Usage: + + .. 
code-block:: python + + class User(Document): + name = StringField() + + with query_counter() as q: + user = User(name='Bob') + assert q == 0 # no query fired yet + user.save() + assert q == 1 # 1 query was fired, an 'insert' + user_bis = User.objects().first() + assert q == 2 # a 2nd query was fired, a 'find_one' + Be aware that: - - Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of - documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches) + + - Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches) - Some queries are ignored by default by the counter (killcursors, db.system.indexes) """ def __init__(self, alias=DEFAULT_CONNECTION_NAME): - """Construct the query_counter""" self.db = get_db(alias=alias) self.initial_profiling_level = None self._ctx_query_counter = 0 # number of queries issued by the context @@ -196,13 +210,14 @@ def __init__(self, alias=DEFAULT_CONNECTION_NAME): } def _turn_on_profiling(self): - self.initial_profiling_level = self.db.profiling_level() - self.db.set_profiling_level(0) + profile_update_res = self.db.command({"profile": 0}) + self.initial_profiling_level = profile_update_res["was"] + self.db.system.profile.drop() - self.db.set_profiling_level(2) + self.db.command({"profile": 2}) def _resets_profiling(self): - self.db.set_profiling_level(self.initial_profiling_level) + self.db.command({"profile": self.initial_profiling_level}) def __enter__(self): self._turn_on_profiling() diff --git a/mongoengine/dereference.py b/mongoengine/dereference.py index f388fe069..af817902e 100644 --- a/mongoengine/dereference.py +++ b/mongoengine/dereference.py @@ -165,7 +165,6 @@ def _fetch_objects(self, doc_type=None): """Fetch all references and convert to their document objects""" object_map = {} for collection, dbrefs in 
self.reference_map.items(): - # we use getattr instead of hasattr because hasattr swallows any exception under python2 # so it could hide nasty things without raising exceptions (cfr bug #1688)) ref_document_cls_exists = getattr(collection, "objects", None) is not None diff --git a/mongoengine/document.py b/mongoengine/document.py index 0ba5db126..3fddec36d 100644 --- a/mongoengine/document.py +++ b/mongoengine/document.py @@ -99,6 +99,15 @@ def __eq__(self, other): def __ne__(self, other): return not self.__eq__(other) + def __getstate__(self): + data = super().__getstate__() + data["_instance"] = None + return data + + def __setstate__(self, state): + super().__setstate__(state) + self._instance = state["_instance"] + def to_mongo(self, *args, **kwargs): data = super().to_mongo(*args, **kwargs) @@ -126,7 +135,7 @@ class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): create a specialised version of the document that will be stored in the same collection. To facilitate this behaviour a `_cls` field is added to documents (hidden though the MongoEngine interface). - To enable this behaviourset :attr:`allow_inheritance` to ``True`` in the + To enable this behaviour set :attr:`allow_inheritance` to ``True`` in the :attr:`meta` dictionary. A :class:`~mongoengine.Document` may use a **Capped Collection** by @@ -217,8 +226,7 @@ def _get_collection(cls): cls._collection = db[collection_name] # Ensure indexes on the collection unless auto_create_index was - # set to False. - # Also there is no need to ensure indexes on slave. + # set to False. Plus, there is no need to ensure indexes on slave. db = cls._get_db() if cls._meta.get("auto_create_index", True) and db.client.is_primary: cls.ensure_indexes() @@ -232,7 +240,7 @@ def _get_capped_collection(cls): collection_name = cls._get_collection_name() # Get max document limit and max byte size from meta. 
- max_size = cls._meta.get("max_size") or 10 * 2 ** 20 # 10MB default + max_size = cls._meta.get("max_size") or 10 * 2**20 # 10MB default max_documents = cls._meta.get("max_documents") # MongoDB will automatically raise the size to make it a multiple of @@ -375,6 +383,10 @@ def save( meta['cascade'] = True. Also you can pass different kwargs to the cascade save using cascade_kwargs which overwrites the existing kwargs with custom values. + .. versionchanged:: 0.26 + save() no longer calls :meth:`~mongoengine.Document.ensure_indexes` + unless ``meta['auto_create_index_on_save']`` is set to True. + """ signal_kwargs = signal_kwargs or {} @@ -398,13 +410,21 @@ def save( # it might be refreshed by the pre_save_post_validation hook, e.g., for etag generation doc = self.to_mongo() - if self._meta.get("auto_create_index", True): + # Initialize the Document's underlying pymongo.Collection (+create indexes) if not already initialized + # Important to do this here to avoid that the index creation gets wrapped in the try/except block below + # and turned into mongoengine.OperationError + if self._collection is None: + _ = self._get_collection() + elif self._meta.get("auto_create_index_on_save", False): + # ensure_indexes is called as part of _get_collection so no need to re-call it again here self.ensure_indexes() try: # Save a new document or update an existing one if created: - object_id = self._save_create(doc, force_insert, write_concern) + object_id = self._save_create( + doc=doc, force_insert=force_insert, write_concern=write_concern + ) else: object_id, created = self._save_update( doc, save_condition, write_concern @@ -574,7 +594,8 @@ def cascade_save(self, **kwargs): def _qs(self): """Return the default queryset corresponding to this document.""" if not hasattr(self, "__objects"): - self.__objects = QuerySet(self.__class__, self._get_collection()) + queryset_class = self._meta.get("queryset_class", QuerySet) + self.__objects = queryset_class(self.__class__, 
self._get_collection()) return self.__objects @property @@ -848,24 +869,20 @@ def create_index(cls, keys, background=False, **kwargs): return cls._get_collection().create_index(fields, **index_spec) - @classmethod - def ensure_index(cls, key_or_list, background=False, **kwargs): - """Ensure that the given indexes are in place. Deprecated in favour - of create_index. - - :param key_or_list: a single index key or a list of index keys (to - construct a multi-field index); keys may be prefixed with a **+** - or a **-** to determine the index ordering - :param background: Allows index creation in the background - """ - return cls.create_index(key_or_list, background=background, **kwargs) - @classmethod def ensure_indexes(cls): """Checks the document meta data and ensures all the indexes exist. Global defaults can be set in the meta - see :doc:`guide/defining-documents` + By default, this will get called automatically upon first interaction with the + Document collection (query, save, etc) so unless you disabled `auto_create_index`, you + shouldn't have to call this manually. + + This also gets called upon every call to Document.save if `auto_create_index_on_save` is set to True + + If called multiple times, MongoDB will not re-recreate indexes if they exist already + .. 
note:: You can disable automatic index creation by setting `auto_create_index` to False in the documents meta data """ @@ -874,10 +891,6 @@ def ensure_indexes(cls): index_cls = cls._meta.get("index_cls", True) collection = cls._get_collection() - # 746: when connection is via mongos, the read preference is not necessarily an indication that - # this code runs on a secondary - if not collection.is_mongos and collection.read_preference > 1: - return # determine if an index which we are creating includes # _cls as its first field; if so, we can avoid creating @@ -905,7 +918,6 @@ def ensure_indexes(cls): # If _cls is being used (for polymorphism), it needs an index, # only if another index doesn't begin with _cls if index_cls and not cls_indexed and cls._meta.get("allow_inheritance"): - # we shouldn't pass 'cls' to the collection.ensureIndex options # because of https://jira.mongodb.org/browse/SERVER-769 if "cls" in index_opts: @@ -915,8 +927,10 @@ def ensure_indexes(cls): @classmethod def list_indexes(cls): - """Lists all of the indexes that should be created for given - collection. It includes all the indexes from super- and sub-classes. + """Lists all indexes that should be created for the Document collection. + It includes all the indexes from super- and sub-classes. 
+ + Note that it will only return the indexes' fields, not the indexes' options """ if cls._meta.get("abstract"): return [] @@ -925,7 +939,6 @@ def list_indexes(cls): classes = [] def get_classes(cls): - if cls not in classes and isinstance(cls, TopLevelDocumentMetaclass): classes.append(cls) @@ -952,7 +965,7 @@ def get_classes(cls): get_classes(cls) - # get the indexes spec for all of the gathered classes + # get the indexes spec for all the gathered classes def get_indexes_spec(cls): indexes = [] @@ -987,8 +1000,10 @@ def compare_indexes(cls): required = cls.list_indexes() existing = [] - for info in cls._get_collection().index_information().values(): + collection = cls._get_collection() + for info in collection.index_information().values(): if "_fts" in info["key"][0]: + # Useful for text indexes (but not only) index_type = info["key"][0][1] text_index_fields = info.get("weights").keys() existing.append([(key, index_type) for key in text_index_fields]) diff --git a/mongoengine/errors.py b/mongoengine/errors.py index f6b426d1c..d789b2a10 100644 --- a/mongoengine/errors.py +++ b/mongoengine/errors.py @@ -17,11 +17,15 @@ ) -class NotRegistered(Exception): +class MongoEngineException(Exception): pass -class InvalidDocumentError(Exception): +class NotRegistered(MongoEngineException): + pass + + +class InvalidDocumentError(MongoEngineException): pass @@ -29,19 +33,19 @@ class LookUpError(AttributeError): pass -class DoesNotExist(Exception): +class DoesNotExist(MongoEngineException): pass -class MultipleObjectsReturned(Exception): +class MultipleObjectsReturned(MongoEngineException): pass -class InvalidQueryError(Exception): +class InvalidQueryError(MongoEngineException): pass -class OperationError(Exception): +class OperationError(MongoEngineException): pass @@ -57,7 +61,7 @@ class SaveConditionError(OperationError): pass -class FieldDoesNotExist(Exception): +class FieldDoesNotExist(MongoEngineException): """Raised when trying to set a field not declared in a 
:class:`~mongoengine.Document` or an :class:`~mongoengine.EmbeddedDocument`. @@ -155,7 +159,7 @@ def generate_key(value, prefix=""): return " ".join([f"{k}: {v}" for k, v in error_dict.items()]) -class DeprecatedError(Exception): +class DeprecatedError(MongoEngineException): """Raise when a user uses a feature that has been Deprecated""" pass diff --git a/mongoengine/fields.py b/mongoengine/fields.py index 7b2fe47ff..40469bfce 100644 --- a/mongoengine/fields.py +++ b/mongoengine/fields.py @@ -12,6 +12,7 @@ import gridfs import pymongo from bson import SON, Binary, DBRef, ObjectId +from bson.decimal128 import Decimal128, create_decimal128_context from bson.int64 import Int64 from pymongo import ReturnDocument @@ -46,6 +47,8 @@ try: from PIL import Image, ImageOps + + LANCZOS = Image.LANCZOS if hasattr(Image, "LANCZOS") else Image.ANTIALIAS except ImportError: Image = None ImageOps = None @@ -95,6 +98,7 @@ "MultiLineStringField", "MultiPolygonField", "GeoJsonBaseField", + "Decimal128Field", ) RECURSIVE_REFERENCE_CONSTANT = "self" @@ -157,10 +161,17 @@ def prepare_query_value(self, op, value): regex = r"%s$" elif op == "exact": regex = r"^%s$" + elif op == "wholeword": + regex = r"\b%s\b" + elif op == "regex": + regex = value - # escape unsafe characters which could lead to a re.error - value = re.escape(value) - value = re.compile(regex % value, flags) + if op == "regex": + value = re.compile(regex, flags) + else: + # escape unsafe characters which could lead to a re.error + value = re.escape(value) + value = re.compile(regex % value, flags) return super().prepare_query_value(op, value) @@ -347,46 +358,12 @@ def prepare_query_value(self, op, value): return super().prepare_query_value(op, int(value)) -class LongField(BaseField): +class LongField(IntField): """64-bit integer field. 
(Equivalent to IntField since the support to Python2 was dropped)""" - def __init__(self, min_value=None, max_value=None, **kwargs): - """ - :param min_value: (optional) A min value that will be applied during validation - :param max_value: (optional) A max value that will be applied during validation - :param kwargs: Keyword arguments passed into the parent :class:`~mongoengine.BaseField` - """ - self.min_value, self.max_value = min_value, max_value - super().__init__(**kwargs) - - def to_python(self, value): - try: - value = int(value) - except (TypeError, ValueError): - pass - return value - def to_mongo(self, value): return Int64(value) - def validate(self, value): - try: - value = int(value) - except (TypeError, ValueError): - self.error("%s could not be converted to long" % value) - - if self.min_value is not None and value < self.min_value: - self.error("Long value is too small") - - if self.max_value is not None and value > self.max_value: - self.error("Long value is too large") - - def prepare_query_value(self, op, value): - if value is None: - return value - - return super().prepare_query_value(op, int(value)) - class FloatField(BaseField): """Floating point number field.""" @@ -431,7 +408,10 @@ def prepare_query_value(self, op, value): class DecimalField(BaseField): - """Fixed-point decimal number field. Stores the value as a float by default unless `force_string` is used. + """Disclaimer: This field is kept for historical reason but since it converts the values to float, it + is not suitable for true decimal storage. Consider using :class:`~mongoengine.fields.Decimal128Field`. + + Fixed-point decimal number field. Stores the value as a float by default unless `force_string` is used. 
If using floats, beware of Decimal to float conversion (potential precision loss) """ @@ -468,27 +448,29 @@ def __init__( self.min_value = min_value self.max_value = max_value self.force_string = force_string + + if precision < 0 or not isinstance(precision, int): + self.error("precision must be a positive integer") + self.precision = precision self.rounding = rounding super().__init__(**kwargs) def to_python(self, value): - if value is None: - return value - # Convert to string for python 2.6 before casting to Decimal try: value = decimal.Decimal("%s" % value) except (TypeError, ValueError, decimal.InvalidOperation): return value - return value.quantize( - decimal.Decimal(".%s" % ("0" * self.precision)), rounding=self.rounding - ) + if self.precision > 0: + return value.quantize( + decimal.Decimal(".%s" % ("0" * self.precision)), rounding=self.rounding + ) + else: + return value.quantize(decimal.Decimal(), rounding=self.rounding) def to_mongo(self, value): - if value is None: - return value if self.force_string: return str(self.to_python(value)) return float(self.to_python(value)) @@ -509,6 +491,8 @@ def validate(self, value): self.error("Decimal value is too large") def prepare_query_value(self, op, value): + if value is None: + return value return super().prepare_query_value(op, self.to_mongo(value)) @@ -712,6 +696,8 @@ def to_mongo(self, value): return self._convert_from_datetime(value) def prepare_query_value(self, op, value): + if value is None: + return value return super().prepare_query_value(op, self._convert_from_datetime(value)) @@ -775,7 +761,7 @@ def validate(self, value, clean=True): "Invalid embedded document instance provided to an " "EmbeddedDocumentField" ) - self.document_type.validate(value, clean) + value.validate(clean=clean) def lookup_member(self, member_name): doc_and_subclasses = [self.document_type] + self.document_type.__subclasses__() @@ -963,7 +949,6 @@ def prepare_query_value(self, op, value): self.error("List is too long") if 
self.field: - # If the value is iterable and it's not a string nor a # BaseDocument, call prepare_query_value for each of its items. if ( @@ -1079,16 +1064,7 @@ def lookup_member(self, member_name): return DictField(db_field=member_name) def prepare_query_value(self, op, value): - match_operators = [ - "contains", - "icontains", - "startswith", - "istartswith", - "endswith", - "iendswith", - "exact", - "iexact", - ] + match_operators = [*STRING_OPERATORS] if op in match_operators and isinstance(value, str): return StringField().prepare_query_value(op, value) @@ -1112,7 +1088,7 @@ class MapField(DictField): """ def __init__(self, field=None, *args, **kwargs): - # XXX ValidationError raised outside of the "validate" method. + # XXX ValidationError raised outside the "validate" method. if not isinstance(field, BaseField): self.error("Argument to MapField constructor must be a valid field") super().__init__(field=field, *args, **kwargs) @@ -1611,11 +1587,14 @@ class EnumField(BaseField): """Enumeration Field. Values are stored underneath as is, so it will only work with simple types (str, int, etc) that are bson encodable - Example usage: + + Example usage: + .. code-block:: python class Status(Enum): NEW = 'new' + ONGOING = 'ongoing' DONE = 'done' class ModelWithEnum(Document): @@ -1625,13 +1604,18 @@ class ModelWithEnum(Document): ModelWithEnum(status=Status.DONE) Enum fields can be searched using enum or its value: + .. code-block:: python ModelWithEnum.objects(status='new').count() ModelWithEnum.objects(status=Status.NEW).count() - Note that choices cannot be set explicitly, they are derived - from the provided enum class. + The values can be restricted to a subset of the enum by using the ``choices`` parameter: + + .. 
code-block:: python + + class ModelWithEnum(Document): + status = EnumField(Status, choices=[Status.NEW, Status.DONE]) """ def __init__(self, enum, **kwargs): @@ -1647,14 +1631,25 @@ def __init__(self, enum, **kwargs): kwargs["choices"] = list(self._enum_cls) # Implicit validator super().__init__(**kwargs) - def __set__(self, instance, value): - is_legal_value = value is None or isinstance(value, self._enum_cls) - if not is_legal_value: + def validate(self, value): + if isinstance(value, self._enum_cls): + return super().validate(value) + try: + self._enum_cls(value) + except ValueError: + self.error(f"{value} is not a valid {self._enum_cls}") + + def to_python(self, value): + value = super().to_python(value) + if not isinstance(value, self._enum_cls): try: - value = self._enum_cls(value) - except Exception: - pass - return super().__set__(instance, value) + return self._enum_cls(value) + except ValueError: + return value + return value + + def __set__(self, instance, value): + return super().__set__(instance, self.to_python(value)) def to_mongo(self, value): if isinstance(value, self._enum_cls): @@ -1953,23 +1948,19 @@ def put(self, file_obj, **kwargs): size = field.size if size["force"]: - img = ImageOps.fit( - img, (size["width"], size["height"]), Image.ANTIALIAS - ) + img = ImageOps.fit(img, (size["width"], size["height"]), LANCZOS) else: - img.thumbnail((size["width"], size["height"]), Image.ANTIALIAS) + img.thumbnail((size["width"], size["height"]), LANCZOS) thumbnail = None if field.thumbnail_size: size = field.thumbnail_size if size["force"]: - thumbnail = ImageOps.fit( - img, (size["width"], size["height"]), Image.ANTIALIAS - ) + thumbnail = ImageOps.fit(img, (size["width"], size["height"]), LANCZOS) else: thumbnail = img.copy() - thumbnail.thumbnail((size["width"], size["height"]), Image.ANTIALIAS) + thumbnail.thumbnail((size["width"], size["height"]), LANCZOS) if thumbnail: thumb_id = self._put_thumbnail(thumbnail, img_format, progressive) @@ -2188,7 
+2179,6 @@ def __get__(self, instance, owner): return value def __set__(self, instance, value): - if value is None and instance._initialised: value = self.generate() @@ -2637,3 +2627,49 @@ def to_mongo(self, document): ) else: return super().to_mongo(document) + + +class Decimal128Field(BaseField): + """ + 128-bit decimal-based floating-point field capable of emulating decimal + rounding with exact precision. This field will expose decimal.Decimal but stores the value as a + `bson.Decimal128` behind the scene, this field is intended for monetary data, scientific computations, etc. + """ + + DECIMAL_CONTEXT = create_decimal128_context() + + def __init__(self, min_value=None, max_value=None, **kwargs): + self.min_value = min_value + self.max_value = max_value + super().__init__(**kwargs) + + def to_mongo(self, value): + if value is None: + return None + if isinstance(value, Decimal128): + return value + if not isinstance(value, decimal.Decimal): + with decimal.localcontext(self.DECIMAL_CONTEXT) as ctx: + value = ctx.create_decimal(value) + return Decimal128(value) + + def to_python(self, value): + if value is None: + return None + return self.to_mongo(value).to_decimal() + + def validate(self, value): + if not isinstance(value, Decimal128): + try: + value = Decimal128(value) + except (TypeError, ValueError, decimal.InvalidOperation) as exc: + self.error("Could not convert value to Decimal128: %s" % exc) + + if self.min_value is not None and value.to_decimal() < self.min_value: + self.error("Decimal value is too small") + + if self.max_value is not None and value.to_decimal() > self.max_value: + self.error("Decimal value is too large") + + def prepare_query_value(self, op, value): + return super().prepare_query_value(op, self.to_mongo(value)) diff --git a/mongoengine/mongodb_support.py b/mongoengine/mongodb_support.py index 00b87f419..23c538d02 100644 --- a/mongoengine/mongodb_support.py +++ b/mongoengine/mongodb_support.py @@ -9,6 +9,9 @@ MONGODB_36 = (3, 6) 
MONGODB_42 = (4, 2) MONGODB_44 = (4, 4) +MONGODB_50 = (5, 0) +MONGODB_60 = (6, 0) +MONGODB_70 = (7, 0) def get_mongodb_version(): diff --git a/mongoengine/pymongo_support.py b/mongoengine/pymongo_support.py index 837f683ee..7aa55676b 100644 --- a/mongoengine/pymongo_support.py +++ b/mongoengine/pymongo_support.py @@ -1,14 +1,20 @@ """ -Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support. +Helper functions, constants, and types to aid with PyMongo support. """ import pymongo +from bson import binary, json_util from pymongo.errors import OperationFailure -_PYMONGO_37 = (3, 7) - PYMONGO_VERSION = tuple(pymongo.version_tuple[:2]) -IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37 +# This will be changed to UuidRepresentation.UNSPECIFIED in a future +# (breaking) release. +if PYMONGO_VERSION >= (4,): + LEGACY_JSON_OPTIONS = json_util.LEGACY_JSON_OPTIONS.with_options( + uuid_representation=binary.UuidRepresentation.PYTHON_LEGACY, + ) +else: + LEGACY_JSON_OPTIONS = json_util.DEFAULT_JSON_OPTIONS def count_documents( @@ -29,7 +35,7 @@ def count_documents( kwargs["collation"] = collation # count_documents appeared in pymongo 3.7 - if IS_PYMONGO_GTE_37: + if PYMONGO_VERSION >= (3, 7): try: if not filter and set(kwargs) <= {"max_time_ms"}: # when no filter is provided, estimated_document_count @@ -37,15 +43,18 @@ def count_documents( return collection.estimated_document_count(**kwargs) else: return collection.count_documents(filter=filter, **kwargs) - except OperationFailure as exc: + except OperationFailure as err: + if PYMONGO_VERSION >= (4,): + raise + # OperationFailure - accounts for some operators that used to work # with .count but are no longer working with count_documents (i.e $geoNear, $near, and $nearSphere) # fallback to deprecated Cursor.count # Keeping this should be reevaluated the day pymongo removes .count entirely if ( "$geoNear, $near, and $nearSphere are not allowed in this context" - not in str(exc) - and "$where is not allowed 
in this context" not in str(exc) + not in str(err) + and "$where is not allowed in this context" not in str(err) ): raise @@ -59,7 +68,7 @@ def count_documents( def list_collection_names(db, include_system_collections=False): """Pymongo>3.7 deprecates collection_names in favour of list_collection_names""" - if IS_PYMONGO_GTE_37: + if PYMONGO_VERSION >= (3, 7): collections = db.list_collection_names() else: collections = db.collection_names() diff --git a/mongoengine/queryset/base.py b/mongoengine/queryset/base.py index 5dc47e001..d33e7b1e3 100644 --- a/mongoengine/queryset/base.py +++ b/mongoengine/queryset/base.py @@ -28,7 +28,10 @@ NotUniqueError, OperationError, ) -from mongoengine.pymongo_support import count_documents +from mongoengine.pymongo_support import ( + LEGACY_JSON_OPTIONS, + count_documents, +) from mongoengine.queryset import transform from mongoengine.queryset.field_list import QueryFieldList from mongoengine.queryset.visitor import Q, QNode @@ -234,7 +237,7 @@ def search_text(self, text, language=None): for the search and the rules for the stemmer and tokenizer. If not specified, the search uses the default language of the index. For supported languages, see - `Text Search Languages `. + `Text Search Languages `. 
""" queryset = self.clone() if queryset._search_text: @@ -252,7 +255,7 @@ def search_text(self, text, language=None): return queryset def get(self, *q_objs, **query): - """Retrieve the the matching object raising + """Retrieve the matching object raising :class:`~mongoengine.queryset.MultipleObjectsReturned` or `DocumentName.MultipleObjectsReturned` exception if multiple results and :class:`~mongoengine.queryset.DoesNotExist` or @@ -286,6 +289,9 @@ def create(self, **kwargs): def first(self): """Retrieve the first object matching the query.""" queryset = self.clone() + if self._none or self._empty: + return None + try: result = queryset[0] except IndexError: @@ -548,11 +554,18 @@ def update( if write_concern is None: write_concern = {} + if self._none or self._empty: + return 0 queryset = self.clone() query = queryset._query - update = transform.update(queryset._document, **update) - + if "__raw__" in update and isinstance(update["__raw__"], list): + update = [ + transform.update(queryset._document, **{"__raw__": u}) + for u in update["__raw__"] + ] + else: + update = transform.update(queryset._document, **update) # If doing an atomic upsert on an inheritable class # then ensure we add _cls to the update operation if upsert and "_cls" in query: @@ -665,6 +678,9 @@ def modify( if not update and not upsert and not remove: raise OperationError("No update parameters, must either update or remove") + if self._none or self._empty: + return None + queryset = self.clone() query = queryset._query if not remove: @@ -714,13 +730,13 @@ def with_id(self, object_id): :param object_id: the value for the id of the document to look up """ queryset = self.clone() - if not queryset._query_obj.empty: + if queryset._query_obj: msg = "Cannot use a filter whilst using `with_id`" raise InvalidQueryError(msg) return queryset.filter(pk=object_id).first() def in_bulk(self, object_ids): - """ "Retrieve a set of documents by their ids. + """Retrieve a set of documents by their ids. 
:param object_ids: a list or tuple of ObjectId's :rtype: dict of ObjectId's as keys and collection-specific @@ -946,9 +962,11 @@ def distinct(self, field): except LookUpError: pass - distinct = self._dereference( - queryset._cursor.distinct(field), 1, name=field, instance=self._document - ) + raw_values = queryset._cursor.distinct(field) + if not self._auto_dereference: + return raw_values + + distinct = self._dereference(raw_values, 1, name=field, instance=self._document) doc_field = self._document._fields.get(field.split(".", 1)[0]) instance = None @@ -1110,7 +1128,6 @@ def order_by(self, *keys): new_ordering = queryset._get_order_by(keys) if queryset._cursor_obj: - # If a cursor object has already been created, apply the sort to it if new_ordering: queryset._cursor_obj.sort(new_ordering) @@ -1261,6 +1278,16 @@ def max_time_ms(self, ms): def to_json(self, *args, **kwargs): """Converts a queryset to JSON""" + if "json_options" not in kwargs: + warnings.warn( + "No 'json_options' are specified! Falling back to " + "LEGACY_JSON_OPTIONS with uuid_representation=PYTHON_LEGACY. " + "For use with other MongoDB drivers specify the UUID " + "representation to use. 
This will be changed to " + "uuid_representation=UNSPECIFIED in a future release.", + DeprecationWarning, + ) + kwargs["json_options"] = LEGACY_JSON_OPTIONS return json_util.dumps(self.as_pymongo(), *args, **kwargs) def from_json(self, json_data): @@ -1269,10 +1296,10 @@ def from_json(self, json_data): return [self._document._from_son(data) for data in son_data] def aggregate(self, pipeline, *suppl_pipeline, **kwargs): - """Perform a aggregate function based in your queryset params + """Perform an aggregate function based on your queryset params - :param pipeline: list of aggregation commands,\ - see: http://docs.mongodb.org/manual/core/aggregation-pipeline/ + :param pipeline: list of aggregation commands, + see: https://www.mongodb.com/docs/manual/core/aggregation-pipeline/ :param suppl_pipeline: unpacked list of pipeline (added to support deprecation of the old interface) parameter will be removed shortly :param kwargs: (optional) kwargs dictionary to be passed to pymongo's aggregate call @@ -1288,6 +1315,10 @@ def aggregate(self, pipeline, *suppl_pipeline, **kwargs): user_pipeline += suppl_pipeline initial_pipeline = [] + if self._none or self._empty: + initial_pipeline.append({"$limit": 1}) + initial_pipeline.append({"$match": {"$expr": False}}) + if self._query: initial_pipeline.append({"$match": self._query}) @@ -1330,9 +1361,8 @@ def map_reduce( :param map_f: map function, as :class:`~bson.code.Code` or string :param reduce_f: reduce function, as :class:`~bson.code.Code` or string - :param output: output collection name, if set to 'inline' will try to - use :class:`~pymongo.collection.Collection.inline_map_reduce` - This can also be a dictionary containing output options + :param output: output collection name, if set to 'inline' will return + the results inline. 
This can also be a dictionary containing output options see: http://docs.mongodb.org/manual/reference/command/mapReduce/#dbcmd.mapReduce :param finalize_f: finalize function, an optional function that performs any post-reduction processing. @@ -1342,12 +1372,6 @@ def map_reduce( Returns an iterator yielding :class:`~mongoengine.document.MapReduceDocument`. - - .. note:: - - Map/Reduce changed in server version **>= 1.7.4**. The PyMongo - :meth:`~pymongo.collection.Collection.map_reduce` helper requires - PyMongo version **>= 1.11**. """ queryset = self.clone() @@ -1384,10 +1408,10 @@ def map_reduce( mr_args["limit"] = limit if output == "inline" and not queryset._ordering: - map_reduce_function = "inline_map_reduce" + inline = True + mr_args["out"] = {"inline": 1} else: - map_reduce_function = "map_reduce" - + inline = False if isinstance(output, str): mr_args["out"] = output @@ -1417,17 +1441,29 @@ def map_reduce( mr_args["out"] = SON(ordered_output) - results = getattr(queryset._collection, map_reduce_function)( - map_f, reduce_f, **mr_args + db = queryset._document._get_db() + result = db.command( + { + "mapReduce": queryset._document._get_collection_name(), + "map": map_f, + "reduce": reduce_f, + **mr_args, + } ) - if map_reduce_function == "map_reduce": - results = results.find() + if inline: + docs = result["results"] + else: + if isinstance(result["result"], str): + docs = db[result["result"]].find() + else: + info = result["result"] + docs = db.client[info["db"]][info["collection"]].find() if queryset._ordering: - results = results.sort(queryset._ordering) + docs = docs.sort(queryset._ordering) - for doc in results: + for doc in docs: yield MapReduceDocument( queryset._document, queryset._collection, doc["_id"], doc["value"] ) @@ -1471,7 +1507,7 @@ def exec_js(self, code, *fields, **options): code = Code(code, scope=scope) db = queryset._document._get_db() - return db.eval(code, *fields) + return db.command("eval", code, args=fields).get("retval") def 
where(self, where_clause): """Filter ``QuerySet`` results with a ``$where`` clause (a Javascript @@ -1715,29 +1751,29 @@ def no_dereference(self): def _item_frequencies_map_reduce(self, field, normalize=False): map_func = """ - function() { - var path = '{{~%(field)s}}'.split('.'); + function() {{ + var path = '{{{{~{field}}}}}'.split('.'); var field = this; - for (p in path) { + for (p in path) {{ if (typeof field != 'undefined') field = field[path[p]]; else break; - } - if (field && field.constructor == Array) { - field.forEach(function(item) { + }} + if (field && field.constructor == Array) {{ + field.forEach(function(item) {{ emit(item, 1); - }); - } else if (typeof field != 'undefined') { + }}); + }} else if (typeof field != 'undefined') {{ emit(field, 1); - } else { + }} else {{ emit(null, 1); - } - } - """ % { - "field": field - } + }} + }} + """.format( + field=field + ) reduce_func = """ function(key, values) { var total = 0; diff --git a/mongoengine/queryset/queryset.py b/mongoengine/queryset/queryset.py index b7c70bc0f..e0f7765b9 100644 --- a/mongoengine/queryset/queryset.py +++ b/mongoengine/queryset/queryset.py @@ -87,7 +87,6 @@ def _iter_results(self): pos = 0 while True: - # For all positions lower than the length of the current result # cache, serve the docs straight from the cache w/o hitting the # database. 
diff --git a/mongoengine/queryset/transform.py b/mongoengine/queryset/transform.py index 4c3d051bf..a95a84681 100644 --- a/mongoengine/queryset/transform.py +++ b/mongoengine/queryset/transform.py @@ -51,6 +51,10 @@ "iendswith", "exact", "iexact", + "regex", + "iregex", + "wholeword", + "iwholeword", ) CUSTOM_OPERATORS = ("match",) MATCH_OPERATORS = ( @@ -260,12 +264,24 @@ def update(_doc_cls=None, **update): op = operator_map.get(op, op) match = None - if parts[-1] in COMPARISON_OPERATORS: - match = parts.pop() - # Allow to escape operator-like field name by __ - if len(parts) > 1 and parts[-1] == "": - parts.pop() + if len(parts) == 1: + # typical update like set__field + # but also allows to update a field named like a comparison operator + # like set__type = "something" (without clashing with the 'type' operator) + pass + elif len(parts) > 1: + # can be either an embedded field like set__foo__bar + # or a comparison operator as in pull__foo__in + if parts[-1] in COMPARISON_OPERATORS: + match = parts.pop() # e.g. pop 'in' from pull__foo__in + + # Allow to escape operator-like field name by __ + # e.g. in the case of an embedded foo.type field + # Doc.objects().update(set__foo__type="bar") + # see https://github.com/MongoEngine/mongoengine/pull/1351 + if parts[-1] == "": + match = parts.pop() # e.g. 
pop last '__' from set__foo__type__ if _doc_cls: # Switch field names to proper names [set in Field(name='foo')] diff --git a/setup.py b/setup.py index 27ed987ef..b1f175233 100644 --- a/setup.py +++ b/setup.py @@ -87,7 +87,7 @@ def run_tests(self): # import it as it depends on PyMongo and PyMongo isn't installed until this # file is read init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py") -version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0] +version_line = list(filter(lambda line: line.startswith("VERSION"), open(init)))[0] VERSION = get_version(eval(version_line.split("=")[-1])) @@ -101,6 +101,8 @@ def run_tests(self): "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database", @@ -110,7 +112,7 @@ def run_tests(self): extra_opts = { "packages": find_packages(exclude=["tests", "tests.*"]), "tests_require": [ - "pytest<5.0", + "pytest", "pytest-cov", "coverage", "blinker", @@ -139,8 +141,8 @@ def run_tests(self): long_description=LONG_DESCRIPTION, platforms=["any"], classifiers=CLASSIFIERS, - python_requires=">=3.6", - install_requires=["pymongo>=3.4, <4.0"], + python_requires=">=3.7", + install_requires=["pymongo>=3.4,<5.0"], cmdclass={"test": PyTest}, **extra_opts ) diff --git a/tests/__init__.py b/tests/__init__.py index e69de29bb..29a312ae0 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1,7 @@ +import os + +_THIS_MODULE = os.path.abspath(__file__) +TESTS_DIR = os.path.dirname(_THIS_MODULE) + +ROOT_DIR = os.path.dirname(TESTS_DIR) +DOCS_DIR = os.path.join(ROOT_DIR, "docs") diff --git a/tests/document/test_class_methods.py b/tests/document/test_class_methods.py index f82808ba0..0ab9aa7cd 100644 --- 
a/tests/document/test_class_methods.py +++ b/tests/document/test_class_methods.py @@ -90,7 +90,7 @@ class BlogPost(Document): BlogPost.ensure_indexes() assert BlogPost.compare_indexes() == {"missing": [], "extra": []} - BlogPost.ensure_index(["author", "description"]) + BlogPost.create_index(["author", "description"]) assert BlogPost.compare_indexes() == { "missing": [], "extra": [[("author", 1), ("description", 1)]], @@ -130,7 +130,7 @@ class BlogPostWithTags(BlogPost): BlogPostWithTags.ensure_indexes() assert BlogPost.compare_indexes() == {"missing": [], "extra": []} - BlogPostWithTags.ensure_index(["author", "tag_list"]) + BlogPostWithTags.create_index(["author", "tag_list"]) assert BlogPost.compare_indexes() == { "missing": [], "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]], diff --git a/tests/document/test_delta.py b/tests/document/test_delta.py index 68c698b64..e610290b6 100644 --- a/tests/document/test_delta.py +++ b/tests/document/test_delta.py @@ -4,7 +4,7 @@ from mongoengine import * from mongoengine.pymongo_support import list_collection_names -from tests.utils import MongoDBTestCase +from tests.utils import MongoDBTestCase, get_as_pymongo class TestDelta(MongoDBTestCase): @@ -952,6 +952,49 @@ class Doc(Document): assert "oops" == delta[0]["users.007.rolist"][0]["type"] assert uinfo.id == delta[0]["users.007.info"] + def test_delta_on_dict(self): + class MyDoc(Document): + dico = DictField() + + MyDoc.drop_collection() + + MyDoc(dico={"a": {"b": 0}}).save() + + mydoc = MyDoc.objects.first() + assert mydoc._get_changed_fields() == [] + mydoc.dico["a"]["b"] = 0 + assert mydoc._get_changed_fields() == [] + mydoc.dico["a"] = {"b": 0} + assert mydoc._get_changed_fields() == [] + mydoc.dico = {"a": {"b": 0}} + assert mydoc._get_changed_fields() == [] + mydoc.dico["a"]["c"] = 1 + assert mydoc._get_changed_fields() == ["dico.a.c"] + mydoc.dico["a"]["b"] = 2 + mydoc.dico["d"] = 3 + assert mydoc._get_changed_fields() == ["dico.a.c", "dico.a.b", "dico.d"] 
+ + mydoc._clear_changed_fields() + assert mydoc._get_changed_fields() == [] + + def test_delta_on_dict_empty_key_triggers_full_change(self): + """more of a bug (harmless) but empty key changes aren't managed perfectly""" + + class MyDoc(Document): + dico = DictField() + + MyDoc.drop_collection() + + MyDoc(dico={"a": {"b": 0}}).save() + + mydoc = MyDoc.objects.first() + assert mydoc._get_changed_fields() == [] + mydoc.dico[""] = 3 + assert mydoc._get_changed_fields() == ["dico"] + mydoc.save() + raw_doc = get_as_pymongo(mydoc) + assert raw_doc == {"_id": mydoc.id, "dico": {"": 3, "a": {"b": 0}}} + if __name__ == "__main__": unittest.main() diff --git a/tests/document/test_dynamic.py b/tests/document/test_dynamic.py index 909c6f796..170b2ea3d 100644 --- a/tests/document/test_dynamic.py +++ b/tests/document/test_dynamic.py @@ -28,9 +28,9 @@ def test_simple_dynamic_document(self): p.age = 34 assert p.to_mongo() == {"_cls": "Person", "name": "James", "age": 34} - assert p.to_mongo().keys() == ["_cls", "name", "age"] + assert sorted(p.to_mongo().keys()) == ["_cls", "age", "name"] p.save() - assert p.to_mongo().keys() == ["_id", "_cls", "name", "age"] + assert sorted(p.to_mongo().keys()) == ["_cls", "_id", "age", "name"] assert self.Person.objects.first().age == 34 diff --git a/tests/document/test_indexes.py b/tests/document/test_indexes.py index e308d12a9..8a0486412 100644 --- a/tests/document/test_indexes.py +++ b/tests/document/test_indexes.py @@ -9,8 +9,10 @@ from mongoengine.connection import get_db from mongoengine.mongodb_support import ( MONGODB_42, + MONGODB_70, get_mongodb_version, ) +from mongoengine.pymongo_support import PYMONGO_VERSION class TestIndexes(unittest.TestCase): @@ -247,10 +249,9 @@ class Place(Document): def test_explicit_geohaystack_index(self): """Ensure that geohaystack indexes work when created via meta[indexes]""" - pytest.skip( - "GeoHaystack index creation is not supported for now" - "from meta, as it requires a bucketSize parameter." 
- ) + # This test can be removed when pymongo 3.x is no longer supported + if PYMONGO_VERSION >= (4,): + pytest.skip("GEOHAYSTACK has been removed in pymongo 4.0") class Place(Document): location = DictField() @@ -261,10 +262,13 @@ class Place(Document): {"fields": [("location.point", "geoHaystack"), ("name", 1)]} ] == Place._meta["index_specs"] - Place.ensure_indexes() - info = Place._get_collection().index_information() - info = [value["key"] for key, value in info.items()] - assert [("location.point", "geoHaystack")] in info + # GeoHaystack index creation is not supported for now from meta, as it + # requires a bucketSize parameter. + if False: + Place.ensure_indexes() + info = Place._get_collection().index_information() + info = [value["key"] for key, value in info.items()] + assert [("location.point", "geoHaystack")] in info def test_create_geohaystack_index(self): """Ensure that geohaystack indexes can be created""" @@ -273,10 +277,25 @@ class Place(Document): location = DictField() name = StringField() - Place.create_index({"fields": (")location.point", "name")}, bucketSize=10) - info = Place._get_collection().index_information() - info = [value["key"] for key, value in info.items()] - assert [("location.point", "geoHaystack"), ("name", 1)] in info + if PYMONGO_VERSION >= (4,): + expected_error = NotImplementedError + elif get_mongodb_version() >= (4, 9): + expected_error = OperationFailure + else: + expected_error = None + + # This test can be removed when pymongo 3.x is no longer supported + if expected_error: + with pytest.raises(expected_error): + Place.create_index( + {"fields": (")location.point", "name")}, + bucketSize=10, + ) + else: + Place.create_index({"fields": (")location.point", "name")}, bucketSize=10) + info = Place._get_collection().index_information() + info = [value["key"] for key, value in info.items()] + assert [("location.point", "geoHaystack"), ("name", 1)] in info def test_dictionary_indexes(self): """Ensure that indexes are used when 
meta[indexes] contains @@ -448,30 +467,73 @@ class Test(Document): == "IDHACK" ) - query_plan = Test.objects(a=1).only("a").exclude("id").explain() - assert ( - query_plan.get("queryPlanner") - .get("winningPlan") - .get("inputStage") - .get("stage") - == "IXSCAN" - ) mongo_db = get_mongodb_version() + query_plan = Test.objects(a=1).only("a").exclude("id").explain() + if mongo_db < MONGODB_70: + assert ( + query_plan.get("queryPlanner") + .get("winningPlan") + .get("inputStage") + .get("stage") + == "IXSCAN" + ) + else: + assert ( + query_plan.get("queryPlanner") + .get("winningPlan") + .get("queryPlan") + .get("inputStage") + .get("stage") + == "IXSCAN" + ) + PROJECTION_STR = "PROJECTION" if mongo_db < MONGODB_42 else "PROJECTION_COVERED" - assert ( - query_plan.get("queryPlanner").get("winningPlan").get("stage") - == PROJECTION_STR - ) + if mongo_db < MONGODB_70: + assert ( + query_plan.get("queryPlanner").get("winningPlan").get("stage") + == PROJECTION_STR + ) + else: + assert ( + query_plan.get("queryPlanner") + .get("winningPlan") + .get("queryPlan") + .get("stage") + == PROJECTION_STR + ) query_plan = Test.objects(a=1).explain() - assert ( - query_plan.get("queryPlanner") - .get("winningPlan") - .get("inputStage") - .get("stage") - == "IXSCAN" - ) - assert query_plan.get("queryPlanner").get("winningPlan").get("stage") == "FETCH" + if mongo_db < MONGODB_70: + assert ( + query_plan.get("queryPlanner") + .get("winningPlan") + .get("inputStage") + .get("stage") + == "IXSCAN" + ) + else: + assert ( + query_plan.get("queryPlanner") + .get("winningPlan") + .get("queryPlan") + .get("inputStage") + .get("stage") + == "IXSCAN" + ) + + if mongo_db < MONGODB_70: + assert ( + query_plan.get("queryPlanner").get("winningPlan").get("stage") + == "FETCH" + ) + else: + assert ( + query_plan.get("queryPlanner") + .get("winningPlan") + .get("queryPlan") + .get("stage") + == "FETCH" + ) def test_index_on_id(self): class BlogPost(Document): @@ -518,8 +580,12 @@ class 
BlogPost(Document): BlogPost.objects.hint("Bad Name").count() # Invalid shape argument (missing list brackets) should fail. - with pytest.raises(ValueError): - BlogPost.objects.hint(("tags", 1)).count() + if PYMONGO_VERSION <= (4, 3): + with pytest.raises(ValueError): + BlogPost.objects.hint(("tags", 1)).count() + else: + with pytest.raises(TypeError): + BlogPost.objects.hint(("tags", 1)).count() def test_collation(self): base = {"locale": "en", "strength": 2} @@ -549,7 +615,9 @@ class BlogPost(Document): incorrect_collation = {"arndom": "wrdo"} with pytest.raises(OperationFailure) as exc_info: BlogPost.objects.collation(incorrect_collation).count() - assert "Missing expected field" in str(exc_info.value) + assert "Missing expected field" in str( + exc_info.value + ) or "unknown field" in str(exc_info.value) query_result = BlogPost.objects.collation({}).order_by("name") assert [x.name for x in query_result] == sorted(names) @@ -963,44 +1031,52 @@ class Book(Document): def test_indexes_after_database_drop(self): """ - Test to ensure that indexes are re-created on a collection even - after the database has been dropped. + Test to ensure that indexes are not re-created on a collection + after the database has been dropped unless auto_create_index_on_save + is enabled. - Issue #812 + Issue #812 and #1446. """ # Use a new connection and database since dropping the database could # cause concurrent tests to fail. 
- connection = connect( - db="tempdatabase", alias="test_indexes_after_database_drop" - ) + tmp_alias = "test_indexes_after_database_drop" + connection = connect(db="tempdatabase", alias=tmp_alias) + self.addCleanup(connection.drop_database, "tempdatabase") class BlogPost(Document): - title = StringField() slug = StringField(unique=True) + meta = {"db_alias": tmp_alias} + + BlogPost.drop_collection() + BlogPost(slug="test").save() + with pytest.raises(NotUniqueError): + BlogPost(slug="test").save() + + # Drop the Database + connection.drop_database("tempdatabase") + BlogPost(slug="test").save() + # No error because the index was not recreated after dropping the database. + BlogPost(slug="test").save() + + # Repeat with auto_create_index_on_save: True. + class BlogPost2(Document): + slug = StringField(unique=True) + meta = { + "db_alias": tmp_alias, + "auto_create_index_on_save": True, + } - meta = {"db_alias": "test_indexes_after_database_drop"} + BlogPost2.drop_collection() + BlogPost2(slug="test").save() + with pytest.raises(NotUniqueError): + BlogPost2(slug="test").save() - try: - BlogPost.drop_collection() - - # Create Post #1 - post1 = BlogPost(title="test1", slug="test") - post1.save() - - # Drop the Database - connection.drop_database("tempdatabase") - - # Re-create Post #1 - post1 = BlogPost(title="test1", slug="test") - post1.save() - - # Create Post #2 - post2 = BlogPost(title="test2", slug="test") - with pytest.raises(NotUniqueError): - post2.save() - finally: - # Drop the temporary database at the end - connection.drop_database("tempdatabase") + # Drop the Database + connection.drop_database("tempdatabase") + BlogPost2(slug="test").save() + # Error because ensure_indexes is run on every save(). 
+ with pytest.raises(NotUniqueError): + BlogPost2(slug="test").save() def test_index_dont_send_cls_option(self): """ diff --git a/tests/document/test_inheritance.py b/tests/document/test_inheritance.py index 550a4bdf3..09a207d5a 100644 --- a/tests/document/test_inheritance.py +++ b/tests/document/test_inheritance.py @@ -246,11 +246,15 @@ class Employee(Person): assert ["_cls", "age", "id", "name", "salary"] == sorted( Employee._fields.keys() ) - assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"] - assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [ + assert sorted(Person(name="Bob", age=35).to_mongo().keys()) == [ "_cls", + "age", "name", + ] + assert sorted(Employee(name="Bob", age=35, salary=0).to_mongo().keys()) == [ + "_cls", "age", + "name", "salary", ] assert Employee._get_collection_name() == Person._get_collection_name() @@ -334,7 +338,7 @@ class Dog(Animal): # Check that _cls etc aren't present on simple documents dog = Animal(name="dog").save() - assert dog.to_mongo().keys() == ["_id", "name"] + assert sorted(dog.to_mongo().keys()) == ["_id", "name"] collection = self.db[Animal._get_collection_name()] obj = collection.find_one() diff --git a/tests/document/test_instance.py b/tests/document/test_instance.py index 1469c9bb6..54c4bb37d 100644 --- a/tests/document/test_instance.py +++ b/tests/document/test_instance.py @@ -4,6 +4,7 @@ import uuid import weakref from datetime import datetime +from unittest.mock import Mock import bson import pytest @@ -28,7 +29,10 @@ MONGODB_36, get_mongodb_version, ) -from mongoengine.pymongo_support import list_collection_names +from mongoengine.pymongo_support import ( + PYMONGO_VERSION, + list_collection_names, +) from mongoengine.queryset import NULLIFY, Q from tests import fixtures from tests.fixtures import ( @@ -65,12 +69,12 @@ def tearDown(self): for collection in list_collection_names(self.db): self.db.drop_collection(collection) - def assertDbEqual(self, docs): + def 
_assert_db_equal(self, docs): assert list(self.Person._get_collection().find().sort("id")) == sorted( docs, key=lambda doc: doc["_id"] ) - def assertHasInstance(self, field, instance): + def _assert_has_instance(self, field, instance): assert hasattr(field, "_instance") assert field._instance is not None if isinstance(field._instance, weakref.ProxyType): @@ -126,7 +130,7 @@ class Log(Document): options = Log.objects._collection.options() assert options["capped"] is True assert options["max"] == 10 - assert options["size"] == 10 * 2 ** 20 + assert options["size"] == 10 * 2**20 # Check that the document with default value can be recreated class Log(Document): @@ -704,11 +708,15 @@ class Person(EmbeddedDocument): class Employee(Person): salary = IntField() - assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"] - assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [ + assert sorted(Person(name="Bob", age=35).to_mongo().keys()) == [ "_cls", + "age", "name", + ] + assert sorted(Employee(name="Bob", age=35, salary=0).to_mongo().keys()) == [ + "_cls", "age", + "name", "salary", ] @@ -717,7 +725,7 @@ class SubDoc(EmbeddedDocument): id = StringField(required=True) sub_doc = SubDoc(id="abc") - assert sub_doc.to_mongo().keys() == ["id"] + assert list(sub_doc.to_mongo().keys()) == ["id"] def test_embedded_document(self): """Ensure that embedded documents are set up correctly.""" @@ -740,11 +748,11 @@ class Doc(Document): Doc.drop_collection() doc = Doc(embedded_field=Embedded(string="Hi")) - self.assertHasInstance(doc.embedded_field, doc) + self._assert_has_instance(doc.embedded_field, doc) doc.save() doc = Doc.objects.get() - self.assertHasInstance(doc.embedded_field, doc) + self._assert_has_instance(doc.embedded_field, doc) def test_embedded_document_complex_instance(self): """Ensure that embedded documents in complex fields can reference @@ -759,11 +767,11 @@ class Doc(Document): Doc.drop_collection() doc = 
Doc(embedded_field=[Embedded(string="Hi")]) - self.assertHasInstance(doc.embedded_field[0], doc) + self._assert_has_instance(doc.embedded_field[0], doc) doc.save() doc = Doc.objects.get() - self.assertHasInstance(doc.embedded_field[0], doc) + self._assert_has_instance(doc.embedded_field[0], doc) def test_embedded_document_complex_instance_no_use_db_field(self): """Ensure that use_db_field is propagated to list of Emb Docs.""" @@ -792,11 +800,11 @@ class Account(Document): acc = Account() acc.email = Email(email="test@example.com") - self.assertHasInstance(acc._data["email"], acc) + self._assert_has_instance(acc._data["email"], acc) acc.save() acc1 = Account.objects.first() - self.assertHasInstance(acc1._data["email"], acc1) + self._assert_has_instance(acc1._data["email"], acc1) def test_instance_is_set_on_setattr_on_embedded_document_list(self): class Email(EmbeddedDocument): @@ -808,11 +816,11 @@ class Account(Document): Account.drop_collection() acc = Account() acc.emails = [Email(email="test@example.com")] - self.assertHasInstance(acc._data["emails"][0], acc) + self._assert_has_instance(acc._data["emails"][0], acc) acc.save() acc1 = Account.objects.first() - self.assertHasInstance(acc1._data["emails"][0], acc1) + self._assert_has_instance(acc1._data["emails"][0], acc1) def test_save_checks_that_clean_is_called(self): class CustomError(Exception): @@ -921,7 +929,7 @@ def test_modify_empty(self): with pytest.raises(InvalidDocumentError): self.Person().modify(set__age=10) - self.assertDbEqual([dict(doc.to_mongo())]) + self._assert_db_equal([dict(doc.to_mongo())]) def test_modify_invalid_query(self): doc1 = self.Person(name="bob", age=10).save() @@ -931,7 +939,7 @@ def test_modify_invalid_query(self): with pytest.raises(InvalidQueryError): doc1.modify({"id": doc2.id}, set__value=20) - self.assertDbEqual(docs) + self._assert_db_equal(docs) def test_modify_match_another_document(self): doc1 = self.Person(name="bob", age=10).save() @@ -941,7 +949,7 @@ def 
test_modify_match_another_document(self): n_modified = doc1.modify({"name": doc2.name}, set__age=100) assert n_modified == 0 - self.assertDbEqual(docs) + self._assert_db_equal(docs) def test_modify_not_exists(self): doc1 = self.Person(name="bob", age=10).save() @@ -951,7 +959,7 @@ def test_modify_not_exists(self): n_modified = doc2.modify({"name": doc2.name}, set__age=100) assert n_modified == 0 - self.assertDbEqual(docs) + self._assert_db_equal(docs) def test_modify_update(self): other_doc = self.Person(name="bob", age=10).save() @@ -977,7 +985,7 @@ def test_modify_update(self): assert doc.to_json() == doc_copy.to_json() assert doc._get_changed_fields() == [] - self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())]) + self._assert_db_equal([dict(other_doc.to_mongo()), dict(doc.to_mongo())]) def test_modify_with_positional_push(self): class Content(EmbeddedDocument): @@ -1027,6 +1035,17 @@ def test_save(self): "_id": person.id, } + def test_save_write_concern(self): + class Recipient(Document): + email = EmailField(required=True) + + rec = Recipient(email="garbage@garbage.com") + + fn = Mock() + rec._save_create = fn + rec.save(write_concern={"w": 0}) + assert fn.call_args[1]["write_concern"] == {"w": 0} + def test_save_skip_validation(self): class Recipient(Document): email = EmailField(required=True) @@ -1507,7 +1526,6 @@ class Doc(Document): assert my_doc.int_field == 1 def test_document_update(self): - # try updating a non-saved document with pytest.raises(OperationError): person = self.Person(name="dcrosta") @@ -1728,7 +1746,7 @@ class UserSubscription(Document): user = User.objects.first() # Even if stored as ObjectId's internally mongoengine uses DBRefs - # As ObjectId's aren't automatically derefenced + # As ObjectId's aren't automatically dereferenced assert isinstance(user._data["orgs"][0], DBRef) assert isinstance(user.orgs[0], Organization) assert isinstance(user._data["orgs"][0], Organization) @@ -2751,17 +2769,17 @@ class 
Person(Document): from pymongo.collection import Collection - orig_update = Collection.update + orig_update_one = Collection.update_one try: - def fake_update(*args, **kwargs): + def fake_update_one(*args, **kwargs): self.fail("Unexpected update for %s" % args[0].name) - return orig_update(*args, **kwargs) + return orig_update_one(*args, **kwargs) - Collection.update = fake_update + Collection.update_one = fake_update_one person.save() finally: - Collection.update = orig_update + Collection.update_one = orig_update_one def test_db_alias_tests(self): """DB Alias tests.""" @@ -2939,7 +2957,11 @@ def __str__(self): } ) assert [str(b) for b in custom_qs] == ["1", "2"] - assert custom_qs.count() == 2 + + # count only will work with this raw query before pymongo 4.x, but + # the length is also implicitly checked above + if PYMONGO_VERSION < (4,): + assert custom_qs.count() == 2 def test_switch_db_instance(self): register_connection("testdb-1", "mongoenginetest2") @@ -3582,8 +3604,7 @@ class User(Document): cdt_fld = ComplexDateTimeField(null=True) User.objects.delete() - u = User(name="user") - u.save() + u = User(name="user").save() u_from_db = User.objects.get(name="user") u_from_db.height = None u_from_db.save() diff --git a/tests/fields/test_binary_field.py b/tests/fields/test_binary_field.py index e7bdfa2c1..f81777b5d 100644 --- a/tests/fields/test_binary_field.py +++ b/tests/fields/test_binary_field.py @@ -31,6 +31,14 @@ class Attachment(Document): assert MIME_TYPE == attachment_1.content_type assert BLOB == bytes(attachment_1.blob) + def test_bytearray_conversion_to_bytes(self): + class Dummy(Document): + blob = BinaryField() + + byte_arr = bytearray(b"\x00\x00\x00\x00\x00") + dummy = Dummy(blob=byte_arr) + assert isinstance(dummy.blob, bytes) + def test_validation_succeeds(self): """Ensure that valid values can be assigned to binary fields.""" diff --git a/tests/fields/test_complex_datetime_field.py b/tests/fields/test_complex_datetime_field.py index 
82f332f68..205fb22f6 100644 --- a/tests/fields/test_complex_datetime_field.py +++ b/tests/fields/test_complex_datetime_field.py @@ -206,3 +206,9 @@ class Log(Document): with pytest.raises(ValidationError): log.save() + + def test_query_none_value_dont_raise(self): + class Log(Document): + timestamp = ComplexDateTimeField() + + _ = list(Log.objects(timestamp=None)) diff --git a/tests/fields/test_datetime_field.py b/tests/fields/test_datetime_field.py index 088e54a42..ced10b1bc 100644 --- a/tests/fields/test_datetime_field.py +++ b/tests/fields/test_datetime_field.py @@ -9,7 +9,7 @@ from mongoengine import * from mongoengine import connection -from tests.utils import MongoDBTestCase +from tests.utils import MongoDBTestCase, get_as_pymongo class TestDateTimeField(MongoDBTestCase): @@ -55,6 +55,21 @@ class Person(Document): assert person_created_t0 == person.created # make sure it does not change assert person._data["created"] == person.created + def test_set_using_callable(self): + # Weird feature but it's there for a while so let's make sure we don't break it + class Person(Document): + created = DateTimeField() + + Person.drop_collection() + + person = Person() + frozen_dt = dt.datetime(2020, 7, 25, 9, 56, 1) + person.created = lambda: frozen_dt + person.save() + + assert callable(person.created) + assert get_as_pymongo(person) == {"_id": person.id, "created": frozen_dt} + def test_handling_microseconds(self): """Tests showing pymongo datetime fields handling of microseconds. 
Microseconds are rounded to the nearest millisecond and pre UTC diff --git a/tests/fields/test_decimal128_field.py b/tests/fields/test_decimal128_field.py new file mode 100644 index 000000000..6aa2ec23e --- /dev/null +++ b/tests/fields/test_decimal128_field.py @@ -0,0 +1,149 @@ +import json +import random +from decimal import Decimal + +import pytest +from bson.decimal128 import Decimal128 + +from mongoengine import Decimal128Field, Document, ValidationError +from tests.utils import MongoDBTestCase, get_as_pymongo + + +class Decimal128Document(Document): + dec128_fld = Decimal128Field() + dec128_min_0 = Decimal128Field(min_value=0) + dec128_max_100 = Decimal128Field(max_value=100) + + +def generate_test_cls() -> Document: + Decimal128Document.drop_collection() + Decimal128Document(dec128_fld=None).save() + Decimal128Document(dec128_fld=Decimal(1)).save() + return Decimal128Document + + +class TestDecimal128Field(MongoDBTestCase): + def test_decimal128_validation_good(self): + doc = Decimal128Document() + + doc.dec128_fld = Decimal(0) + doc.validate() + + doc.dec128_fld = Decimal(50) + doc.validate() + + doc.dec128_fld = Decimal(110) + doc.validate() + + doc.dec128_fld = Decimal("110") + doc.validate() + + def test_decimal128_validation_invalid(self): + """Ensure that invalid values cannot be assigned.""" + + doc = Decimal128Document() + + doc.dec128_fld = "ten" + + with pytest.raises(ValidationError): + doc.validate() + + def test_decimal128_validation_min(self): + """Ensure that out of bounds values cannot be assigned.""" + + doc = Decimal128Document() + + doc.dec128_min_0 = Decimal(50) + doc.validate() + + doc.dec128_min_0 = Decimal(-1) + with pytest.raises(ValidationError): + doc.validate() + + def test_decimal128_validation_max(self): + """Ensure that out of bounds values cannot be assigned.""" + + doc = Decimal128Document() + + doc.dec128_max_100 = Decimal(50) + doc.validate() + + doc.dec128_max_100 = Decimal(101) + with pytest.raises(ValidationError): + 
doc.validate() + + def test_eq_operator(self): + cls = generate_test_cls() + assert cls.objects(dec128_fld=1.0).count() == 1 + assert cls.objects(dec128_fld=2.0).count() == 0 + + def test_ne_operator(self): + cls = generate_test_cls() + assert cls.objects(dec128_fld__ne=None).count() == 1 + assert cls.objects(dec128_fld__ne=1).count() == 1 + assert cls.objects(dec128_fld__ne=1.0).count() == 1 + + def test_gt_operator(self): + cls = generate_test_cls() + assert cls.objects(dec128_fld__gt=0.5).count() == 1 + + def test_lt_operator(self): + cls = generate_test_cls() + assert cls.objects(dec128_fld__lt=1.5).count() == 1 + + def test_field_exposed_as_python_Decimal(self): + # from int + model = Decimal128Document(dec128_fld=100).save() + assert isinstance(model.dec128_fld, Decimal) + model = Decimal128Document.objects.get(id=model.id) + assert isinstance(model.dec128_fld, Decimal) + assert model.dec128_fld == Decimal("100") + + def test_storage(self): + # from int + model = Decimal128Document(dec128_fld=100).save() + assert get_as_pymongo(model) == { + "_id": model.id, + "dec128_fld": Decimal128("100"), + } + + # from str + model = Decimal128Document(dec128_fld="100.0").save() + assert get_as_pymongo(model) == { + "_id": model.id, + "dec128_fld": Decimal128("100.0"), + } + + # from float + model = Decimal128Document(dec128_fld=100.0).save() + assert get_as_pymongo(model) == { + "_id": model.id, + "dec128_fld": Decimal128("100"), + } + + # from Decimal + model = Decimal128Document(dec128_fld=Decimal(100)).save() + assert get_as_pymongo(model) == { + "_id": model.id, + "dec128_fld": Decimal128("100"), + } + model = Decimal128Document(dec128_fld=Decimal("100.0")).save() + assert get_as_pymongo(model) == { + "_id": model.id, + "dec128_fld": Decimal128("100.0"), + } + + # from Decimal128 + model = Decimal128Document(dec128_fld=Decimal128("100")).save() + assert get_as_pymongo(model) == { + "_id": model.id, + "dec128_fld": Decimal128("100"), + } + + def test_json(self): + 
Decimal128Document.drop_collection() + f = str(random.random()) + Decimal128Document(dec128_fld=f).save() + json_str = Decimal128Document.objects.to_json() + array = json.loads(json_str) + assert array[0]["dec128_fld"] == {"$numberDecimal": str(f)} diff --git a/tests/fields/test_decimal_field.py b/tests/fields/test_decimal_field.py index 89a725a9e..0952eb64f 100644 --- a/tests/fields/test_decimal_field.py +++ b/tests/fields/test_decimal_field.py @@ -71,6 +71,8 @@ class Person(Document): fetched_person = Person.objects.first() fetched_person.value is None + assert Person.objects(value=None).first() is not None + def test_validation(self): """Ensure that invalid values cannot be assigned to decimal fields.""" @@ -118,3 +120,23 @@ class Person(Document): assert 2 == Person.objects(money__gt="7").count() assert 3 == Person.objects(money__gte="7").count() + + def test_precision_0(self): + """prevent regression of a bug that was raising an exception when using precision=0""" + + class TestDoc(Document): + d = DecimalField(precision=0) + + TestDoc.drop_collection() + + td = TestDoc(d=Decimal("12.00032678131263")) + assert td.d == Decimal("12") + + def test_precision_negative_raise(self): + """prevent regression of a bug that was raising an exception when using precision=0""" + with pytest.raises( + ValidationError, match="precision must be a positive integer" + ): + + class TestDoc(Document): + dneg = DecimalField(precision=-1) diff --git a/tests/fields/test_dict_field.py b/tests/fields/test_dict_field.py index 63d5c5279..4da29d9a3 100644 --- a/tests/fields/test_dict_field.py +++ b/tests/fields/test_dict_field.py @@ -324,6 +324,7 @@ class Simple(Document): def test_dictfield_with_referencefield_complex_nesting_cases(self): """Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)""" + # Relates to Issue #1453 class Doc(Document): s = StringField() diff --git a/tests/fields/test_embedded_document_field.py 
b/tests/fields/test_embedded_document_field.py index 0e9784ffe..fefee4efd 100644 --- a/tests/fields/test_embedded_document_field.py +++ b/tests/fields/test_embedded_document_field.py @@ -1,14 +1,19 @@ +from copy import deepcopy + import pytest +from bson import ObjectId from mongoengine import ( Document, EmbeddedDocument, EmbeddedDocumentField, + EmbeddedDocumentListField, GenericEmbeddedDocumentField, IntField, InvalidQueryError, ListField, LookUpError, + MapField, StringField, ValidationError, ) @@ -57,6 +62,48 @@ class MyFailingDoc(Document): class MyFailingdoc2(Document): emb = EmbeddedDocumentField("MyDoc") + def test_embedded_document_field_validate_subclass(self): + class BaseItem(EmbeddedDocument): + f = IntField() + + meta = {"allow_inheritance": True} + + def validate(self, clean=True): + if self.f == 0: + raise Exception("can not be 0") + return super().validate(clean) + + class RealItem(BaseItem): + a = IntField() + + def validate(self, clean=True): + if self.f == 1: + raise Exception("can not be 1") + return super().validate(clean) + + class TopLevel(Document): + item = EmbeddedDocumentField(document_type=BaseItem) + items = EmbeddedDocumentListField(document_type=BaseItem) + + passing_item = RealItem(f=2, a=0) + item = TopLevel(item=passing_item, items=[passing_item]) + item.validate() + + failing_item = RealItem(f=1, a=0) + item = TopLevel(item=failing_item) + with pytest.raises(Exception, match="can not be 1"): + item.validate() + + item = TopLevel(items=[failing_item]) + with pytest.raises(Exception, match="can not be 1"): + item.validate() + + # verify that super calls the parent + failing_item_in_base = RealItem(f=0, a=0) + item = TopLevel(item=failing_item_in_base) + with pytest.raises(Exception, match="can not be 0"): + item.validate() + def test_query_embedded_document_attribute(self): class AdminSettings(EmbeddedDocument): foo1 = StringField() @@ -350,3 +397,30 @@ class Person(Document): # Test existing attribute assert 
Person.objects(settings__base_foo="basefoo").first().id == p.id assert Person.objects(settings__sub_foo="subfoo").first().id == p.id + + def test_deepcopy_set__instance(self): + """Ensure that the _instance attribute on EmbeddedDocument exists after a deepcopy""" + + class Wallet(EmbeddedDocument): + money = IntField() + + class Person(Document): + wallet = EmbeddedDocumentField(Wallet) + wallet_map = MapField(EmbeddedDocumentField(Wallet)) + + # Test on fresh EmbeddedDoc + emb_doc = Wallet(money=1) + assert emb_doc._instance is None + copied_emb_doc = deepcopy(emb_doc) + assert copied_emb_doc._instance is None + + # Test on attached EmbeddedDoc + doc = Person( + id=ObjectId(), wallet=Wallet(money=2), wallet_map={"test": Wallet(money=2)} + ) + assert doc.wallet._instance == doc + copied_emb_doc = deepcopy(doc.wallet) + assert copied_emb_doc._instance is None + + copied_map_emb_doc = deepcopy(doc.wallet_map) + assert copied_map_emb_doc["test"]._instance is None diff --git a/tests/fields/test_enum_field.py b/tests/fields/test_enum_field.py index d2730d334..86befabba 100644 --- a/tests/fields/test_enum_field.py +++ b/tests/fields/test_enum_field.py @@ -3,7 +3,13 @@ import pytest from bson import InvalidDocument -from mongoengine import Document, EnumField, ValidationError +from mongoengine import ( + DictField, + Document, + EnumField, + ListField, + ValidationError, +) from tests.utils import MongoDBTestCase, get_as_pymongo @@ -21,6 +27,12 @@ class ModelWithEnum(Document): status = EnumField(Status) +class ModelComplexEnum(Document): + status = EnumField(Status) + statuses = ListField(EnumField(Status)) + color_mapping = DictField(EnumField(Color)) + + class TestStringEnumField(MongoDBTestCase): def test_storage(self): model = ModelWithEnum(status=Status.NEW).save() @@ -101,6 +113,42 @@ def test_wrong_choices(self): with pytest.raises(ValueError, match="Invalid choices"): EnumField(Status, choices=[Status.DONE, Color.RED]) + def test_embedding_in_complex_field(self): 
+ ModelComplexEnum.drop_collection() + model = ModelComplexEnum( + status="new", statuses=["new"], color_mapping={"red": 1} + ).save() + assert model.status == Status.NEW + assert model.statuses == [Status.NEW] + assert model.color_mapping == {"red": Color.RED} + + model.reload() + assert model.status == Status.NEW + assert model.statuses == [Status.NEW] + assert model.color_mapping == {"red": Color.RED} + + model.status = "done" + model.color_mapping = {"blue": 2} + model.statuses = ["new", "done"] + model.save() + assert model.status == Status.DONE + assert model.statuses == [Status.NEW, Status.DONE] + assert model.color_mapping == {"blue": Color.BLUE} + + model.reload() + assert model.status == Status.DONE + assert model.color_mapping == {"blue": Color.BLUE} + assert model.statuses == [Status.NEW, Status.DONE] + + with pytest.raises(ValidationError, match="must be one of ..Status"): + model.statuses = [1] + model.save() + + model.statuses = ["done"] + model.color_mapping = {"blue": "done"} + with pytest.raises(ValidationError, match="must be one of ..Color"): + model.save() + class ModelWithColor(Document): color = EnumField(Color, default=Color.RED) @@ -124,10 +172,7 @@ def test_storage_enum_with_int(self): assert get_as_pymongo(model) == {"_id": model.id, "color": 2} def test_validate_model(self): - with pytest.raises(ValidationError, match="Value must be one of"): - ModelWithColor(color=3).validate() - - with pytest.raises(ValidationError, match="Value must be one of"): + with pytest.raises(ValidationError, match="must be one of ..Color"): ModelWithColor(color="wrong_type").validate() diff --git a/tests/fields/test_fields.py b/tests/fields/test_fields.py index 69fe14712..d95e2fce0 100644 --- a/tests/fields/test_fields.py +++ b/tests/fields/test_fields.py @@ -44,6 +44,46 @@ class TestField(MongoDBTestCase): + def test_constructor_set_historical_behavior_is_kept(self): + class MyDoc(Document): + oid = ObjectIdField() + + doc = MyDoc() + doc.oid = 
str(ObjectId()) + assert isinstance(doc.oid, str) + + # not modified on save (historical behavior) + doc.save() + assert isinstance(doc.oid, str) + + # reloading goes through constructor so it is expected to go through to_python + doc.reload() + assert isinstance(doc.oid, ObjectId) + + def test_constructor_set_list_field_historical_behavior_is_kept(self): + # Although the behavior is not consistent between regular field and a ListField + # This is the historical behavior so we must make sure we don't modify it (unless consciously done of course) + + class MyOIDSDoc(Document): + oids = ListField(ObjectIdField()) + + # constructor goes through to_python so casting occurs + doc = MyOIDSDoc(oids=[str(ObjectId())]) + assert isinstance(doc.oids[0], ObjectId) + + # constructor goes through to_python so casting occurs + doc = MyOIDSDoc() + doc.oids = [str(ObjectId())] + assert isinstance(doc.oids[0], str) + + doc.save() + assert isinstance(doc.oids[0], str) + + # reloading goes through constructor so it is expected to go through to_python + # and cast + doc.reload() + assert isinstance(doc.oids[0], ObjectId) + def test_default_values_nothing_set(self): """Ensure that default field values are used when creating a document. @@ -126,7 +166,7 @@ class Person(Document): def test_default_values_set_to_None(self): """Ensure that default field values are used even when - we explcitly initialize the doc with None values. + we explicitly initialize the doc with None values. 
""" class Person(Document): @@ -199,6 +239,7 @@ def test_default_value_is_not_used_when_changing_value_to_empty_list_for_strict_ self, ): """List field with default can be set to the empty list (strict)""" + # Issue #1733 class Doc(Document): x = ListField(IntField(), default=lambda: [42]) @@ -213,6 +254,7 @@ def test_default_value_is_not_used_when_changing_value_to_empty_list_for_dyn_doc self, ): """List field with default can be set to the empty list (dynamic)""" + # Issue #1733 class Doc(DynamicDocument): x = ListField(IntField(), default=lambda: [42]) @@ -2077,7 +2119,7 @@ def test_tuples_as_tuples(self): a ComplexBaseField. """ - class EnumField(BaseField): + class SomeField(BaseField): def __init__(self, **kwargs): super().__init__(**kwargs) @@ -2088,7 +2130,7 @@ def to_python(self, value): return tuple(value) class TestDoc(Document): - items = ListField(EnumField()) + items = ListField(SomeField()) TestDoc.drop_collection() diff --git a/tests/fields/test_file_field.py b/tests/fields/test_file_field.py index 8c0db044a..43bb0fdbf 100644 --- a/tests/fields/test_file_field.py +++ b/tests/fields/test_file_field.py @@ -328,7 +328,7 @@ class TestFile(Document): assert len(list(files)) == 1 assert len(list(chunks)) == 1 - # Deleting the docoument should delete the files + # Deleting the document should delete the files testfile.delete() files = db.fs.files.find() @@ -535,7 +535,6 @@ class TestFile(Document): @require_pil def test_get_image_by_grid_id(self): class TestImage(Document): - image1 = ImageField() image2 = ImageField() diff --git a/tests/fields/test_float_field.py b/tests/fields/test_float_field.py index 30f829ae8..b09ddf95f 100644 --- a/tests/fields/test_float_field.py +++ b/tests/fields/test_float_field.py @@ -51,9 +51,15 @@ class BigPerson(Document): big_person.height = int(0) big_person.validate() - big_person.height = 2 ** 500 + big_person.height = 2**500 big_person.validate() - big_person.height = 2 ** 100000 # Too big for a float value + 
big_person.height = 2**100000 # Too big for a float value with pytest.raises(ValidationError): big_person.validate() + + def test_query_none_value_dont_raise(self): + class BigPerson(Document): + height = FloatField() + + _ = list(BigPerson.objects(height=None)) diff --git a/tests/fields/test_geo_fields.py b/tests/fields/test_geo_fields.py index 3c63b1686..75b066751 100644 --- a/tests/fields/test_geo_fields.py +++ b/tests/fields/test_geo_fields.py @@ -369,7 +369,6 @@ class Parent(Document): assert len(Location._geo_indices()) == 1 def test_geo_indexes_auto_index(self): - # Test just listing the fields class Log(Document): location = PointField(auto_index=False) diff --git a/tests/fields/test_object_id_field.py b/tests/fields/test_object_id_field.py new file mode 100644 index 000000000..3503b82e9 --- /dev/null +++ b/tests/fields/test_object_id_field.py @@ -0,0 +1,37 @@ +import pytest +from bson import ObjectId + +from mongoengine import Document, ObjectIdField, ValidationError +from tests.utils import MongoDBTestCase, get_as_pymongo + + +class TestObjectIdField(MongoDBTestCase): + def test_storage(self): + class MyDoc(Document): + oid = ObjectIdField() + + doc = MyDoc(oid=ObjectId()) + doc.save() + assert get_as_pymongo(doc) == {"_id": doc.id, "oid": doc.oid} + + def test_constructor_converts_str_to_ObjectId(self): + class MyDoc(Document): + oid = ObjectIdField() + + doc = MyDoc(oid=str(ObjectId())) + assert isinstance(doc.oid, ObjectId) + + def test_validation_works(self): + class MyDoc(Document): + oid = ObjectIdField() + + doc = MyDoc(oid="not-an-oid!") + with pytest.raises(ValidationError, match="Invalid ObjectID"): + doc.save() + + def test_query_none_value_dont_raise(self): + # cf issue #2681 + class MyDoc(Document): + oid = ObjectIdField(null=True) + + _ = list(MyDoc.objects(oid=None)) diff --git a/tests/queryset/test_geo.py b/tests/queryset/test_geo.py index 0fe3af97a..e87d27aea 100644 --- a/tests/queryset/test_geo.py +++ b/tests/queryset/test_geo.py @@ -2,6 
+2,7 @@ import unittest from mongoengine import * +from mongoengine.pymongo_support import PYMONGO_VERSION from tests.utils import MongoDBTestCase @@ -47,13 +48,15 @@ def test_near(self): # note that "near" will show the san francisco event, too, # although it sorts to last. events = self.Event.objects(location__near=[-87.67892, 41.9120459]) - assert events.count() == 3 + if PYMONGO_VERSION < (4,): + assert events.count() == 3 assert list(events) == [event1, event3, event2] # ensure ordering is respected by "near" events = self.Event.objects(location__near=[-87.67892, 41.9120459]) events = events.order_by("-date") - assert events.count() == 3 + if PYMONGO_VERSION < (4,): + assert events.count() == 3 assert list(events) == [event3, event1, event2] def test_near_and_max_distance(self): @@ -65,8 +68,9 @@ def test_near_and_max_distance(self): # find events within 10 degrees of san francisco point = [-122.415579, 37.7566023] events = self.Event.objects(location__near=point, location__max_distance=10) - assert events.count() == 1 - assert events[0] == event2 + if PYMONGO_VERSION < (4,): + assert events.count() == 1 + assert list(events) == [event2] def test_near_and_min_distance(self): """Ensure the "min_distance" operator works alongside the "near" @@ -77,7 +81,9 @@ def test_near_and_min_distance(self): # find events at least 10 degrees away of san francisco point = [-122.415579, 37.7566023] events = self.Event.objects(location__near=point, location__min_distance=10) - assert events.count() == 2 + if PYMONGO_VERSION < (4,): + assert events.count() == 2 + assert list(events) == [event3, event1] def test_within_distance(self): """Make sure the "within_distance" operator works.""" @@ -153,13 +159,15 @@ def test_2dsphere_near(self): # note that "near" will show the san francisco event, too, # although it sorts to last. 
events = self.Event.objects(location__near=[-87.67892, 41.9120459]) - assert events.count() == 3 + if PYMONGO_VERSION < (4,): + assert events.count() == 3 assert list(events) == [event1, event3, event2] # ensure ordering is respected by "near" events = self.Event.objects(location__near=[-87.67892, 41.9120459]) events = events.order_by("-date") - assert events.count() == 3 + if PYMONGO_VERSION < (4,): + assert events.count() == 3 assert list(events) == [event3, event1, event2] def test_2dsphere_near_and_max_distance(self): @@ -171,21 +179,25 @@ def test_2dsphere_near_and_max_distance(self): # find events within 10km of san francisco point = [-122.415579, 37.7566023] events = self.Event.objects(location__near=point, location__max_distance=10000) - assert events.count() == 1 - assert events[0] == event2 + if PYMONGO_VERSION < (4,): + assert events.count() == 1 + assert list(events) == [event2] # find events within 1km of greenpoint, broolyn, nyc, ny events = self.Event.objects( location__near=[-73.9509714, 40.7237134], location__max_distance=1000 ) - assert events.count() == 0 + if PYMONGO_VERSION < (4,): + assert events.count() == 0 + assert list(events) == [] # ensure ordering is respected by "near" events = self.Event.objects( location__near=[-87.67892, 41.9120459], location__max_distance=10000 ).order_by("-date") - assert events.count() == 2 - assert events[0] == event3 + if PYMONGO_VERSION < (4,): + assert events.count() == 2 + assert list(events) == [event3, event1] def test_2dsphere_geo_within_box(self): """Ensure the "geo_within_box" operator works with a 2dsphere @@ -225,7 +237,7 @@ def test_2dsphere_geo_within_polygon(self): assert events.count() == 0 def test_2dsphere_near_and_min_max_distance(self): - """Ensure "min_distace" and "max_distance" operators work well + """Ensure "min_distance" and "max_distance" operators work well together with the "near" operator in a 2dsphere index. 
""" event1, event2, event3 = self._create_event_data(point_field_class=PointField) @@ -236,15 +248,17 @@ def test_2dsphere_near_and_min_max_distance(self): location__min_distance=1000, location__max_distance=10000, ).order_by("-date") - assert events.count() == 1 - assert events[0] == event3 + if PYMONGO_VERSION < (4,): + assert events.count() == 1 + assert list(events) == [event3] # ensure ordering is respected by "near" with "min_distance" events = self.Event.objects( location__near=[-87.67892, 41.9120459], location__min_distance=10000 ).order_by("-date") - assert events.count() == 1 - assert events[0] == event2 + if PYMONGO_VERSION < (4,): + assert events.count() == 1 + assert list(events) == [event2] def test_2dsphere_geo_within_center(self): """Make sure the "geo_within_center" operator works with a @@ -289,7 +303,8 @@ class Event(Document): # note that "near" will show the san francisco event, too, # although it sorts to last. events = Event.objects(venue__location__near=[-87.67892, 41.9120459]) - assert events.count() == 3 + if PYMONGO_VERSION < (4,): + assert events.count() == 3 assert list(events) == [event1, event3, event2] def test_geo_spatial_embedded(self): @@ -318,7 +333,9 @@ class Point(Document): # Finds both points because they are within 60 km of the reference # point equidistant between them. 
points = Point.objects(location__near_sphere=[-122, 37.5]) - assert points.count() == 2 + if PYMONGO_VERSION < (4,): + assert points.count() == 2 + assert list(points) == [north_point, south_point] # Same behavior for _within_spherical_distance points = Point.objects( @@ -329,36 +346,42 @@ class Point(Document): points = Point.objects( location__near_sphere=[-122, 37.5], location__max_distance=60 / earth_radius ) - assert points.count() == 2 + if PYMONGO_VERSION < (4,): + assert points.count() == 2 + assert list(points) == [north_point, south_point] # Test query works with max_distance, being farer from one point points = Point.objects( location__near_sphere=[-122, 37.8], location__max_distance=60 / earth_radius ) close_point = points.first() - assert points.count() == 1 + if PYMONGO_VERSION < (4,): + assert points.count() == 1 + assert list(points) == [north_point] # Test query works with min_distance, being farer from one point points = Point.objects( location__near_sphere=[-122, 37.8], location__min_distance=60 / earth_radius ) - assert points.count() == 1 + if PYMONGO_VERSION < (4,): + assert points.count() == 1 far_point = points.first() + assert list(points) == [south_point] assert close_point != far_point # Finds both points, but orders the north point first because it's # closer to the reference point to the north. points = Point.objects(location__near_sphere=[-122, 38.5]) - assert points.count() == 2 - assert points[0].id == north_point.id - assert points[1].id == south_point.id + if PYMONGO_VERSION < (4,): + assert points.count() == 2 + assert list(points) == [north_point, south_point] # Finds both points, but orders the south point first because it's # closer to the reference point to the south. 
points = Point.objects(location__near_sphere=[-122, 36.5]) - assert points.count() == 2 - assert points[0].id == south_point.id - assert points[1].id == north_point.id + if PYMONGO_VERSION < (4,): + assert points.count() == 2 + assert list(points) == [south_point, north_point] # Finds only one point because only the first point is within 60km of # the reference point to the south. @@ -375,56 +398,72 @@ class Road(Document): Road.drop_collection() - Road(name="66", line=[[40, 5], [41, 6]]).save() + road = Road(name="66", line=[[40, 5], [41, 6]]) + road.save() # near point = {"type": "Point", "coordinates": [40, 5]} - roads = Road.objects.filter(line__near=point["coordinates"]).count() - assert 1 == roads + roads = Road.objects.filter(line__near=point["coordinates"]) + if PYMONGO_VERSION < (4,): + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(line__near=point).count() - assert 1 == roads + roads = Road.objects.filter(line__near=point) + if PYMONGO_VERSION < (4,): + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(line__near={"$geometry": point}).count() - assert 1 == roads + roads = Road.objects.filter(line__near={"$geometry": point}) + if PYMONGO_VERSION < (4,): + assert roads.count() == 1 + assert list(roads) == [road] # Within polygon = { "type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], } - roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count() - assert 1 == roads + roads = Road.objects.filter(line__geo_within=polygon["coordinates"]) + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(line__geo_within=polygon).count() - assert 1 == roads + roads = Road.objects.filter(line__geo_within=polygon) + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(line__geo_within={"$geometry": polygon}).count() - assert 1 == roads + roads = 
Road.objects.filter(line__geo_within={"$geometry": polygon}) + assert roads.count() == 1 + assert list(roads) == [road] # Intersects line = {"type": "LineString", "coordinates": [[40, 5], [40, 6]]} - roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count() - assert 1 == roads + roads = Road.objects.filter(line__geo_intersects=line["coordinates"]) + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(line__geo_intersects=line).count() - assert 1 == roads + roads = Road.objects.filter(line__geo_intersects=line) + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count() - assert 1 == roads + roads = Road.objects.filter(line__geo_intersects={"$geometry": line}) + assert roads.count() == 1 + assert list(roads) == [road] polygon = { "type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], } - roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count() - assert 1 == roads + roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]) + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(line__geo_intersects=polygon).count() - assert 1 == roads + roads = Road.objects.filter(line__geo_intersects=polygon) + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(line__geo_intersects={"$geometry": polygon}).count() - assert 1 == roads + roads = Road.objects.filter(line__geo_intersects={"$geometry": polygon}) + assert roads.count() == 1 + assert list(roads) == [road] def test_polygon(self): class Road(Document): @@ -433,56 +472,72 @@ class Road(Document): Road.drop_collection() - Road(name="66", poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save() + road = Road(name="66", poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]) + road.save() # near point = {"type": "Point", "coordinates": [40, 5]} - roads = 
Road.objects.filter(poly__near=point["coordinates"]).count() - assert 1 == roads + roads = Road.objects.filter(poly__near=point["coordinates"]) + if PYMONGO_VERSION < (4,): + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(poly__near=point).count() - assert 1 == roads + roads = Road.objects.filter(poly__near=point) + if PYMONGO_VERSION < (4,): + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(poly__near={"$geometry": point}).count() - assert 1 == roads + roads = Road.objects.filter(poly__near={"$geometry": point}) + if PYMONGO_VERSION < (4,): + assert roads.count() == 1 + assert list(roads) == [road] # Within polygon = { "type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], } - roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count() - assert 1 == roads + roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]) + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(poly__geo_within=polygon).count() - assert 1 == roads + roads = Road.objects.filter(poly__geo_within=polygon) + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(poly__geo_within={"$geometry": polygon}).count() - assert 1 == roads + roads = Road.objects.filter(poly__geo_within={"$geometry": polygon}) + assert roads.count() == 1 + assert list(roads) == [road] # Intersects line = {"type": "LineString", "coordinates": [[40, 5], [41, 6]]} - roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count() - assert 1 == roads + roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]) + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(poly__geo_intersects=line).count() - assert 1 == roads + roads = Road.objects.filter(poly__geo_intersects=line) + assert roads.count() == 1 + assert list(roads) == [road] - roads = 
Road.objects.filter(poly__geo_intersects={"$geometry": line}).count() - assert 1 == roads + roads = Road.objects.filter(poly__geo_intersects={"$geometry": line}) + assert roads.count() == 1 + assert list(roads) == [road] polygon = { "type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], } - roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count() - assert 1 == roads + roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]) + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(poly__geo_intersects=polygon).count() - assert 1 == roads + roads = Road.objects.filter(poly__geo_intersects=polygon) + assert roads.count() == 1 + assert list(roads) == [road] - roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count() - assert 1 == roads + roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}) + assert roads.count() == 1 + assert list(roads) == [road] def test_aspymongo_with_only(self): """Ensure as_pymongo works with only""" diff --git a/tests/queryset/test_modify.py b/tests/queryset/test_modify.py index 15e3af349..d5cedb2c1 100644 --- a/tests/queryset/test_modify.py +++ b/tests/queryset/test_modify.py @@ -1,5 +1,7 @@ import unittest +import pytest + from mongoengine import ( Document, IntField, @@ -19,7 +21,7 @@ def setUp(self): connect(db="mongoenginetest") Doc.drop_collection() - def assertDbEqual(self, docs): + def _assert_db_equal(self, docs): assert list(Doc._collection.find().sort("id")) == docs def test_modify(self): @@ -28,7 +30,14 @@ def test_modify(self): old_doc = Doc.objects(id=1).modify(set__value=-1) assert old_doc.to_json() == doc.to_json() - self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) + self._assert_db_equal([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) + + def test_modify_full_response_raise_value_error_for_recent_mongo(self): + Doc(id=0, value=0).save() + Doc(id=1, value=1).save() + 
+ with pytest.raises(ValueError): + Doc.objects(id=1).modify(set__value=-1, full_response=True) def test_modify_with_new(self): Doc(id=0, value=0).save() @@ -37,18 +46,18 @@ def test_modify_with_new(self): new_doc = Doc.objects(id=1).modify(set__value=-1, new=True) doc.value = -1 assert new_doc.to_json() == doc.to_json() - self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) + self._assert_db_equal([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) def test_modify_not_existing(self): Doc(id=0, value=0).save() assert Doc.objects(id=1).modify(set__value=-1) is None - self.assertDbEqual([{"_id": 0, "value": 0}]) + self._assert_db_equal([{"_id": 0, "value": 0}]) def test_modify_with_upsert(self): Doc(id=0, value=0).save() old_doc = Doc.objects(id=1).modify(set__value=1, upsert=True) assert old_doc is None - self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}]) + self._assert_db_equal([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}]) def test_modify_with_upsert_existing(self): Doc(id=0, value=0).save() @@ -56,13 +65,13 @@ def test_modify_with_upsert_existing(self): old_doc = Doc.objects(id=1).modify(set__value=-1, upsert=True) assert old_doc.to_json() == doc.to_json() - self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) + self._assert_db_equal([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) def test_modify_with_upsert_with_new(self): Doc(id=0, value=0).save() new_doc = Doc.objects(id=1).modify(upsert=True, new=True, set__value=1) assert new_doc.to_mongo() == {"_id": 1, "value": 1} - self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}]) + self._assert_db_equal([{"_id": 0, "value": 0}, {"_id": 1, "value": 1}]) def test_modify_with_remove(self): Doc(id=0, value=0).save() @@ -70,12 +79,12 @@ def test_modify_with_remove(self): old_doc = Doc.objects(id=1).modify(remove=True) assert old_doc.to_json() == doc.to_json() - self.assertDbEqual([{"_id": 0, "value": 0}]) + self._assert_db_equal([{"_id": 0, "value": 
0}]) def test_find_and_modify_with_remove_not_existing(self): Doc(id=0, value=0).save() assert Doc.objects(id=1).modify(remove=True) is None - self.assertDbEqual([{"_id": 0, "value": 0}]) + self._assert_db_equal([{"_id": 0, "value": 0}]) def test_modify_with_order_by(self): Doc(id=0, value=3).save() @@ -85,7 +94,7 @@ def test_modify_with_order_by(self): old_doc = Doc.objects().order_by("-id").modify(set__value=-1) assert old_doc.to_json() == doc.to_json() - self.assertDbEqual( + self._assert_db_equal( [ {"_id": 0, "value": 3}, {"_id": 1, "value": 2}, @@ -100,7 +109,7 @@ def test_modify_with_fields(self): old_doc = Doc.objects(id=1).only("id").modify(set__value=-1) assert old_doc.to_mongo() == {"_id": 1} - self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) + self._assert_db_equal([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) def test_modify_with_push(self): class BlogPost(Document): diff --git a/tests/queryset/test_queryset.py b/tests/queryset/test_queryset.py index 1aa4f32a3..d1cae3415 100644 --- a/tests/queryset/test_queryset.py +++ b/tests/queryset/test_queryset.py @@ -24,7 +24,9 @@ QuerySetManager, queryset_manager, ) +from mongoengine.queryset.base import BaseQuerySet from tests.utils import ( + requires_mongodb_gte_42, requires_mongodb_gte_44, requires_mongodb_lt_42, ) @@ -149,6 +151,14 @@ def test_limit_0_returns_all_documents(self): persons = list(self.Person.objects().limit(0)) assert len(persons) == 2 == n_docs + def test_limit_0(self): + """Ensure that QuerySet.limit works as expected.""" + self.Person.objects.create(name="User A", age=20) + + # Test limit with 0 as parameter + qs = self.Person.objects.limit(0) + assert qs.count() == 0 + def test_limit(self): """Ensure that QuerySet.limit works as expected.""" user_a = self.Person.objects.create(name="User A", age=20) @@ -407,6 +417,19 @@ class A(Document): A.drop_collection() A().save() + # validate collection not empty + assert A.objects.count() == 1 + + # update operations + 
assert A.objects.none().update(s="1") == 0 + assert A.objects.none().update_one(s="1") == 0 + assert A.objects.none().modify(s="1") is None + + # validate noting change by update operations + assert A.objects(s="1").count() == 0 + + # fetch queries + assert A.objects.none().first() is None assert list(A.objects.none()) == [] assert list(A.objects.none().all()) == [] assert list(A.objects.none().limit(1)) == [] @@ -594,7 +617,6 @@ class BlogPost(Document): assert post.comments[1].votes == 8 def test_update_using_positional_operator_matches_first(self): - # Currently the $ operator only applies to the first matched item in # the query @@ -867,6 +889,21 @@ def test_set_on_insert(self): assert "Bob" == bob.name assert 30 == bob.age + def test_rename(self): + self.Person.drop_collection() + self.Person.objects.create(name="Foo", age=11) + + bob = self.Person.objects.as_pymongo().first() + assert "age" in bob + assert bob["age"] == 11 + + self.Person.objects(name="Foo").update(rename__age="person_age") + + bob = self.Person.objects.as_pymongo().first() + assert "age" not in bob + assert "person_age" in bob + assert bob["person_age"] == 11 + def test_save_and_only_on_fields_with_default(self): class Embed(EmbeddedDocument): field = IntField() @@ -1240,6 +1277,34 @@ def test_regex_query_shortcuts(self): obj = self.Person.objects(name__iexact="gUIDO VAN rOSSU").first() assert obj is None + # Test wholeword + obj = self.Person.objects(name__wholeword="Guido").first() + assert obj == person + obj = self.Person.objects(name__wholeword="rossum").first() + assert obj is None + obj = self.Person.objects(name__wholeword="Rossu").first() + assert obj is None + + # Test iwholeword + obj = self.Person.objects(name__iwholeword="rOSSUM").first() + assert obj == person + obj = self.Person.objects(name__iwholeword="rOSSU").first() + assert obj is None + + # Test regex + obj = self.Person.objects(name__regex="^[Guido].*[Rossum]$").first() + assert obj == person + obj = 
self.Person.objects(name__regex="^[guido].*[rossum]$").first() + assert obj is None + obj = self.Person.objects(name__regex="^[uido].*[Rossum]$").first() + assert obj is None + + # Test iregex + obj = self.Person.objects(name__iregex="^[guido].*[rossum]$").first() + assert obj == person + obj = self.Person.objects(name__iregex="^[Uido].*[Rossum]$").first() + assert obj is None + # Test unsafe expressions person = self.Person(name="Guido van Rossum [.'Geek']") person.save() @@ -1324,7 +1389,14 @@ def test_filter_chaining_with_regex(self): person.save() people = self.Person.objects - people = people.filter(name__startswith="Gui").filter(name__not__endswith="tum") + people = ( + people.filter(name__startswith="Gui") + .filter(name__not__endswith="tum") + .filter(name__icontains="VAN") + .filter(name__regex="^Guido") + .filter(name__wholeword="Guido") + .filter(name__wholeword="van") + ) assert people.count() == 1 def assertSequence(self, qs, expected): @@ -2167,6 +2239,36 @@ class BlogPost(Document): post.reload() assert post.tags == ["code", "mongodb"] + @requires_mongodb_gte_42 + def test_aggregation_update(self): + """Ensure that the 'aggregation_update' update works correctly.""" + + class BlogPost(Document): + slug = StringField() + tags = ListField(StringField()) + + BlogPost.drop_collection() + + post = BlogPost(slug="test") + post.save() + + BlogPost.objects(slug="test").update( + __raw__=[{"$set": {"slug": {"$concat": ["$slug", " ", "$slug"]}}}], + ) + post.reload() + assert post.slug == "test test" + + BlogPost.objects(slug="test test").update( + __raw__=[ + {"$set": {"slug": {"$concat": ["$slug", " ", "it"]}}}, # test test it + { + "$set": {"slug": {"$concat": ["When", " ", "$slug"]}} + }, # When test test it + ], + ) + post.reload() + assert post.slug == "When test test it" + def test_add_to_set_each(self): class Item(Document): name = StringField(required=True) @@ -2491,6 +2593,12 @@ def test_order_by(self): ages = [p.age for p in 
self.Person.objects.order_by("-name")] assert ages == [30, 40, 20] + ages = [p.age for p in self.Person.objects.order_by()] + assert ages == [40, 20, 30] + + ages = [p.age for p in self.Person.objects.order_by("")] + assert ages == [40, 20, 30] + def test_order_by_optional(self): class BlogPost(Document): title = StringField() @@ -3424,6 +3532,27 @@ class Bar(Document): foo.save() assert Foo.objects.distinct("bar") == [bar] + assert Foo.objects.no_dereference().distinct("bar") == [bar.pk] + + def test_base_queryset_iter_raise_not_implemented(self): + class Tmp(Document): + pass + + qs = BaseQuerySet(document=Tmp, collection=Tmp._get_collection()) + with pytest.raises(NotImplementedError): + _ = list(qs) + + def test_search_text_raise_if_called_2_times(self): + class News(Document): + title = StringField() + content = StringField() + is_active = BooleanField(default=True) + + News.drop_collection() + with pytest.raises(OperationError): + News.objects.search_text("t1", language="portuguese").search_text( + "t2", language="french" + ) def test_text_indexes(self): class News(Document): @@ -3630,6 +3759,7 @@ class Foo(Document): foo.save() assert Foo.objects.distinct("bar_lst") == [bar_1, bar_2] + assert Foo.objects.no_dereference().distinct("bar_lst") == [bar_1.pk, bar_2.pk] def test_custom_manager(self): """Ensure that custom QuerySetManager instances work as expected.""" @@ -3816,6 +3946,10 @@ class BlogPost(Document): assert objects[post_2.id].title == post_2.title assert objects[post_5.id].title == post_5.title + objects = BlogPost.objects.as_pymongo().in_bulk(ids) + assert len(objects) == 3 + assert isinstance(objects[post_1.id], dict) + BlogPost.drop_collection() def tearDown(self): @@ -3864,6 +3998,33 @@ class Post(Document): Post.drop_collection() + def test_custom_querysets_set_manager_methods(self): + """Ensure that custom QuerySet classes methods may be used.""" + + class CustomQuerySet(QuerySet): + def delete(self, *args, **kwargs): + """Example of method 
when one want to change default behaviour of it""" + return 0 + + class CustomQuerySetManager(QuerySetManager): + queryset_class = CustomQuerySet + + class Post(Document): + objects = CustomQuerySetManager() + + Post.drop_collection() + + assert isinstance(Post.objects, CustomQuerySet) + assert Post.objects.delete() == 0 + + post = Post() + post.save() + assert Post.objects.count() == 1 + post.delete() + assert Post.objects.count() == 1 + + Post.drop_collection() + def test_custom_querysets_managers_directly(self): """Ensure that custom QuerySet classes may be used.""" @@ -4126,14 +4287,14 @@ class Number(Document): assert [1, 2, 3] == numbers Number.drop_collection() - def test_ensure_index(self): + def test_create_index(self): """Ensure that manual creation of indexes works.""" class Comment(Document): message = StringField() meta = {"allow_inheritance": True} - Comment.ensure_index("message") + Comment.create_index("message") info = Comment.objects._collection.index_information() info = [ @@ -5468,7 +5629,6 @@ class Person(Document): # Check that bool(queryset) does not uses the orderby qs = Person.objects.order_by("name") with query_counter() as q: - if bool(qs): pass @@ -5481,7 +5641,6 @@ class Person(Document): # Check that normal query uses orderby qs2 = Person.objects.order_by("name") with query_counter() as q: - for x in qs2: pass @@ -5505,7 +5664,6 @@ class Person(Document): Person(name="A").save() with query_counter() as q: - if Person.objects: pass diff --git a/tests/queryset/test_queryset_aggregation.py b/tests/queryset/test_queryset_aggregation.py index ecc0db6e0..f1d504c0b 100644 --- a/tests/queryset/test_queryset_aggregation.py +++ b/tests/queryset/test_queryset_aggregation.py @@ -276,6 +276,23 @@ class Aggr(Document): {"_id": agg2.id, "c": 0.0, "name": "Y"}, ] + def test_queryset_aggregation_none(self): + class Person(Document): + name = StringField() + age = IntField() + + Person.drop_collection() + + p1 = Person(name="Isabella Luanna", age=16) + 
p2 = Person(name="Wilson Junior", age=21) + p3 = Person(name="Sandra Mara", age=37) + Person.objects.insert([p1, p2, p3]) + + pipeline = [{"$project": {"name": {"$toUpper": "$name"}}}] + data = Person.objects().none().order_by("name").aggregate(pipeline) + + assert list(data) == [] + if __name__ == "__main__": unittest.main() diff --git a/tests/queryset/test_transform.py b/tests/queryset/test_transform.py index 9a7d6365e..5627597f8 100644 --- a/tests/queryset/test_transform.py +++ b/tests/queryset/test_transform.py @@ -275,7 +275,7 @@ class Doc(Document): assert Doc.objects(df__type=2).count() == 1 # str assert Doc.objects(df__type=16).count() == 1 # int - def test_last_field_name_like_operator(self): + def test_embedded_field_name_like_operator(self): class EmbeddedItem(EmbeddedDocument): type = StringField() name = StringField() @@ -295,6 +295,30 @@ class Doc(Document): assert 1 == Doc.objects(item__type__="sword").count() assert 0 == Doc.objects(item__type__="axe").count() + def test_regular_field_named_like_operator(self): + class SimpleDoc(Document): + size = StringField() + type = StringField() + + SimpleDoc.drop_collection() + SimpleDoc(type="ok", size="ok").save() + + qry = transform.query(SimpleDoc, type="testtype") + assert qry == {"type": "testtype"} + + assert SimpleDoc.objects(type="ok").count() == 1 + assert SimpleDoc.objects(size="ok").count() == 1 + + update = transform.update(SimpleDoc, set__type="testtype") + assert update == {"$set": {"type": "testtype"}} + + SimpleDoc.objects.update(set__type="testtype") + SimpleDoc.objects.update(set__size="testsize") + + s = SimpleDoc.objects.first() + assert s.type == "testtype" + assert s.size == "testsize" + def test_understandable_error_raised(self): class Event(Document): title = StringField() diff --git a/tests/test_changelog_consistency.py b/tests/test_changelog_consistency.py new file mode 100644 index 000000000..c612aa0f2 --- /dev/null +++ b/tests/test_changelog_consistency.py @@ -0,0 +1,26 @@ +import 
os +from pathlib import Path + +from mongoengine import get_version +from tests import DOCS_DIR + + +def test_package_version_described_in_changelog(): + """Ensures that changelog is updated when version is incremented""" + version_str = get_version() + changelog_content = Path(os.path.join(DOCS_DIR, "changelog.rst")).read_text() + assert ( + version_str in changelog_content + ), "Version in __init__.py not present in changelog" + + +def test_package_version_incremented_when_new_version_added_to_changelog(): + """Ensures that changelog is updated when version is incremented""" + version_str = get_version() + changelog_content = Path(os.path.join(DOCS_DIR, "changelog.rst")).read_text() + + def find_between(s, start, end): + return (s.split(start))[1].split(end)[0] + + most_recent_version = find_between(changelog_content, start="Changes in ", end="\n") + assert most_recent_version == version_str diff --git a/tests/test_connection.py b/tests/test_connection.py index 2b7d46a78..c88f87c93 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -1,11 +1,17 @@ import datetime import unittest +import uuid import pymongo import pytest from bson.tz_util import utc from pymongo import MongoClient, ReadPreference -from pymongo.errors import InvalidName, OperationFailure +from pymongo.errors import ( + InvalidName, + InvalidOperation, + OperationFailure, +) +from pymongo.read_preferences import Secondary import mongoengine.connection from mongoengine import ( @@ -19,10 +25,16 @@ from mongoengine.connection import ( DEFAULT_DATABASE_NAME, ConnectionFailure, + _get_connection_settings, disconnect, get_connection, get_db, ) +from mongoengine.pymongo_support import PYMONGO_VERSION + + +def random_str(): + return str(uuid.uuid4()) def get_tz_awareness(connection): @@ -48,7 +60,7 @@ def test_connect(self): connect("mongoenginetest") conn = get_connection() - assert isinstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.MongoClient) db = 
get_db() assert isinstance(db, pymongo.database.Database) @@ -56,7 +68,13 @@ def test_connect(self): connect("mongoenginetest2", alias="testdb") conn = get_connection("testdb") - assert isinstance(conn, pymongo.mongo_client.MongoClient) + assert isinstance(conn, pymongo.MongoClient) + + connect( + "mongoenginetest2", alias="testdb3", mongo_client_class=pymongo.MongoClient + ) + conn = get_connection("testdb") + assert isinstance(conn, pymongo.MongoClient) def test_connect_disconnect_works_properly(self): class History1(Document): @@ -164,6 +182,35 @@ def test_connect_fails_if_similar_connection_settings_arent_defined_the_same_way with pytest.raises(ConnectionFailure): connect(host="mongodb://localhost:27017/%s" % db_name, alias=db_alias) + def test___get_connection_settings(self): + funky_host = "mongodb://root:12345678@1.1.1.1:27017,2.2.2.2:27017,3.3.3.3:27017/db_api?replicaSet=s0&readPreference=secondary&uuidRepresentation=javaLegacy&readPreferenceTags=region:us-west-2,usage:api" + settings = _get_connection_settings(host=funky_host) + + if PYMONGO_VERSION < (4,): + read_pref = Secondary( + tag_sets=[{"region": "us-west-2", "usage": "api"}], + max_staleness=-1, + ) + else: + read_pref = Secondary( + tag_sets=[{"region": "us-west-2", "usage": "api"}], + max_staleness=-1, + hedge=None, + ) + assert settings == { + "authentication_mechanism": None, + "authentication_source": None, + "authmechanismproperties": None, + "host": [funky_host], + "name": "db_api", + "password": "12345678", + "port": 27017, + "read_preference": read_pref, + "replicaSet": "s0", + "username": "root", + "uuidrepresentation": "javaLegacy", + } + def test_connect_passes_silently_connect_multiple_times_with_same_config(self): # test default connection to `test` connect() @@ -286,6 +333,30 @@ def test_disconnect_silently_pass_if_alias_does_not_exist(self): assert len(connections) == 0 disconnect(alias="not_exist") + def test_disconnect_does_not_close_client_used_by_another_alias(self): + client1 
= connect(alias="disconnect_reused_client_test_1") + client2 = connect(alias="disconnect_reused_client_test_2") + client3 = connect(alias="disconnect_reused_client_test_3", maxPoolSize=10) + assert client1 is client2 + assert client1 is not client3 + client1.admin.command("ping") + disconnect("disconnect_reused_client_test_1") + # The client is not closed because the second alias still exists. + client2.admin.command("ping") + disconnect("disconnect_reused_client_test_2") + # The client is now closed: + if PYMONGO_VERSION >= (4,): + with pytest.raises(InvalidOperation): + client2.admin.command("ping") + # 3rd client connected to the same cluster with different options + # is not closed either. + client3.admin.command("ping") + disconnect("disconnect_reused_client_test_3") + # 3rd client is now closed: + if PYMONGO_VERSION >= (4,): + with pytest.raises(InvalidOperation): + client3.admin.command("ping") + def test_disconnect_all(self): connections = mongoengine.connection._connections dbs = mongoengine.connection._dbs @@ -350,8 +421,14 @@ def test_connect_uri(self): c.mongoenginetest.system.users.delete_many({}) c.admin.command("createUser", "admin", pwd="password", roles=["root"]) - c.admin.authenticate("admin", "password") - c.admin.command("createUser", "username", pwd="password", roles=["dbOwner"]) + + adminadmin_settings = mongoengine.connection._connection_settings[ + "adminadmin" + ] = mongoengine.connection._connection_settings["admin"].copy() + adminadmin_settings["username"] = "admin" + adminadmin_settings["password"] = "password" + ca = connect(db="mongoenginetest", alias="adminadmin") + ca.admin.command("createUser", "username", pwd="password", roles=["dbOwner"]) connect( "testdb_uri", host="mongodb://username:password@localhost/mongoenginetest" @@ -404,8 +481,14 @@ def test_uri_without_credentials_doesnt_override_conn_settings(self): # OperationFailure means that mongoengine attempted authentication # w/ the provided username/password and failed - that's 
the desired # behavior. If the MongoDB URI would override the credentials - with pytest.raises(OperationFailure): - get_db() + if PYMONGO_VERSION >= (4,): + with pytest.raises(OperationFailure): + db = get_db() + # pymongo 4.x does not call db.authenticate and needs to perform an operation to trigger the failure + db.list_collection_names() + else: + with pytest.raises(OperationFailure): + get_db() def test_connect_uri_with_authsource(self): """Ensure that the connect() method works well with `authSource` @@ -482,7 +565,10 @@ def test_connection_pool_via_kwarg(self): conn = connect( "mongoenginetest", alias="max_pool_size_via_kwarg", **pool_size_kwargs ) - assert conn.max_pool_size == 100 + if PYMONGO_VERSION >= (4,): + assert conn.options.pool_options.max_pool_size == 100 + else: + assert conn.max_pool_size == 100 def test_connection_pool_via_uri(self): """Ensure we can specify a max connection pool size using @@ -492,7 +578,10 @@ def test_connection_pool_via_uri(self): host="mongodb://localhost/test?maxpoolsize=100", alias="max_pool_size_via_uri", ) - assert conn.max_pool_size == 100 + if PYMONGO_VERSION >= (4,): + assert conn.options.pool_options.max_pool_size == 100 + else: + assert conn.max_pool_size == 100 def test_write_concern(self): """Ensure write concern can be specified in connect() via @@ -567,10 +656,54 @@ def test_connect_2_databases_uses_same_client_if_only_dbname_differs(self): assert c1 is c2 def test_connect_2_databases_uses_different_client_if_different_parameters(self): - c1 = connect(alias="testdb1", db="testdb1", username="u1") - c2 = connect(alias="testdb2", db="testdb2", username="u2") + c1 = connect(alias="testdb1", db="testdb1", username="u1", password="pass") + c2 = connect(alias="testdb2", db="testdb2", username="u2", password="pass") assert c1 is not c2 + def test_connect_uri_uuidrepresentation_set_in_uri(self): + rand = random_str() + tmp_conn = connect( + alias=rand, + 
host=f"mongodb://localhost:27017/{rand}?uuidRepresentation=csharpLegacy", + ) + assert ( + tmp_conn.options.codec_options.uuid_representation + == pymongo.common._UUID_REPRESENTATIONS["csharpLegacy"] + ) + disconnect(rand) + + def test_connect_uri_uuidrepresentation_set_as_arg(self): + rand = random_str() + tmp_conn = connect(alias=rand, db=rand, uuidRepresentation="javaLegacy") + assert ( + tmp_conn.options.codec_options.uuid_representation + == pymongo.common._UUID_REPRESENTATIONS["javaLegacy"] + ) + disconnect(rand) + + def test_connect_uri_uuidrepresentation_set_both_arg_and_uri_arg_prevail(self): + rand = random_str() + tmp_conn = connect( + alias=rand, + host=f"mongodb://localhost:27017/{rand}?uuidRepresentation=csharpLegacy", + uuidRepresentation="javaLegacy", + ) + assert ( + tmp_conn.options.codec_options.uuid_representation + == pymongo.common._UUID_REPRESENTATIONS["javaLegacy"] + ) + disconnect(rand) + + def test_connect_uri_uuidrepresentation_default_to_pythonlegacy(self): + # To be changed soon to unspecified + rand = random_str() + tmp_conn = connect(alias=rand, db=rand) + assert ( + tmp_conn.options.codec_options.uuid_representation + == pymongo.common._UUID_REPRESENTATIONS["pythonLegacy"] + ) + disconnect(rand) + if __name__ == "__main__": unittest.main() diff --git a/tests/test_connection_mongomock.py b/tests/test_connection_mongomock.py index 6f9e2d269..f8316501f 100644 --- a/tests/test_connection_mongomock.py +++ b/tests/test_connection_mongomock.py @@ -32,45 +32,76 @@ def tearDown(self): mongoengine.connection._connections = {} mongoengine.connection._dbs = {} + @require_mongomock + def test_connect_raise_if_mongomock_uri_provided(self): + with pytest.raises( + Exception, match="Use of mongomock:// URI or 'is_mock' were removed" + ): + connect("test", host="mongomock://localhost") + + @require_mongomock + def test_connect_raise_if_is_mock_provided(self): + with pytest.raises( + Exception, match="Use of mongomock:// URI or 'is_mock' were removed" 
+ ): + connect("test", host="mongodb://localhost", is_mock=True) + @require_mongomock def test_connect_in_mocking(self): """Ensure that the connect() method works properly in mocking.""" - connect("mongoenginetest", host="mongomock://localhost") + connect( + "mongoenginetest", + host="mongodb://localhost", + mongo_client_class=mongomock.MongoClient, + ) conn = get_connection() assert isinstance(conn, mongomock.MongoClient) - connect("mongoenginetest2", host="mongomock://localhost", alias="testdb2") + connect( + "mongoenginetest2", + host="mongodb://localhost", + mongo_client_class=mongomock.MongoClient, + alias="testdb2", + ) conn = get_connection("testdb2") assert isinstance(conn, mongomock.MongoClient) connect( "mongoenginetest3", host="mongodb://localhost", - is_mock=True, + mongo_client_class=mongomock.MongoClient, alias="testdb3", ) conn = get_connection("testdb3") assert isinstance(conn, mongomock.MongoClient) - connect("mongoenginetest4", is_mock=True, alias="testdb4") + connect( + "mongoenginetest4", + mongo_client_class=mongomock.MongoClient, + alias="testdb4", + ) conn = get_connection("testdb4") assert isinstance(conn, mongomock.MongoClient) connect( host="mongodb://localhost:27017/mongoenginetest5", - is_mock=True, + mongo_client_class=mongomock.MongoClient, alias="testdb5", ) conn = get_connection("testdb5") assert isinstance(conn, mongomock.MongoClient) - connect(host="mongomock://localhost:27017/mongoenginetest6", alias="testdb6") + connect( + host="mongodb://localhost:27017/mongoenginetest6", + mongo_client_class=mongomock.MongoClient, + alias="testdb6", + ) conn = get_connection("testdb6") assert isinstance(conn, mongomock.MongoClient) connect( - host="mongomock://localhost:27017/mongoenginetest7", - is_mock=True, + host="mongodb://localhost:27017/mongoenginetest7", + mongo_client_class=mongomock.MongoClient, alias="testdb7", ) conn = get_connection("testdb7") @@ -84,7 +115,10 @@ def test_default_database_with_mocking(self): class 
SomeDocument(Document): pass - conn = connect(host="mongomock://localhost:27017/mongoenginetest") + conn = connect( + host="mongodb://localhost:27017/mongoenginetest", + mongo_client_class=mongomock.MongoClient, + ) some_document = SomeDocument() # database won't exist until we save a document some_document.save() @@ -96,13 +130,16 @@ class SomeDocument(Document): def test_basic_queries_against_mongomock(self): disconnect_all() - connect(host="mongomock://localhost:27017/mongoenginetest") + connect( + host="mongodb://localhost:27017/mongoenginetest", + mongo_client_class=mongomock.MongoClient, + ) class Person(Document): name = StringField() Person.drop_collection() - assert Person.objects.count() == 0 + assert Person.objects.limit(0).count(with_limit_and_skip=True) == 0 bob = Person(name="Bob").save() john = Person(name="John").save() @@ -129,35 +166,38 @@ def test_connect_with_host_list(self): Uses mongomock to test w/o needing multiple mongod/mongos processes """ - connect(host=["mongomock://localhost"]) + connect(host=["mongodb://localhost"], mongo_client_class=mongomock.MongoClient) conn = get_connection() assert isinstance(conn, mongomock.MongoClient) - connect(host=["mongodb://localhost"], is_mock=True, alias="testdb2") - conn = get_connection("testdb2") - assert isinstance(conn, mongomock.MongoClient) - - connect(host=["localhost"], is_mock=True, alias="testdb3") + connect( + host=["localhost"], + mongo_client_class=mongomock.MongoClient, + alias="testdb3", + ) conn = get_connection("testdb3") assert isinstance(conn, mongomock.MongoClient) connect( - host=["mongomock://localhost:27017", "mongomock://localhost:27018"], + host=["mongodb://localhost:27017", "mongodb://localhost:27018"], alias="testdb4", + mongo_client_class=mongomock.MongoClient, ) conn = get_connection("testdb4") assert isinstance(conn, mongomock.MongoClient) connect( host=["mongodb://localhost:27017", "mongodb://localhost:27018"], - is_mock=True, + mongo_client_class=mongomock.MongoClient, 
alias="testdb5", ) conn = get_connection("testdb5") assert isinstance(conn, mongomock.MongoClient) connect( - host=["localhost:27017", "localhost:27018"], is_mock=True, alias="testdb6" + host=["localhost:27017", "localhost:27018"], + mongo_client_class=mongomock.MongoClient, + alias="testdb6", ) conn = get_connection("testdb6") assert isinstance(conn, mongomock.MongoClient) diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index a23cca394..daf9a2c18 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -77,7 +77,6 @@ class Group(Document): assert 1 == Group.objects.count() with switch_db(Group, "testdb-1") as Group: - assert 0 == Group.objects.count() Group(name="hello").save() @@ -105,7 +104,6 @@ class Group(Document): assert 1 == Group.objects.count() with switch_collection(Group, "group1") as Group: - assert 0 == Group.objects.count() Group(name="hello - group1").save() @@ -270,17 +268,23 @@ def test_query_counter_temporarily_modifies_profiling_level(self): connect("mongoenginetest") db = get_db() - initial_profiling_level = db.profiling_level() + def _current_profiling_level(): + return db.command({"profile": -1})["was"] + + def _set_profiling_level(lvl): + db.command({"profile": lvl}) + + initial_profiling_level = _current_profiling_level() try: new_level = 1 - db.set_profiling_level(new_level) - assert db.profiling_level() == new_level + _set_profiling_level(new_level) + assert _current_profiling_level() == new_level with query_counter(): - assert db.profiling_level() == 2 - assert db.profiling_level() == new_level + assert _current_profiling_level() == 2 + assert _current_profiling_level() == new_level except Exception: - db.set_profiling_level( + _set_profiling_level( initial_profiling_level ) # Ensures it gets reseted no matter the outcome of the test raise diff --git a/tests/test_dereference.py b/tests/test_dereference.py index bddcc5432..224538312 100644 --- a/tests/test_dereference.py +++ 
b/tests/test_dereference.py @@ -1116,8 +1116,8 @@ class Baz(Document): foo.save() foo.reload() - assert type(foo.bar) == Bar - assert type(foo.baz) == Baz + assert isinstance(foo.bar, Bar) + assert isinstance(foo.baz, Baz) def test_document_reload_reference_integrity(self): """ diff --git a/tests/test_pymongo_support.py b/tests/test_pymongo_support.py new file mode 100644 index 000000000..37bfc9755 --- /dev/null +++ b/tests/test_pymongo_support.py @@ -0,0 +1,16 @@ +from mongoengine import Document +from mongoengine.pymongo_support import count_documents +from tests.utils import MongoDBTestCase + + +class TestPymongoSupport(MongoDBTestCase): + def test_count_documents(self): + class Test(Document): + pass + + Test.drop_collection() + Test().save() + Test().save() + assert count_documents(Test._get_collection(), filter={}) == 2 + assert count_documents(Test._get_collection(), filter={}, skip=1) == 1 + assert count_documents(Test._get_collection(), filter={}, limit=0) == 0 diff --git a/tests/test_signals.py b/tests/test_signals.py index 1d84347d7..3ee9685dc 100644 --- a/tests/test_signals.py +++ b/tests/test_signals.py @@ -97,7 +97,6 @@ def post_bulk_insert(cls, sender, documents, **kwargs): Author.id.set_next_value(0) class Another(Document): - name = StringField() def __unicode__(self): diff --git a/tests/utils.py b/tests/utils.py index a05b9c14b..7d0eb33f2 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -37,6 +37,10 @@ def requires_mongodb_lt_42(func): return _decorated_with_ver_requirement(func, (4, 2), oper=operator.lt) +def requires_mongodb_gte_42(func): + return _decorated_with_ver_requirement(func, (4, 2), oper=operator.ge) + + def requires_mongodb_gte_44(func): return _decorated_with_ver_requirement(func, (4, 4), oper=operator.ge) @@ -67,7 +71,7 @@ def _inner(*args, **kwargs): return func(*args, **kwargs) pretty_version = ".".join(str(n) for n in mongo_version_req) - pytest.skip(f"Needs MongoDB v{pretty_version}+") + pytest.skip(f"Needs MongoDB 
{oper.__name__} v{pretty_version}") _inner.__name__ = func.__name__ _inner.__doc__ = func.__doc__ diff --git a/tox.ini b/tox.ini index 937544d27..1608ebd17 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = pypy3-{mg34,mg36,mg39,mg311} +envlist = pypy3-{mg34,mg36,mg39,mg311,mg312,mg4,mg432,mg441,mg460} [testenv] commands = @@ -9,5 +9,10 @@ deps = mg36: pymongo>=3.6,<3.7 mg39: pymongo>=3.9,<3.10 mg311: pymongo>=3.11,<3.12 + mg312: pymongo>=3.12,<3.13 + mg4: pymongo>=4.0,<4.1 + mg432: pymongo>=4.3,<4.4 + mg441: pymongo>=4.4,<4.5 + mg460: pymongo>=4.6,<4.7 setenv = PYTHON_EGG_CACHE = {envdir}/python-eggs