diff --git a/awx/lib/site-packages/mongoengine/__init__.py b/awx/lib/site-packages/mongoengine/__init__.py
new file mode 100644
index 0000000000..b215181adc
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/__init__.py
@@ -0,0 +1,26 @@
+import document
+from document import *
+import fields
+from fields import *
+import connection
+from connection import *
+import queryset
+from queryset import *
+import signals
+from signals import *
+from errors import *
+import errors
+import django
+
+__all__ = (list(document.__all__) + fields.__all__ + connection.__all__ +
+ list(queryset.__all__) + signals.__all__ + list(errors.__all__))
+
+VERSION = (0, 9, 0)
+
+
+def get_version():
+ if isinstance(VERSION[-1], basestring):
+ return '.'.join(map(str, VERSION[:-1])) + VERSION[-1]
+ return '.'.join(map(str, VERSION))
+
+__version__ = get_version()
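+
+# Illustrative note: get_version() joins the numeric parts of VERSION with
+# dots and appends a trailing string part directly, so (0, 9, 0) renders as
+# '0.9.0' while a hypothetical (0, 9, 'rc1') would render as '0.9rc1'.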
diff --git a/awx/lib/site-packages/mongoengine/base/__init__.py b/awx/lib/site-packages/mongoengine/base/__init__.py
new file mode 100644
index 0000000000..e8d4b6ad96
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/base/__init__.py
@@ -0,0 +1,8 @@
+from mongoengine.base.common import *
+from mongoengine.base.datastructures import *
+from mongoengine.base.document import *
+from mongoengine.base.fields import *
+from mongoengine.base.metaclasses import *
+
+# Help with backwards compatibility
+from mongoengine.errors import *
diff --git a/awx/lib/site-packages/mongoengine/base/common.py b/awx/lib/site-packages/mongoengine/base/common.py
new file mode 100644
index 0000000000..3a966c792f
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/base/common.py
@@ -0,0 +1,26 @@
+from mongoengine.errors import NotRegistered
+
+__all__ = ('ALLOW_INHERITANCE', 'get_document', '_document_registry')
+
+ALLOW_INHERITANCE = False
+
+_document_registry = {}
+
+
+def get_document(name):
+ doc = _document_registry.get(name, None)
+ if not doc:
+ # Possible old style name
+ single_end = name.split('.')[-1]
+ compound_end = '.%s' % single_end
+ possible_match = [k for k in _document_registry.keys()
+ if k.endswith(compound_end) or k == single_end]
+ if len(possible_match) == 1:
+ doc = _document_registry.get(possible_match.pop(), None)
+ if not doc:
+ raise NotRegistered("""
+ `%s` has not been registered in the document registry.
+            Importing the document class automatically registers it; has it
+ been imported?
+ """.strip() % name)
+ return doc
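+
+# Usage sketch (assumes a user-defined `class User(Document)`): importing the
+# module that defines `User` registers it, after which get_document('User')
+# returns the class; looking up an unregistered name raises NotRegistered.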
diff --git a/awx/lib/site-packages/mongoengine/base/datastructures.py b/awx/lib/site-packages/mongoengine/base/datastructures.py
new file mode 100644
index 0000000000..bac67ddca0
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/base/datastructures.py
@@ -0,0 +1,449 @@
+import weakref
+import functools
+import itertools
+from mongoengine.common import _import_class
+from mongoengine.errors import DoesNotExist, MultipleObjectsReturned
+
+__all__ = ("BaseDict", "BaseList", "EmbeddedDocumentList")
+
+
+class BaseDict(dict):
+ """A special dict so we can watch any changes"""
+
+ _dereferenced = False
+ _instance = None
+ _name = None
+
+ def __init__(self, dict_items, instance, name):
+ Document = _import_class('Document')
+ EmbeddedDocument = _import_class('EmbeddedDocument')
+
+ if isinstance(instance, (Document, EmbeddedDocument)):
+ self._instance = weakref.proxy(instance)
+ self._name = name
+ return super(BaseDict, self).__init__(dict_items)
+
+ def __getitem__(self, key, *args, **kwargs):
+ value = super(BaseDict, self).__getitem__(key)
+
+ EmbeddedDocument = _import_class('EmbeddedDocument')
+ if isinstance(value, EmbeddedDocument) and value._instance is None:
+ value._instance = self._instance
+ elif not isinstance(value, BaseDict) and isinstance(value, dict):
+ value = BaseDict(value, None, '%s.%s' % (self._name, key))
+ super(BaseDict, self).__setitem__(key, value)
+ value._instance = self._instance
+ elif not isinstance(value, BaseList) and isinstance(value, list):
+ value = BaseList(value, None, '%s.%s' % (self._name, key))
+ super(BaseDict, self).__setitem__(key, value)
+ value._instance = self._instance
+ return value
+
+ def __setitem__(self, key, value, *args, **kwargs):
+ self._mark_as_changed(key)
+ return super(BaseDict, self).__setitem__(key, value)
+
+ def __delete__(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseDict, self).__delete__(*args, **kwargs)
+
+ def __delitem__(self, key, *args, **kwargs):
+ self._mark_as_changed(key)
+ return super(BaseDict, self).__delitem__(key)
+
+ def __delattr__(self, key, *args, **kwargs):
+ self._mark_as_changed(key)
+ return super(BaseDict, self).__delattr__(key)
+
+ def __getstate__(self):
+        # Drop the (unpicklable) weakref proxy before pickling
+        self._instance = None
+ self._dereferenced = False
+ return self
+
+ def __setstate__(self, state):
+ self = state
+ return self
+
+ def clear(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseDict, self).clear(*args, **kwargs)
+
+ def pop(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseDict, self).pop(*args, **kwargs)
+
+ def popitem(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseDict, self).popitem(*args, **kwargs)
+
+ def setdefault(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseDict, self).setdefault(*args, **kwargs)
+
+ def update(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseDict, self).update(*args, **kwargs)
+
+ def _mark_as_changed(self, key=None):
+ if hasattr(self._instance, '_mark_as_changed'):
+ if key:
+ self._instance._mark_as_changed('%s.%s' % (self._name, key))
+ else:
+ self._instance._mark_as_changed(self._name)
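+
+# Behaviour sketch (assumes a document `doc` with a hypothetical DictField
+# named `info`): `doc.info['x'] = 1` goes through BaseDict.__setitem__ and
+# calls doc._mark_as_changed('info.x'), so only that dotted path enters the
+# next delta instead of the whole dict.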
+
+
+class BaseList(list):
+ """A special list so we can watch any changes
+ """
+
+ _dereferenced = False
+ _instance = None
+ _name = None
+
+ def __init__(self, list_items, instance, name):
+ Document = _import_class('Document')
+ EmbeddedDocument = _import_class('EmbeddedDocument')
+
+ if isinstance(instance, (Document, EmbeddedDocument)):
+ self._instance = weakref.proxy(instance)
+ self._name = name
+ super(BaseList, self).__init__(list_items)
+
+ def __getitem__(self, key, *args, **kwargs):
+ value = super(BaseList, self).__getitem__(key)
+
+ EmbeddedDocument = _import_class('EmbeddedDocument')
+ if isinstance(value, EmbeddedDocument) and value._instance is None:
+ value._instance = self._instance
+ elif not isinstance(value, BaseDict) and isinstance(value, dict):
+ value = BaseDict(value, None, '%s.%s' % (self._name, key))
+ super(BaseList, self).__setitem__(key, value)
+ value._instance = self._instance
+ elif not isinstance(value, BaseList) and isinstance(value, list):
+ value = BaseList(value, None, '%s.%s' % (self._name, key))
+ super(BaseList, self).__setitem__(key, value)
+ value._instance = self._instance
+ return value
+
+ def __setitem__(self, key, value, *args, **kwargs):
+ if isinstance(key, slice):
+ self._mark_as_changed()
+ else:
+ self._mark_as_changed(key)
+ return super(BaseList, self).__setitem__(key, value)
+
+ def __delitem__(self, key, *args, **kwargs):
+ if isinstance(key, slice):
+ self._mark_as_changed()
+ else:
+ self._mark_as_changed(key)
+ return super(BaseList, self).__delitem__(key)
+
+ def __setslice__(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseList, self).__setslice__(*args, **kwargs)
+
+ def __delslice__(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseList, self).__delslice__(*args, **kwargs)
+
+ def __getstate__(self):
+        # Drop the (unpicklable) weakref proxy before pickling
+        self._instance = None
+ self._dereferenced = False
+ return self
+
+ def __setstate__(self, state):
+ self = state
+ return self
+
+ def append(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseList, self).append(*args, **kwargs)
+
+ def extend(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseList, self).extend(*args, **kwargs)
+
+ def insert(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseList, self).insert(*args, **kwargs)
+
+ def pop(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseList, self).pop(*args, **kwargs)
+
+ def remove(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseList, self).remove(*args, **kwargs)
+
+ def reverse(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseList, self).reverse(*args, **kwargs)
+
+ def sort(self, *args, **kwargs):
+ self._mark_as_changed()
+ return super(BaseList, self).sort(*args, **kwargs)
+
+ def _mark_as_changed(self, key=None):
+ if hasattr(self._instance, '_mark_as_changed'):
+ if key:
+ self._instance._mark_as_changed('%s.%s' % (self._name, key))
+ else:
+ self._instance._mark_as_changed(self._name)
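+
+# Behaviour sketch (assumes a hypothetical ListField `doc.tags`):
+# `doc.tags[0] = 'x'` marks the dotted path 'tags.0' as changed, whereas a
+# slice assignment like `doc.tags[1:3] = ['a', 'b']` has no single path and
+# therefore marks the whole 'tags' field as changed.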
+
+
+class EmbeddedDocumentList(BaseList):
+
+ @classmethod
+ def __match_all(cls, i, kwargs):
+ items = kwargs.items()
+ return all([
+ getattr(i, k) == v or str(getattr(i, k)) == v for k, v in items
+ ])
+
+ @classmethod
+ def __only_matches(cls, obj, kwargs):
+ if not kwargs:
+ return obj
+ return filter(lambda i: cls.__match_all(i, kwargs), obj)
+
+ def __init__(self, list_items, instance, name):
+ super(EmbeddedDocumentList, self).__init__(list_items, instance, name)
+ self._instance = instance
+
+ def filter(self, **kwargs):
+ """
+ Filters the list by only including embedded documents with the
+ given keyword arguments.
+
+ :param kwargs: The keyword arguments corresponding to the fields to
+ filter on. *Multiple arguments are treated as if they are ANDed
+ together.*
+ :return: A new ``EmbeddedDocumentList`` containing the matching
+ embedded documents.
+
+ Raises ``AttributeError`` if a given keyword is not a valid field for
+ the embedded document class.
+ """
+ values = self.__only_matches(self, kwargs)
+ return EmbeddedDocumentList(values, self._instance, self._name)
+
+ def exclude(self, **kwargs):
+ """
+ Filters the list by excluding embedded documents with the given
+ keyword arguments.
+
+ :param kwargs: The keyword arguments corresponding to the fields to
+ exclude on. *Multiple arguments are treated as if they are ANDed
+ together.*
+ :return: A new ``EmbeddedDocumentList`` containing the non-matching
+ embedded documents.
+
+ Raises ``AttributeError`` if a given keyword is not a valid field for
+ the embedded document class.
+ """
+ exclude = self.__only_matches(self, kwargs)
+ values = [item for item in self if item not in exclude]
+ return EmbeddedDocumentList(values, self._instance, self._name)
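+
+    # Usage sketch (assumes a hypothetical `Post` document whose `comments`
+    # field is an EmbeddedDocumentListField of `Comment(name=...)`):
+    #   post.comments.filter(name='Bob')   # comments where name == 'Bob'
+    #   post.comments.exclude(name='Bob')  # all the other comments
+    # Both return a new EmbeddedDocumentList bound to the same parent.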
+
+ def count(self):
+ """
+ The number of embedded documents in the list.
+
+ :return: The length of the list, equivalent to the result of ``len()``.
+ """
+ return len(self)
+
+ def get(self, **kwargs):
+ """
+ Retrieves an embedded document determined by the given keyword
+ arguments.
+
+ :param kwargs: The keyword arguments corresponding to the fields to
+ search on. *Multiple arguments are treated as if they are ANDed
+ together.*
+ :return: The embedded document matched by the given keyword arguments.
+
+ Raises ``DoesNotExist`` if the arguments used to query an embedded
+ document returns no results. ``MultipleObjectsReturned`` if more
+ than one result is returned.
+ """
+ values = self.__only_matches(self, kwargs)
+ if len(values) == 0:
+ raise DoesNotExist(
+ "%s matching query does not exist." % self._name
+ )
+ elif len(values) > 1:
+ raise MultipleObjectsReturned(
+ "%d items returned, instead of 1" % len(values)
+ )
+
+ return values[0]
+
+ def first(self):
+ """
+ Returns the first embedded document in the list, or ``None`` if empty.
+ """
+ if len(self) > 0:
+ return self[0]
+
+ def create(self, **values):
+ """
+        Creates a new embedded document and appends it to this list.
+
+ .. note::
+ The embedded document changes are not automatically saved
+ to the database after calling this method.
+
+ :param values: A dictionary of values for the embedded document.
+ :return: The new embedded document instance.
+ """
+ name = self._name
+ EmbeddedClass = self._instance._fields[name].field.document_type_obj
+ self._instance[self._name].append(EmbeddedClass(**values))
+
+ return self._instance[self._name][-1]
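+
+    # Usage sketch (same hypothetical `Post`/`Comment` setup as above):
+    #   comment = post.comments.create(name='Bob')
+    # appends a new Comment to the parent's list and returns it; the parent
+    # document must still be saved for the change to persist.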
+
+ def save(self, *args, **kwargs):
+ """
+ Saves the ancestor document.
+
+ :param args: Arguments passed up to the ancestor Document's save
+ method.
+ :param kwargs: Keyword arguments passed up to the ancestor Document's
+ save method.
+ """
+ self._instance.save(*args, **kwargs)
+
+ def delete(self):
+ """
+ Deletes the embedded documents from the database.
+
+ .. note::
+ The embedded document changes are not automatically saved
+ to the database after calling this method.
+
+ :return: The number of entries deleted.
+ """
+ values = list(self)
+ for item in values:
+ self._instance[self._name].remove(item)
+
+ return len(values)
+
+ def update(self, **update):
+ """
+ Updates the embedded documents with the given update values.
+
+ .. note::
+ The embedded document changes are not automatically saved
+ to the database after calling this method.
+
+ :param update: A dictionary of update values to apply to each
+ embedded document.
+ :return: The number of entries updated.
+ """
+ if len(update) == 0:
+ return 0
+ values = list(self)
+ for item in values:
+ for k, v in update.items():
+ setattr(item, k, v)
+
+ return len(values)
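+
+    # Usage sketch: `post.comments.update(name='Anon')` sets `name` on every
+    # embedded document in the list; following it with `post.comments.save()`
+    # persists the change by saving the ancestor document.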
+
+
+class StrictDict(object):
+ __slots__ = ()
+ _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create'])
+ _classes = {}
+ def __init__(self, **kwargs):
+ for k,v in kwargs.iteritems():
+ setattr(self, k, v)
+ def __getitem__(self, key):
+ key = '_reserved_' + key if key in self._special_fields else key
+ try:
+ return getattr(self, key)
+ except AttributeError:
+ raise KeyError(key)
+ def __setitem__(self, key, value):
+ key = '_reserved_' + key if key in self._special_fields else key
+ return setattr(self, key, value)
+ def __contains__(self, key):
+ return hasattr(self, key)
+ def get(self, key, default=None):
+ try:
+ return self[key]
+ except KeyError:
+ return default
+ def pop(self, key, default=None):
+ v = self.get(key, default)
+ try:
+ delattr(self, key)
+ except AttributeError:
+ pass
+ return v
+ def iteritems(self):
+ for key in self:
+ yield key, self[key]
+ def items(self):
+ return [(k, self[k]) for k in iter(self)]
+ def keys(self):
+ return list(iter(self))
+ def __iter__(self):
+ return (key for key in self.__slots__ if hasattr(self, key))
+ def __len__(self):
+ return len(list(self.iteritems()))
+ def __eq__(self, other):
+ return self.items() == other.items()
+    def __ne__(self, other):
+        return self.items() != other.items()
+
+ @classmethod
+ def create(cls, allowed_keys):
+ allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys)
+ allowed_keys = frozenset(allowed_keys_tuple)
+ if allowed_keys not in cls._classes:
+ class SpecificStrictDict(cls):
+ __slots__ = allowed_keys_tuple
+ def __repr__(self):
+ return "{%s}" % ', '.join('"{0!s}": {0!r}'.format(k,v) for (k,v) in self.iteritems())
+ cls._classes[allowed_keys] = SpecificStrictDict
+ return cls._classes[allowed_keys]
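+
+# Illustrative sketch: StrictDict.create(('a', 'b')) builds (and caches) a
+# __slots__-based dict-like class restricted to the keys 'a' and 'b':
+#   D = StrictDict.create(('a', 'b'))
+#   d = D(a=1)   # d['b'] raises KeyError until set; d['c'] = 1 is rejected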
+
+
+class SemiStrictDict(StrictDict):
+    __slots__ = ('_extras',)
+ _classes = {}
+ def __getattr__(self, attr):
+ try:
+ super(SemiStrictDict, self).__getattr__(attr)
+ except AttributeError:
+ try:
+ return self.__getattribute__('_extras')[attr]
+ except KeyError as e:
+ raise AttributeError(e)
+ def __setattr__(self, attr, value):
+ try:
+ super(SemiStrictDict, self).__setattr__(attr, value)
+ except AttributeError:
+ try:
+ self._extras[attr] = value
+ except AttributeError:
+ self._extras = {attr: value}
+
+ def __delattr__(self, attr):
+ try:
+ super(SemiStrictDict, self).__delattr__(attr)
+ except AttributeError:
+ try:
+ del self._extras[attr]
+ except KeyError as e:
+ raise AttributeError(e)
+
+ def __iter__(self):
+ try:
+ extras_iter = iter(self.__getattribute__('_extras'))
+ except AttributeError:
+ extras_iter = ()
+ return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter)
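+
+# Illustrative sketch: SemiStrictDict behaves like StrictDict for the declared
+# keys but spills unknown keys into a plain `_extras` dict, so
+# SemiStrictDict.create(('a',))(a=1) also accepts d['b'] = 2, at the cost of
+# slotted storage for that extra key.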
diff --git a/awx/lib/site-packages/mongoengine/base/document.py b/awx/lib/site-packages/mongoengine/base/document.py
new file mode 100644
index 0000000000..bcd7617200
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/base/document.py
@@ -0,0 +1,999 @@
+import copy
+import operator
+import numbers
+from collections import Hashable
+from functools import partial
+
+import pymongo
+from bson import json_util, ObjectId
+from bson.dbref import DBRef
+from bson.son import SON
+
+from mongoengine import signals
+from mongoengine.common import _import_class
+from mongoengine.errors import (ValidationError, InvalidDocumentError,
+ LookUpError, FieldDoesNotExist)
+from mongoengine.python_support import PY3, txt_type
+
+from mongoengine.base.common import get_document, ALLOW_INHERITANCE
+from mongoengine.base.datastructures import (
+ BaseDict,
+ BaseList,
+ EmbeddedDocumentList,
+ StrictDict,
+ SemiStrictDict
+)
+from mongoengine.base.fields import ComplexBaseField
+
+__all__ = ('BaseDocument', 'NON_FIELD_ERRORS')
+
+NON_FIELD_ERRORS = '__all__'
+
+
+class BaseDocument(object):
+ __slots__ = ('_changed_fields', '_initialised', '_created', '_data',
+ '_dynamic_fields', '_auto_id_field', '_db_field_map', '__weakref__')
+
+ _dynamic = False
+ _dynamic_lock = True
+ STRICT = False
+
+ def __init__(self, *args, **values):
+ """
+ Initialise a document or embedded document
+
+        :param __auto_convert: If True, try to cast python values to their
+            corresponding field types
+ :param values: A dictionary of values for the document
+ """
+ self._initialised = False
+ self._created = True
+ if args:
+ # Combine positional arguments with named arguments.
+ # We only want named arguments.
+ field = iter(self._fields_ordered)
+            # If it's an automatic id field, skip to the first defined field
+ if self._auto_id_field:
+ next(field)
+ for value in args:
+ name = next(field)
+ if name in values:
+ raise TypeError(
+ "Multiple values for keyword argument '" + name + "'")
+ values[name] = value
+
+ __auto_convert = values.pop("__auto_convert", True)
+
+ # 399: set default values only to fields loaded from DB
+ __only_fields = set(values.pop("__only_fields", values))
+
+ _created = values.pop("_created", True)
+
+ signals.pre_init.send(self.__class__, document=self, values=values)
+
+ # Check if there are undefined fields supplied, if so raise an
+ # Exception.
+ if not self._dynamic:
+ for var in values.keys():
+ if var not in self._fields.keys() + ['id', 'pk', '_cls', '_text_score']:
+ msg = (
+ "The field '{0}' does not exist on the document '{1}'"
+ ).format(var, self._class_name)
+ raise FieldDoesNotExist(msg)
+
+ if self.STRICT and not self._dynamic:
+ self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
+ else:
+ self._data = SemiStrictDict.create(
+ allowed_keys=self._fields_ordered)()
+
+ self._dynamic_fields = SON()
+
+ # Assign default values to instance
+ for key, field in self._fields.iteritems():
+ if self._db_field_map.get(key, key) in __only_fields:
+ continue
+ value = getattr(self, key, None)
+ setattr(self, key, value)
+
+ if "_cls" not in values:
+ self._cls = self._class_name
+
+ # Set passed values after initialisation
+ if self._dynamic:
+ dynamic_data = {}
+ for key, value in values.iteritems():
+ if key in self._fields or key == '_id':
+ setattr(self, key, value)
+ elif self._dynamic:
+ dynamic_data[key] = value
+ else:
+ FileField = _import_class('FileField')
+ for key, value in values.iteritems():
+ if key == '__auto_convert':
+ continue
+ key = self._reverse_db_field_map.get(key, key)
+ if key in self._fields or key in ('id', 'pk', '_cls'):
+ if __auto_convert and value is not None:
+ field = self._fields.get(key)
+ if field and not isinstance(field, FileField):
+ value = field.to_python(value)
+ setattr(self, key, value)
+ else:
+ self._data[key] = value
+
+ # Set any get_fieldname_display methods
+ self.__set_field_display()
+
+ if self._dynamic:
+ self._dynamic_lock = False
+ for key, value in dynamic_data.iteritems():
+ setattr(self, key, value)
+
+ # Flag initialised
+ self._initialised = True
+ self._created = _created
+ signals.post_init.send(self.__class__, document=self)
+
+ def __delattr__(self, *args, **kwargs):
+ """Handle deletions of fields"""
+ field_name = args[0]
+ if field_name in self._fields:
+ default = self._fields[field_name].default
+ if callable(default):
+ default = default()
+ setattr(self, field_name, default)
+ else:
+ super(BaseDocument, self).__delattr__(*args, **kwargs)
+
+ def __setattr__(self, name, value):
+ # Handle dynamic data only if an initialised dynamic document
+ if self._dynamic and not self._dynamic_lock:
+
+ field = None
+ if not hasattr(self, name) and not name.startswith('_'):
+ DynamicField = _import_class("DynamicField")
+ field = DynamicField(db_field=name)
+ field.name = name
+ self._dynamic_fields[name] = field
+ self._fields_ordered += (name,)
+
+ if not name.startswith('_'):
+ value = self.__expand_dynamic_values(name, value)
+
+ # Handle marking data as changed
+ if name in self._dynamic_fields:
+ self._data[name] = value
+ if hasattr(self, '_changed_fields'):
+ self._mark_as_changed(name)
+ try:
+ self__created = self._created
+ except AttributeError:
+ self__created = True
+
+ if (self._is_document and not self__created and
+ name in self._meta.get('shard_key', tuple()) and
+ self._data.get(name) != value):
+ OperationError = _import_class('OperationError')
+ msg = "Shard Keys are immutable. Tried to update %s" % name
+ raise OperationError(msg)
+
+ try:
+ self__initialised = self._initialised
+ except AttributeError:
+ self__initialised = False
+ # Check if the user has created a new instance of a class
+ if (self._is_document and self__initialised
+ and self__created and name == self._meta['id_field']):
+ super(BaseDocument, self).__setattr__('_created', False)
+
+ super(BaseDocument, self).__setattr__(name, value)
+
+ def __getstate__(self):
+ data = {}
+ for k in ('_changed_fields', '_initialised', '_created',
+ '_dynamic_fields', '_fields_ordered'):
+ if hasattr(self, k):
+ data[k] = getattr(self, k)
+ data['_data'] = self.to_mongo()
+ return data
+
+ def __setstate__(self, data):
+ if isinstance(data["_data"], SON):
+ data["_data"] = self.__class__._from_son(data["_data"])._data
+ for k in ('_changed_fields', '_initialised', '_created', '_data',
+ '_dynamic_fields'):
+ if k in data:
+ setattr(self, k, data[k])
+ if '_fields_ordered' in data:
+ setattr(type(self), '_fields_ordered', data['_fields_ordered'])
+ dynamic_fields = data.get('_dynamic_fields') or SON()
+ for k in dynamic_fields.keys():
+ setattr(self, k, data["_data"].get(k))
+
+ def __iter__(self):
+ return iter(self._fields_ordered)
+
+ def __getitem__(self, name):
+ """Dictionary-style field access, return a field's value if present.
+ """
+ try:
+ if name in self._fields_ordered:
+ return getattr(self, name)
+ except AttributeError:
+ pass
+ raise KeyError(name)
+
+ def __setitem__(self, name, value):
+ """Dictionary-style field access, set a field's value.
+ """
+        # Ensure that the field exists before setting its value
+ if not self._dynamic and name not in self._fields:
+ raise KeyError(name)
+ return setattr(self, name, value)
+
+ def __contains__(self, name):
+ try:
+ val = getattr(self, name)
+ return val is not None
+ except AttributeError:
+ return False
+
+ def __len__(self):
+ return len(self._data)
+
+ def __repr__(self):
+ try:
+ u = self.__str__()
+ except (UnicodeEncodeError, UnicodeDecodeError):
+ u = '[Bad Unicode data]'
+ repr_type = str if u is None else type(u)
+ return repr_type('<%s: %s>' % (self.__class__.__name__, u))
+
+ def __str__(self):
+ if hasattr(self, '__unicode__'):
+ if PY3:
+ return self.__unicode__()
+ else:
+ return unicode(self).encode('utf-8')
+ return txt_type('%s object' % self.__class__.__name__)
+
+ def __eq__(self, other):
+ if isinstance(other, self.__class__) and hasattr(other, 'id') and other.id is not None:
+ return self.id == other.id
+ if isinstance(other, DBRef):
+ return self._get_collection_name() == other.collection and self.id == other.id
+ if self.id is None:
+ return self is other
+ return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def __hash__(self):
+ if getattr(self, 'pk', None) is None:
+ # For new object
+ return super(BaseDocument, self).__hash__()
+ else:
+ return hash(self.pk)
+
+ def clean(self):
+ """
+ Hook for doing document level data cleaning before validation is run.
+
+ Any ValidationError raised by this method will not be associated with
+ a particular field; it will have a special-case association with the
+ field defined by NON_FIELD_ERRORS.
+ """
+ pass
+
+ def get_text_score(self):
+ """
+ Get text score from text query
+ """
+
+ if '_text_score' not in self._data:
+            raise InvalidDocumentError('This document was not built from a text query')
+
+ return self._data['_text_score']
+
+ def to_mongo(self, use_db_field=True, fields=None):
+ """
+ Return as SON data ready for use with MongoDB.
+ """
+ if not fields:
+ fields = []
+
+ data = SON()
+ data["_id"] = None
+ data['_cls'] = self._class_name
+ EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
+ # only root fields ['test1.a', 'test2'] => ['test1', 'test2']
+ root_fields = set([f.split('.')[0] for f in fields])
+
+ for field_name in self:
+ if root_fields and field_name not in root_fields:
+ continue
+
+ value = self._data.get(field_name, None)
+ field = self._fields.get(field_name)
+
+ if field is None and self._dynamic:
+ field = self._dynamic_fields.get(field_name)
+
+ if value is not None:
+
+ if isinstance(field, (EmbeddedDocumentField)):
+ if fields:
+ key = '%s.' % field_name
+ embedded_fields = [
+ i.replace(key, '') for i in fields
+ if i.startswith(key)]
+
+ else:
+ embedded_fields = []
+
+ value = field.to_mongo(value, use_db_field=use_db_field,
+ fields=embedded_fields)
+ else:
+ value = field.to_mongo(value)
+
+            # Handle self-generating fields
+ if value is None and field._auto_gen:
+ value = field.generate()
+ self._data[field_name] = value
+
+ if value is not None:
+ if use_db_field:
+ data[field.db_field] = value
+ else:
+ data[field.name] = value
+
+ # If "_id" has not been set, then try and set it
+ Document = _import_class("Document")
+ if isinstance(self, Document):
+ if data["_id"] is None:
+ data["_id"] = self._data.get("id", None)
+
+ if data['_id'] is None:
+ data.pop('_id')
+
+ # Only add _cls if allow_inheritance is True
+ if (not hasattr(self, '_meta') or
+ not self._meta.get('allow_inheritance', ALLOW_INHERITANCE)):
+ data.pop('_cls')
+
+ return data
+
+ def validate(self, clean=True):
+ """Ensure that all fields' values are valid and that required fields
+ are present.
+ """
+ # Ensure that each field is matched to a valid value
+ errors = {}
+ if clean:
+ try:
+ self.clean()
+ except ValidationError, error:
+ errors[NON_FIELD_ERRORS] = error
+
+ # Get a list of tuples of field names and their current values
+ fields = [(self._fields.get(name, self._dynamic_fields.get(name)),
+ self._data.get(name)) for name in self._fields_ordered]
+
+ EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
+ GenericEmbeddedDocumentField = _import_class(
+ "GenericEmbeddedDocumentField")
+
+ for field, value in fields:
+ if value is not None:
+ try:
+ if isinstance(field, (EmbeddedDocumentField,
+ GenericEmbeddedDocumentField)):
+ field._validate(value, clean=clean)
+ else:
+ field._validate(value)
+ except ValidationError, error:
+ errors[field.name] = error.errors or error
+ except (ValueError, AttributeError, AssertionError), error:
+ errors[field.name] = error
+ elif field.required and not getattr(field, '_auto_gen', False):
+ errors[field.name] = ValidationError('Field is required',
+ field_name=field.name)
+
+ if errors:
+ pk = "None"
+ if hasattr(self, 'pk'):
+ pk = self.pk
+ elif self._instance and hasattr(self._instance, 'pk'):
+ pk = self._instance.pk
+ message = "ValidationError (%s:%s) " % (self._class_name, pk)
+ raise ValidationError(message, errors=errors)
+
+ def to_json(self, *args, **kwargs):
+ """Converts a document to JSON.
+
+        :param use_db_field: Defaults to True, which uses the stored db_field
+            names as JSON keys; set to False to use the python field names
+            instead.
+ """
+ use_db_field = kwargs.pop('use_db_field', True)
+ return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
+
+ @classmethod
+ def from_json(cls, json_data, created=False):
+ """Converts json data to an unsaved document instance"""
+ return cls._from_son(json_util.loads(json_data), created=created)
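+
+    # Round-trip sketch (assumes a hypothetical `User(Document)` with a
+    # `name` field):
+    #   payload = user.to_json()          # keys use the stored db_field names
+    #   same = User.from_json(payload)    # an unsaved User instance
+    #   user.to_json(use_db_field=False)  # keys use the python field names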
+
+ def __expand_dynamic_values(self, name, value):
+ """expand any dynamic values to their correct types / values"""
+ if not isinstance(value, (dict, list, tuple)):
+ return value
+
+ EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
+
+ is_list = False
+ if not hasattr(value, 'items'):
+ is_list = True
+ value = dict([(k, v) for k, v in enumerate(value)])
+
+ if not is_list and '_cls' in value:
+ cls = get_document(value['_cls'])
+ return cls(**value)
+
+ data = {}
+ for k, v in value.items():
+ key = name if is_list else k
+ data[k] = self.__expand_dynamic_values(key, v)
+
+ if is_list: # Convert back to a list
+ data_items = sorted(data.items(), key=operator.itemgetter(0))
+ value = [v for k, v in data_items]
+ else:
+ value = data
+
+ # Convert lists / values so we can watch for any changes on them
+ if (isinstance(value, (list, tuple)) and
+ not isinstance(value, BaseList)):
+ if issubclass(type(self), EmbeddedDocumentListField):
+ value = EmbeddedDocumentList(value, self, name)
+ else:
+ value = BaseList(value, self, name)
+ elif isinstance(value, dict) and not isinstance(value, BaseDict):
+ value = BaseDict(value, self, name)
+
+ return value
+
+ def _mark_as_changed(self, key):
+ """Marks a key as explicitly changed by the user
+ """
+ if not key:
+ return
+
+ if not hasattr(self, '_changed_fields'):
+ return
+
+ if '.' in key:
+ key, rest = key.split('.', 1)
+ key = self._db_field_map.get(key, key)
+ key = '%s.%s' % (key, rest)
+ else:
+ key = self._db_field_map.get(key, key)
+
+ if key not in self._changed_fields:
+ self._changed_fields.append(key)
+
+ def _clear_changed_fields(self):
+ """Using get_changed_fields iterate and remove any fields that are
+ marked as changed"""
+ for changed in self._get_changed_fields():
+ parts = changed.split(".")
+ data = self
+ for part in parts:
+ if isinstance(data, list):
+ try:
+ data = data[int(part)]
+ except IndexError:
+ data = None
+ elif isinstance(data, dict):
+ data = data.get(part, None)
+ else:
+ data = getattr(data, part, None)
+ if hasattr(data, "_changed_fields"):
+ if hasattr(data, "_is_document") and data._is_document:
+ continue
+ data._changed_fields = []
+ self._changed_fields = []
+
+ def _nestable_types_changed_fields(self, changed_fields, key, data, inspected):
+ # Loop list / dict fields as they contain documents
+ # Determine the iterator to use
+ if not hasattr(data, 'items'):
+ iterator = enumerate(data)
+ else:
+ iterator = data.iteritems()
+
+ for index, value in iterator:
+ list_key = "%s%s." % (key, index)
+ # don't check anything lower if this key is already marked
+ # as changed.
+ if list_key[:-1] in changed_fields:
+ continue
+ if hasattr(value, '_get_changed_fields'):
+ changed = value._get_changed_fields(inspected)
+ changed_fields += ["%s%s" % (list_key, k)
+ for k in changed if k]
+ elif isinstance(value, (list, tuple, dict)):
+ self._nestable_types_changed_fields(
+ changed_fields, list_key, value, inspected)
+
+ def _get_changed_fields(self, inspected=None):
+ """Returns a list of all fields that have explicitly been changed.
+ """
+ EmbeddedDocument = _import_class("EmbeddedDocument")
+ DynamicEmbeddedDocument = _import_class("DynamicEmbeddedDocument")
+ ReferenceField = _import_class("ReferenceField")
+ changed_fields = []
+ changed_fields += getattr(self, '_changed_fields', [])
+
+ inspected = inspected or set()
+ if hasattr(self, 'id') and isinstance(self.id, Hashable):
+ if self.id in inspected:
+ return changed_fields
+ inspected.add(self.id)
+
+ for field_name in self._fields_ordered:
+ db_field_name = self._db_field_map.get(field_name, field_name)
+ key = '%s.' % db_field_name
+ data = self._data.get(field_name, None)
+ field = self._fields.get(field_name)
+
+ if hasattr(data, 'id'):
+ if data.id in inspected:
+ continue
+ inspected.add(data.id)
+ if isinstance(field, ReferenceField):
+ continue
+ elif (isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument))
+ and db_field_name not in changed_fields):
+ # Find all embedded fields that have been changed
+ changed = data._get_changed_fields(inspected)
+ changed_fields += ["%s%s" % (key, k) for k in changed if k]
+ elif (isinstance(data, (list, tuple, dict)) and
+ db_field_name not in changed_fields):
+ if (hasattr(field, 'field') and
+ isinstance(field.field, ReferenceField)):
+ continue
+ self._nestable_types_changed_fields(
+ changed_fields, key, data, inspected)
+ return changed_fields
+
+ def _delta(self):
+ """Returns the delta (set, unset) of the changes for a document.
+ Gets any values that have been explicitly changed.
+ """
+ # Handles cases where not loaded from_son but has _id
+ doc = self.to_mongo()
+
+ set_fields = self._get_changed_fields()
+ unset_data = {}
+ parts = []
+ if hasattr(self, '_changed_fields'):
+ set_data = {}
+ # Fetch each set item from its path
+ for path in set_fields:
+ parts = path.split('.')
+ d = doc
+ new_path = []
+ for p in parts:
+ if isinstance(d, (ObjectId, DBRef)):
+ break
+ elif isinstance(d, list) and p.isdigit():
+ try:
+ d = d[int(p)]
+ except IndexError:
+ d = None
+ elif hasattr(d, 'get'):
+ d = d.get(p)
+ new_path.append(p)
+ path = '.'.join(new_path)
+ set_data[path] = d
+ else:
+ set_data = doc
+ if '_id' in set_data:
+ del(set_data['_id'])
+
+ # Determine if any changed items were actually unset.
+ for path, value in set_data.items():
+ if value or isinstance(value, (numbers.Number, bool)):
+ continue
+
+            # If we've set a value that isn't the default, don't unset it.
+ default = None
+ if (self._dynamic and len(parts) and parts[0] in
+ self._dynamic_fields):
+ del(set_data[path])
+ unset_data[path] = 1
+ continue
+ elif path in self._fields:
+ default = self._fields[path].default
+ else: # Perform a full lookup for lists / embedded lookups
+ d = self
+ parts = path.split('.')
+ db_field_name = parts.pop()
+ for p in parts:
+ if isinstance(d, list) and p.isdigit():
+ d = d[int(p)]
+ elif (hasattr(d, '__getattribute__') and
+ not isinstance(d, dict)):
+ real_path = d._reverse_db_field_map.get(p, p)
+ d = getattr(d, real_path)
+ else:
+ d = d.get(p)
+
+ if hasattr(d, '_fields'):
+ field_name = d._reverse_db_field_map.get(db_field_name,
+ db_field_name)
+ if field_name in d._fields:
+ default = d._fields.get(field_name).default
+ else:
+ default = None
+
+ if default is not None:
+ if callable(default):
+ default = default()
+
+ if default != value:
+ continue
+
+ del(set_data[path])
+ unset_data[path] = 1
+ return set_data, unset_data
+
+ @classmethod
+ def _get_collection_name(cls):
+ """Returns the collection name for this class.
+ """
+ return cls._meta.get('collection', None)
+
+ @classmethod
+ def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False):
+ """Create an instance of a Document (subclass) from a PyMongo SON.
+ """
+ if not only_fields:
+ only_fields = []
+
+ # get the class name from the document, falling back to the given
+ # class if unavailable
+ class_name = son.get('_cls', cls._class_name)
+ data = dict(("%s" % key, value) for key, value in son.iteritems())
+
+ # Return correct subclass for document type
+ if class_name != cls._class_name:
+ cls = get_document(class_name)
+
+ changed_fields = []
+ errors_dict = {}
+
+ fields = cls._fields
+ if not _auto_dereference:
+ fields = copy.copy(fields)
+
+ for field_name, field in fields.iteritems():
+ field._auto_dereference = _auto_dereference
+ if field.db_field in data:
+ value = data[field.db_field]
+ try:
+ data[field_name] = (value if value is None
+ else field.to_python(value))
+ if field_name != field.db_field:
+ del data[field.db_field]
+ except (AttributeError, ValueError), e:
+ errors_dict[field_name] = e
+ elif field.default:
+ default = field.default
+ if callable(default):
+ default = default()
+ if isinstance(default, BaseDocument):
+ changed_fields.append(field_name)
+ elif not only_fields or field_name in only_fields:
+ changed_fields.append(field_name)
+
+ if errors_dict:
+ errors = "\n".join(["%s - %s" % (k, v)
+ for k, v in errors_dict.items()])
+ msg = ("Invalid data to create a `%s` instance.\n%s"
+ % (cls._class_name, errors))
+ raise InvalidDocumentError(msg)
+
+ if cls.STRICT:
+ data = dict((k, v)
+ for k, v in data.iteritems() if k in cls._fields)
+ obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data)
+ obj._changed_fields = changed_fields
+ if not _auto_dereference:
+ obj._fields = fields
+
+ return obj
+
+ @classmethod
+ def _build_index_specs(cls, meta_indexes):
+ """Generate and merge the full index specs
+ """
+
+ geo_indices = cls._geo_indices()
+ unique_indices = cls._unique_with_indexes()
+ index_specs = [cls._build_index_spec(spec)
+ for spec in meta_indexes]
+
+ def merge_index_specs(index_specs, indices):
+ if not indices:
+ return index_specs
+
+ spec_fields = [v['fields']
+ for k, v in enumerate(index_specs)]
+ # Merge unique_indexes with existing specs
+ for k, v in enumerate(indices):
+ if v['fields'] in spec_fields:
+ index_specs[spec_fields.index(v['fields'])].update(v)
+ else:
+ index_specs.append(v)
+ return index_specs
+
+ index_specs = merge_index_specs(index_specs, geo_indices)
+ index_specs = merge_index_specs(index_specs, unique_indices)
+ return index_specs
+
+ @classmethod
+ def _build_index_spec(cls, spec):
+ """Build a PyMongo index spec from a MongoEngine index spec.
+ """
+ if isinstance(spec, basestring):
+ spec = {'fields': [spec]}
+ elif isinstance(spec, (list, tuple)):
+ spec = {'fields': list(spec)}
+ elif isinstance(spec, dict):
+ spec = dict(spec)
+
+ index_list = []
+ direction = None
+
+ # Check to see if we need to include _cls
+ allow_inheritance = cls._meta.get('allow_inheritance',
+ ALLOW_INHERITANCE)
+ include_cls = (allow_inheritance and not spec.get('sparse', False) and
+ spec.get('cls', True))
+
+ # 733: don't include cls if index_cls is False unless there is an explicit cls with the index
+ include_cls = include_cls and (spec.get('cls', False) or cls._meta.get('index_cls', True))
+ if "cls" in spec:
+ spec.pop('cls')
+ for key in spec['fields']:
+ # If inherited spec continue
+ if isinstance(key, (list, tuple)):
+ continue
+
+ # ASCENDING from +
+ # DESCENDING from -
+ # GEO2D from *
+ # TEXT from $
+ direction = pymongo.ASCENDING
+ if key.startswith("-"):
+ direction = pymongo.DESCENDING
+ elif key.startswith("*"):
+ direction = pymongo.GEO2D
+ elif key.startswith("$"):
+ direction = pymongo.TEXT
+ if key.startswith(("+", "-", "*", "$")):
+ key = key[1:]
+
+ # Use real field name, do it manually because we need field
+ # objects for the next part (list field checking)
+ parts = key.split('.')
+ if parts in (['pk'], ['id'], ['_id']):
+ key = '_id'
+ fields = []
+ else:
+ fields = cls._lookup_field(parts)
+ parts = []
+ for field in fields:
+ try:
+ if field != "_id":
+ field = field.db_field
+ except AttributeError:
+ pass
+ parts.append(field)
+ key = '.'.join(parts)
+ index_list.append((key, direction))
+
+ # Don't add cls to a geo index
+ if include_cls and direction is not pymongo.GEO2D:
+ index_list.insert(0, ('_cls', 1))
+
+ if index_list:
+ spec['fields'] = index_list
+ if spec.get('sparse', False) and len(spec['fields']) > 1:
+ raise ValueError(
+ 'Sparse indexes can only have one field in them. '
+ 'See https://jira.mongodb.org/browse/SERVER-2193')
+
+ return spec
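+
+    # Illustrative sketch of the prefix handling above: an index spec such as
+    # {'fields': ['-created', '$title']} resolves to
+    # [('created', pymongo.DESCENDING), ('title', pymongo.TEXT)], with
+    # ('_cls', 1) prepended when inheritance requires it.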
+
+ @classmethod
+ def _unique_with_indexes(cls, namespace=""):
+ """
+ Find and set unique indexes
+ """
+ unique_indexes = []
+ for field_name, field in cls._fields.items():
+ sparse = field.sparse
+ # Generate a list of indexes needed by uniqueness constraints
+ if field.unique:
+ unique_fields = [field.db_field]
+
+ # Add any unique_with fields to the back of the index spec
+ if field.unique_with:
+ if isinstance(field.unique_with, basestring):
+ field.unique_with = [field.unique_with]
+
+ # Convert unique_with field names to real field names
+ unique_with = []
+ for other_name in field.unique_with:
+ parts = other_name.split('.')
+ # Lookup real name
+ parts = cls._lookup_field(parts)
+ name_parts = [part.db_field for part in parts]
+ unique_with.append('.'.join(name_parts))
+ # Unique field should be required
+ parts[-1].required = True
+ sparse = (not sparse and
+ parts[-1].name not in cls.__dict__)
+ unique_fields += unique_with
+
+ # Add the new index to the list
+ fields = [("%s%s" % (namespace, f), pymongo.ASCENDING)
+ for f in unique_fields]
+ index = {'fields': fields, 'unique': True, 'sparse': sparse}
+ unique_indexes.append(index)
+
+ if field.__class__.__name__ == "ListField":
+ field = field.field
+
+ # Grab any embedded document field unique indexes
+ if (field.__class__.__name__ == "EmbeddedDocumentField" and
+ field.document_type != cls):
+ field_namespace = "%s." % field_name
+ doc_cls = field.document_type
+ unique_indexes += doc_cls._unique_with_indexes(field_namespace)
+
+ return unique_indexes
+
+ @classmethod
+ def _geo_indices(cls, inspected=None, parent_field=None):
+ inspected = inspected or []
+ geo_indices = []
+ inspected.append(cls)
+
+ geo_field_type_names = ["EmbeddedDocumentField", "GeoPointField",
+ "PointField", "LineStringField", "PolygonField"]
+
+ geo_field_types = tuple([_import_class(field)
+ for field in geo_field_type_names])
+
+ for field in cls._fields.values():
+ if not isinstance(field, geo_field_types):
+ continue
+ if hasattr(field, 'document_type'):
+ field_cls = field.document_type
+ if field_cls in inspected:
+ continue
+ if hasattr(field_cls, '_geo_indices'):
+ geo_indices += field_cls._geo_indices(
+ inspected, parent_field=field.db_field)
+ elif field._geo_index:
+ field_name = field.db_field
+ if parent_field:
+ field_name = "%s.%s" % (parent_field, field_name)
+ geo_indices.append({'fields':
+ [(field_name, field._geo_index)]})
+ return geo_indices
+
+ @classmethod
+ def _lookup_field(cls, parts):
+ """Lookup a field based on its attribute and return a list containing
+ the field's parents and the field.
+ """
+
+ ListField = _import_class("ListField")
+
+ if not isinstance(parts, (list, tuple)):
+ parts = [parts]
+ fields = []
+ field = None
+
+ for field_name in parts:
+ # Handle ListField indexing:
+ if field_name.isdigit() and isinstance(field, ListField):
+ new_field = field.field
+ fields.append(field_name)
+ continue
+
+ if field is None:
+ # Look up first field from the document
+ if field_name == 'pk':
+ # Deal with "primary key" alias
+ field_name = cls._meta['id_field']
+ if field_name in cls._fields:
+ field = cls._fields[field_name]
+ elif cls._dynamic:
+ DynamicField = _import_class('DynamicField')
+ field = DynamicField(db_field=field_name)
+ elif cls._meta.get("allow_inheritance", False) or cls._meta.get("abstract", False):
+ # 744: in case the field is defined in a subclass
+ field = None
+ for subcls in cls.__subclasses__():
+ try:
+ field = subcls._lookup_field([field_name])[0]
+ except LookUpError:
+ continue
+
+ if field is not None:
+ break
+ else:
+ raise LookUpError('Cannot resolve field "%s"' % field_name)
+ else:
+ raise LookUpError('Cannot resolve field "%s"'
+ % field_name)
+ else:
+ ReferenceField = _import_class('ReferenceField')
+ GenericReferenceField = _import_class('GenericReferenceField')
+ if isinstance(field, (ReferenceField, GenericReferenceField)):
+ raise LookUpError('Cannot perform join in mongoDB: %s' %
+ '__'.join(parts))
+ if hasattr(getattr(field, 'field', None), 'lookup_member'):
+ new_field = field.field.lookup_member(field_name)
+ else:
+ # Look up subfield on the previous field
+ new_field = field.lookup_member(field_name)
+ if not new_field and isinstance(field, ComplexBaseField):
+ if hasattr(field.field, 'document_type') and cls._dynamic \
+ and field.field.document_type._dynamic:
+ DynamicField = _import_class('DynamicField')
+ new_field = DynamicField(db_field=field_name)
+ else:
+ fields.append(field_name)
+ continue
+ elif not new_field and hasattr(field, 'document_type') and cls._dynamic \
+ and field.document_type._dynamic:
+ DynamicField = _import_class('DynamicField')
+ new_field = DynamicField(db_field=field_name)
+ elif not new_field:
+ raise LookUpError('Cannot resolve field "%s"'
+ % field_name)
+ field = new_field # update field to the new field type
+ fields.append(field)
+ return fields
+
+ @classmethod
+ def _translate_field_name(cls, field, sep='.'):
+ """Translate a field attribute name to a database field name.
+ """
+ parts = field.split(sep)
+ parts = [f.db_field for f in cls._lookup_field(parts)]
+ return '.'.join(parts)
+
+ def __set_field_display(self):
+ """Dynamically set the display value for a field with choices"""
+ for attr_name, field in self._fields.items():
+ if field.choices:
+ if self._dynamic:
+ obj = self
+ else:
+ obj = type(self)
+ setattr(obj,
+ 'get_%s_display' % attr_name,
+ partial(self.__get_field_display, field=field))
+
+ def __get_field_display(self, field):
+ """Returns the display value for a choice field"""
+ value = getattr(self, field.name)
+ if field.choices and isinstance(field.choices[0], (list, tuple)):
+ return dict(field.choices).get(value, value)
+ return value
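+
+# Illustrative sketch of the display helpers above: given a hypothetical field
+# `status = StringField(choices=[('d', 'Draft'), ('p', 'Published')])`, a
+# `get_status_display` method is attached automatically and returns 'Draft'
+# when the stored value is 'd'.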
diff --git a/awx/lib/site-packages/mongoengine/base/fields.py b/awx/lib/site-packages/mongoengine/base/fields.py
new file mode 100644
index 0000000000..aa16804e61
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/base/fields.py
@@ -0,0 +1,596 @@
+import operator
+import warnings
+import weakref
+
+from bson import DBRef, ObjectId, SON
+import pymongo
+
+from mongoengine.common import _import_class
+from mongoengine.errors import ValidationError
+
+from mongoengine.base.common import ALLOW_INHERITANCE
+from mongoengine.base.datastructures import (
+ BaseDict, BaseList, EmbeddedDocumentList
+)
+
+__all__ = ("BaseField", "ComplexBaseField",
+ "ObjectIdField", "GeoJsonBaseField")
+
+
+class BaseField(object):
+
+ """A base class for fields in a MongoDB document. Instances of this class
+ may be added to subclasses of `Document` to define a document's schema.
+
+ .. versionchanged:: 0.5 - added verbose and help text
+ """
+
+ name = None
+ _geo_index = False
+ _auto_gen = False # Call `generate` to generate a value
+ _auto_dereference = True
+
+ # These track each time a Field instance is created. Used to retain order.
+ # The auto_creation_counter is used for fields that MongoEngine implicitly
+ # creates, creation_counter is used for all user-specified fields.
+ creation_counter = 0
+ auto_creation_counter = -1
+
+ def __init__(self, db_field=None, name=None, required=False, default=None,
+ unique=False, unique_with=None, primary_key=False,
+ validation=None, choices=None, verbose_name=None,
+ help_text=None, null=False, sparse=False):
+ """
+ :param db_field: The database field to store this field in
+ (defaults to the name of the field)
+        :param name: Deprecated - use db_field
+ :param required: If the field is required. Whether it has to have a
+ value or not. Defaults to False.
+ :param default: (optional) The default value for this field if no value
+ has been set (or if the value has been unset). It can be a
+ callable.
+ :param unique: Is the field value unique or not. Defaults to False.
+ :param unique_with: (optional) The other field this field should be
+ unique with.
+ :param primary_key: Mark this field as the primary key. Defaults to False.
+ :param validation: (optional) A callable to validate the value of the
+ field. Generally this is deprecated in favour of the
+ `FIELD.validate` method
+ :param choices: (optional) The valid choices
+ :param verbose_name: (optional) The verbose name for the field.
+ Designed to be human readable and is often used when generating
+ model forms from the document model.
+ :param help_text: (optional) The help text for this field and is often
+ used when generating model forms from the document model.
+        :param null: (optional) Whether the field value can be null. If not,
+            and a default value is defined, the default is set instead
+ :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False`
+ means that uniqueness won't be enforced for `None` values
+ """
+ self.db_field = (db_field or name) if not primary_key else '_id'
+
+ if name:
+ msg = "Fields' 'name' attribute deprecated in favour of 'db_field'"
+ warnings.warn(msg, DeprecationWarning)
+ self.required = required or primary_key
+ self.default = default
+ self.unique = bool(unique or unique_with)
+ self.unique_with = unique_with
+ self.primary_key = primary_key
+ self.validation = validation
+ self.choices = choices
+ self.verbose_name = verbose_name
+ self.help_text = help_text
+ self.null = null
+ self.sparse = sparse
+
+ # Adjust the appropriate creation counter, and save our local copy.
+ if self.db_field == '_id':
+ self.creation_counter = BaseField.auto_creation_counter
+ BaseField.auto_creation_counter -= 1
+ else:
+ self.creation_counter = BaseField.creation_counter
+ BaseField.creation_counter += 1
+
+ def __get__(self, instance, owner):
+ """Descriptor for retrieving a value from a field in a document.
+ """
+ if instance is None:
+ # Document class being used rather than a document object
+ return self
+
+ # Get value from document instance if available
+ return instance._data.get(self.name)
+
+ def __set__(self, instance, value):
+ """Descriptor for assigning a value to a field in a document.
+ """
+
+        # If setting to None and there's a default,
+        # set the value to the default value
+ if value is None:
+ if self.null:
+ value = None
+ elif self.default is not None:
+ value = self.default
+ if callable(value):
+ value = value()
+
+ if instance._initialised:
+ try:
+ if (self.name not in instance._data or
+ instance._data[self.name] != value):
+ instance._mark_as_changed(self.name)
+            except Exception:
+                # Values can't be compared (e.g. naive and tz-aware
+                # datetimes), so mark the field as changed
+ instance._mark_as_changed(self.name)
+
+ EmbeddedDocument = _import_class('EmbeddedDocument')
+ if isinstance(value, EmbeddedDocument):
+ value._instance = weakref.proxy(instance)
+ instance._data[self.name] = value
+
+ def error(self, message="", errors=None, field_name=None):
+ """Raises a ValidationError.
+ """
+ field_name = field_name if field_name else self.name
+ raise ValidationError(message, errors=errors, field_name=field_name)
+
+ def to_python(self, value):
+ """Convert a MongoDB-compatible type to a Python type.
+ """
+ return value
+
+ def to_mongo(self, value):
+ """Convert a Python type to a MongoDB-compatible type.
+ """
+ return self.to_python(value)
+
+ def prepare_query_value(self, op, value):
+ """Prepare a value that is being used in a query for PyMongo.
+ """
+ return value
+
+ def validate(self, value, clean=True):
+ """Perform validation on a value.
+ """
+ pass
+
+ def _validate(self, value, **kwargs):
+ Document = _import_class('Document')
+ EmbeddedDocument = _import_class('EmbeddedDocument')
+
+ # Check the Choices Constraint
+ if self.choices:
+
+ choice_list = self.choices
+ if isinstance(self.choices[0], (list, tuple)):
+ choice_list = [k for k, v in self.choices]
+
+ # Choices which are other types of Documents
+ if isinstance(value, (Document, EmbeddedDocument)):
+ if not any(isinstance(value, c) for c in choice_list):
+ self.error(
+ 'Value must be instance of %s' % unicode(choice_list)
+ )
+ # Choices which are types other than Documents
+ elif value not in choice_list:
+ self.error('Value must be one of %s' % unicode(choice_list))
+
+ # check validation argument
+ if self.validation is not None:
+ if callable(self.validation):
+ if not self.validation(value):
+ self.error('Value does not match custom validation method')
+ else:
+ raise ValueError('validation argument for "%s" must be a '
+ 'callable.' % self.name)
+
+ self.validate(value, **kwargs)
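+
+# Illustrative sketch of the choices check above: for a hypothetical field
+# declared with choices=[('S', 'Small'), ('L', 'Large')], _validate('M')
+# raises ValidationError because 'M' is not among the stored keys ('S', 'L');
+# flat choices such as choices=('S', 'L') are compared directly.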
+
+
+class ComplexBaseField(BaseField):
+
+ """Handles complex fields, such as lists / dictionaries.
+
+ Allows for nesting of embedded documents inside complex types.
+ Handles the lazy dereferencing of a queryset by lazily dereferencing all
+ items in a list / dict rather than one at a time.
+
+ .. versionadded:: 0.5
+ """
+
+ field = None
+
+ def __get__(self, instance, owner):
+ """Descriptor to automatically dereference references.
+ """
+ if instance is None:
+ # Document class being used rather than a document object
+ return self
+
+ ReferenceField = _import_class('ReferenceField')
+ GenericReferenceField = _import_class('GenericReferenceField')
+ EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField')
+ dereference = (self._auto_dereference and
+ (self.field is None or isinstance(self.field,
+ (GenericReferenceField, ReferenceField))))
+
+ _dereference = _import_class("DeReference")()
+
+ self._auto_dereference = instance._fields[self.name]._auto_dereference
+ if instance._initialised and dereference and instance._data.get(self.name):
+ instance._data[self.name] = _dereference(
+ instance._data.get(self.name), max_depth=1, instance=instance,
+ name=self.name
+ )
+
+ value = super(ComplexBaseField, self).__get__(instance, owner)
+
+ # Convert lists / values so we can watch for any changes on them
+ if isinstance(value, (list, tuple)):
+ if (issubclass(type(self), EmbeddedDocumentListField) and
+ not isinstance(value, EmbeddedDocumentList)):
+ value = EmbeddedDocumentList(value, instance, self.name)
+ elif not isinstance(value, BaseList):
+ value = BaseList(value, instance, self.name)
+ instance._data[self.name] = value
+ elif isinstance(value, dict) and not isinstance(value, BaseDict):
+ value = BaseDict(value, instance, self.name)
+ instance._data[self.name] = value
+
+ if (self._auto_dereference and instance._initialised and
+ isinstance(value, (BaseList, BaseDict))
+ and not value._dereferenced):
+ value = _dereference(
+ value, max_depth=1, instance=instance, name=self.name
+ )
+ value._dereferenced = True
+ instance._data[self.name] = value
+
+ return value
+
+ def to_python(self, value):
+ """Convert a MongoDB-compatible type to a Python type.
+ """
+ Document = _import_class('Document')
+
+ if isinstance(value, basestring):
+ return value
+
+ if hasattr(value, 'to_python'):
+ return value.to_python()
+
+ is_list = False
+ if not hasattr(value, 'items'):
+ try:
+ is_list = True
+ value = dict([(k, v) for k, v in enumerate(value)])
+ except TypeError: # Not iterable return the value
+ return value
+
+ if self.field:
+ value_dict = dict([(key, self.field.to_python(item))
+ for key, item in value.items()])
+ else:
+ value_dict = {}
+ for k, v in value.items():
+ if isinstance(v, Document):
+ # We need the id from the saved object to create the DBRef
+ if v.pk is None:
+ self.error('You can only reference documents once they'
+ ' have been saved to the database')
+ collection = v._get_collection_name()
+ value_dict[k] = DBRef(collection, v.pk)
+ elif hasattr(v, 'to_python'):
+ value_dict[k] = v.to_python()
+ else:
+ value_dict[k] = self.to_python(v)
+
+ if is_list: # Convert back to a list
+ return [v for k, v in sorted(value_dict.items(),
+ key=operator.itemgetter(0))]
+ return value_dict
+
+ def to_mongo(self, value):
+ """Convert a Python type to a MongoDB-compatible type.
+ """
+ Document = _import_class("Document")
+ EmbeddedDocument = _import_class("EmbeddedDocument")
+ GenericReferenceField = _import_class("GenericReferenceField")
+
+ if isinstance(value, basestring):
+ return value
+
+ if hasattr(value, 'to_mongo'):
+ if isinstance(value, Document):
+ return GenericReferenceField().to_mongo(value)
+ cls = value.__class__
+ val = value.to_mongo()
+            # If it's a document that's not inherited, add _cls
+ if (isinstance(value, EmbeddedDocument)):
+ val['_cls'] = cls.__name__
+ return val
+
+ is_list = False
+ if not hasattr(value, 'items'):
+ try:
+ is_list = True
+ value = dict([(k, v) for k, v in enumerate(value)])
+ except TypeError: # Not iterable return the value
+ return value
+
+ if self.field:
+ value_dict = dict([(key, self.field.to_mongo(item))
+ for key, item in value.iteritems()])
+ else:
+ value_dict = {}
+ for k, v in value.iteritems():
+ if isinstance(v, Document):
+ # We need the id from the saved object to create the DBRef
+ if v.pk is None:
+ self.error('You can only reference documents once they'
+ ' have been saved to the database')
+
+                    # If it's a document that is not inheritable it won't
+                    # have any _cls data, so make it a generic reference,
+                    # which allows us to dereference it
+ meta = getattr(v, '_meta', {})
+ allow_inheritance = (
+ meta.get('allow_inheritance', ALLOW_INHERITANCE)
+ is True)
+ if not allow_inheritance and not self.field:
+ value_dict[k] = GenericReferenceField().to_mongo(v)
+ else:
+ collection = v._get_collection_name()
+ value_dict[k] = DBRef(collection, v.pk)
+ elif hasattr(v, 'to_mongo'):
+ cls = v.__class__
+ val = v.to_mongo()
+ # If it's a document that's not inherited, add _cls
+ if (isinstance(v, (Document, EmbeddedDocument))):
+ val['_cls'] = cls.__name__
+ value_dict[k] = val
+ else:
+ value_dict[k] = self.to_mongo(v)
+
+ if is_list: # Convert back to a list
+ return [v for k, v in sorted(value_dict.items(),
+ key=operator.itemgetter(0))]
+ return value_dict
+
+ def validate(self, value):
+ """If field is provided ensure the value is valid.
+ """
+ errors = {}
+ if self.field:
+ if hasattr(value, 'iteritems') or hasattr(value, 'items'):
+ sequence = value.iteritems()
+ else:
+ sequence = enumerate(value)
+ for k, v in sequence:
+ try:
+ self.field._validate(v)
+ except ValidationError, error:
+ errors[k] = error.errors or error
+ except (ValueError, AssertionError), error:
+ errors[k] = error
+
+ if errors:
+ field_class = self.field.__class__.__name__
+ self.error('Invalid %s item (%s)' % (field_class, value),
+ errors=errors)
+ # Don't allow empty values if required
+ if self.required and not value:
+ self.error('Field is required and cannot be empty')
+
+ def prepare_query_value(self, op, value):
+ return self.to_mongo(value)
+
+ def lookup_member(self, member_name):
+ if self.field:
+ return self.field.lookup_member(member_name)
+ return None
+
+ def _set_owner_document(self, owner_document):
+ if self.field:
+ self.field.owner_document = owner_document
+ self._owner_document = owner_document
+
+ def _get_owner_document(self, owner_document):
+ self._owner_document = owner_document
+
+ owner_document = property(_get_owner_document, _set_owner_document)
+
+
+class ObjectIdField(BaseField):
+
+ """A field wrapper around MongoDB's ObjectIds.
+ """
+
+ def to_python(self, value):
+ if not isinstance(value, ObjectId):
+ value = ObjectId(value)
+ return value
+
+ def to_mongo(self, value):
+ if not isinstance(value, ObjectId):
+ try:
+ return ObjectId(unicode(value))
+ except Exception, e:
+ # e.message attribute has been deprecated since Python 2.6
+ self.error(unicode(e))
+ return value
+
+ def prepare_query_value(self, op, value):
+ return self.to_mongo(value)
+
+ def validate(self, value):
+ try:
+ ObjectId(unicode(value))
+ except Exception:
+ self.error('Invalid Object ID')
+
+
+class GeoJsonBaseField(BaseField):
+
+ """A geo json field storing a geojson style object.
+
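+ Example of the stored shape (illustrative, for a point-type
+ subclass)::
+
+ {'type': 'Point', 'coordinates': [40.0, 5.0]}
+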
+ .. versionadded:: 0.8
+ """
+
+ _geo_index = pymongo.GEOSPHERE
+ _type = "GeoBase"
+
+ def __init__(self, auto_index=True, *args, **kwargs):
+ """
+ :param bool auto_index: Automatically create a "2dsphere" index.\
+ Defaults to `True`.
+ """
+ self._name = "%sField" % self._type
+ if not auto_index:
+ self._geo_index = False
+ super(GeoJsonBaseField, self).__init__(*args, **kwargs)
+
+ def validate(self, value):
+ """Validate the GeoJson object based on its type
+ """
+ if isinstance(value, dict):
+ if set(value.keys()) == set(['type', 'coordinates']):
+ if value['type'] != self._type:
+ self.error('%s type must be "%s"' %
+ (self._name, self._type))
+ return self.validate(value['coordinates'])
+ else:
+ self.error('%s can only accept a valid GeoJson dictionary'
+ ' or lists of (x, y)' % self._name)
+ return
+ elif not isinstance(value, (list, tuple)):
+ self.error('%s can only accept lists of [x, y]' % self._name)
+ return
+
+ validate = getattr(self, "_validate_%s" % self._type.lower())
+ error = validate(value)
+ if error:
+ self.error(error)
+
+ def _validate_polygon(self, value, top_level=True):
+ if not isinstance(value, (list, tuple)):
+ return 'Polygons must contain list of linestrings'
+
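+ # Expected structure (illustrative): a list of closed linestrings, e.g.
+ # [[[0, 0], [1, 0], [1, 1], [0, 0]]]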
+ # Quick and dirty validator
+ try:
+ value[0][0][0]
+ except (IndexError, TypeError, KeyError):
+ return "Invalid Polygon: must contain at least one valid linestring"
+
+ errors = []
+ for val in value:
+ error = self._validate_linestring(val, False)
+ if not error and val[0] != val[-1]:
+ error = 'LineStrings must start and end at the same point'
+ if error and error not in errors:
+ errors.append(error)
+ if errors:
+ if top_level:
+ return "Invalid Polygon:\n%s" % ", ".join(errors)
+ else:
+ return "%s" % ", ".join(errors)
+
+ def _validate_linestring(self, value, top_level=True):
+ """Validates a linestring"""
+ if not isinstance(value, (list, tuple)):
+ return 'LineStrings must contain list of coordinate pairs'
+
+ # Quick and dirty validator
+ try:
+ value[0][0]
+ except (IndexError, TypeError, KeyError):
+ return "Invalid LineString: must contain at least one valid point"
+
+ errors = []
+ for val in value:
+ error = self._validate_point(val)
+ if error and error not in errors:
+ errors.append(error)
+ if errors:
+ if top_level:
+ return "Invalid LineString:\n%s" % ", ".join(errors)
+ else:
+ return "%s" % ", ".join(errors)
+
+ def _validate_point(self, value):
+ """Validate each set of coords"""
+ if not isinstance(value, (list, tuple)):
+ return 'Points must be a list of coordinate pairs'
+ elif not len(value) == 2:
+ return "Value (%s) must be a two-dimensional point" % repr(value)
+ elif (not isinstance(value[0], (float, int)) or
+ not isinstance(value[1], (float, int))):
+ return "Both values (%s) in point must be float or int" % repr(value)
+
+ def _validate_multipoint(self, value):
+ if not isinstance(value, (list, tuple)):
+ return 'MultiPoint must be a list of Point'
+
+ # Quick and dirty validator
+ try:
+ value[0][0]
+ except (IndexError, TypeError, KeyError):
+ return "Invalid MultiPoint: must contain at least one valid point"
+
+ errors = []
+ for point in value:
+ error = self._validate_point(point)
+ if error and error not in errors:
+ errors.append(error)
+
+ if errors:
+ return "%s" % ", ".join(errors)
+
+ def _validate_multilinestring(self, value, top_level=True):
+ if not isinstance(value, (list, tuple)):
+ return 'MultiLineString must be a list of LineString'
+
+ # Quick and dirty validator
+ try:
+ value[0][0][0]
+ except (IndexError, TypeError, KeyError):
+ return "Invalid MultiLineString: must contain at least one valid linestring"
+
+ errors = []
+ for linestring in value:
+ error = self._validate_linestring(linestring, False)
+ if error and error not in errors:
+ errors.append(error)
+
+ if errors:
+ if top_level:
+ return "Invalid MultiLineString:\n%s" % ", ".join(errors)
+ else:
+ return "%s" % ", ".join(errors)
+
+ def _validate_multipolygon(self, value):
+ if not isinstance(value, (list, tuple)):
+ return 'MultiPolygon must be a list of Polygon'
+
+ # Quick and dirty validator
+ try:
+ value[0][0][0][0]
+ except (IndexError, TypeError, KeyError):
+ return "Invalid MultiPolygon: must contain at least one valid Polygon"
+
+ errors = []
+ for polygon in value:
+ error = self._validate_polygon(polygon, False)
+ if error and error not in errors:
+ errors.append(error)
+
+ if errors:
+ return "Invalid MultiPolygon:\n%s" % ", ".join(errors)
+
+ def to_mongo(self, value):
+ if isinstance(value, dict):
+ return value
+ return SON([("type", self._type), ("coordinates", value)])
diff --git a/awx/lib/site-packages/mongoengine/base/metaclasses.py b/awx/lib/site-packages/mongoengine/base/metaclasses.py
new file mode 100644
index 0000000000..48da84f42c
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/base/metaclasses.py
@@ -0,0 +1,431 @@
+import warnings
+
+import pymongo
+
+from mongoengine.common import _import_class
+from mongoengine.errors import InvalidDocumentError
+from mongoengine.python_support import PY3
+from mongoengine.queryset import (DO_NOTHING, DoesNotExist,
+ MultipleObjectsReturned,
+ QuerySet, QuerySetManager)
+
+from mongoengine.base.common import _document_registry, ALLOW_INHERITANCE
+from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField
+
+__all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass')
+
+
+class DocumentMetaclass(type):
+
+ """Metaclass for all documents.
+ """
+
+ def __new__(cls, name, bases, attrs):
+ flattened_bases = cls._get_bases(bases)
+ super_new = super(DocumentMetaclass, cls).__new__
+
+ # If it's a base class, just call super
+ metaclass = attrs.get('my_metaclass')
+ if metaclass and issubclass(metaclass, DocumentMetaclass):
+ return super_new(cls, name, bases, attrs)
+
+ attrs['_is_document'] = attrs.get('_is_document', False)
+ attrs['_cached_reference_fields'] = []
+
+ # EmbeddedDocuments could have meta data for inheritance
+ if 'meta' in attrs:
+ attrs['_meta'] = attrs.pop('meta')
+
+ # EmbeddedDocuments should inherit meta data
+ if '_meta' not in attrs:
+ meta = MetaDict()
+ for base in flattened_bases[::-1]:
+ # Add any mixin metadata from plain objects
+ if hasattr(base, 'meta'):
+ meta.merge(base.meta)
+ elif hasattr(base, '_meta'):
+ meta.merge(base._meta)
+ attrs['_meta'] = meta
+ attrs['_meta']['abstract'] = False # 789: EmbeddedDocument shouldn't inherit abstract
+
+ if attrs['_meta'].get('allow_inheritance', ALLOW_INHERITANCE):
+ StringField = _import_class('StringField')
+ attrs['_cls'] = StringField()
+
+ # Handle document Fields
+
+ # Merge all fields from subclasses
+ doc_fields = {}
+ for base in flattened_bases[::-1]:
+ if hasattr(base, '_fields'):
+ doc_fields.update(base._fields)
+
+ # Standard object mixin - merge in any Fields
+ if not hasattr(base, '_meta'):
+ base_fields = {}
+ for attr_name, attr_value in base.__dict__.iteritems():
+ if not isinstance(attr_value, BaseField):
+ continue
+ attr_value.name = attr_name
+ if not attr_value.db_field:
+ attr_value.db_field = attr_name
+ base_fields[attr_name] = attr_value
+
+ doc_fields.update(base_fields)
+
+ # Discover any document fields
+ field_names = {}
+ for attr_name, attr_value in attrs.iteritems():
+ if not isinstance(attr_value, BaseField):
+ continue
+ attr_value.name = attr_name
+ if not attr_value.db_field:
+ attr_value.db_field = attr_name
+ doc_fields[attr_name] = attr_value
+
+ # Count names to ensure no db_field redefinitions
+ field_names[attr_value.db_field] = field_names.get(
+ attr_value.db_field, 0) + 1
+
+ # Ensure no duplicate db_fields
+ duplicate_db_fields = [k for k, v in field_names.items() if v > 1]
+ if duplicate_db_fields:
+ msg = ("Multiple db_fields defined for: %s " %
+ ", ".join(duplicate_db_fields))
+ raise InvalidDocumentError(msg)
+
+ # Set _fields and db_field maps
+ attrs['_fields'] = doc_fields
+ attrs['_db_field_map'] = dict([(k, getattr(v, 'db_field', k))
+ for k, v in doc_fields.iteritems()])
+ attrs['_reverse_db_field_map'] = dict(
+ (v, k) for k, v in attrs['_db_field_map'].iteritems())
+
+ attrs['_fields_ordered'] = tuple(i[1] for i in sorted(
+ (v.creation_counter, v.name)
+ for v in doc_fields.itervalues()))
+
+ #
+ # Set document hierarchy
+ #
+ superclasses = ()
+ class_name = [name]
+ for base in flattened_bases:
+ if (not getattr(base, '_is_base_cls', True) and
+ not getattr(base, '_meta', {}).get('abstract', True)):
+ # Collate hierarchy for _cls and _subclasses
+ class_name.append(base.__name__)
+
+ if hasattr(base, '_meta'):
+ # Warn if allow_inheritance isn't set and prevent
+ # inheritance of classes where inheritance is set to False
+ allow_inheritance = base._meta.get('allow_inheritance',
+ ALLOW_INHERITANCE)
+ if (allow_inheritance is not True and
+ not base._meta.get('abstract')):
+ raise ValueError('Document %s may not be subclassed' %
+ base.__name__)
+
+ # Get superclasses from last base superclass
+ document_bases = [b for b in flattened_bases
+ if hasattr(b, '_class_name')]
+ if document_bases:
+ superclasses = document_bases[0]._superclasses
+ superclasses += (document_bases[0]._class_name, )
+
+ _cls = '.'.join(reversed(class_name))
+ attrs['_class_name'] = _cls
+ attrs['_superclasses'] = superclasses
+ attrs['_subclasses'] = (_cls, )
+ attrs['_types'] = attrs['_subclasses'] # TODO: deprecate _types
+
+ # Create the new_class
+ new_class = super_new(cls, name, bases, attrs)
+
+ # Set _subclasses
+ for base in document_bases:
+ if _cls not in base._subclasses:
+ base._subclasses += (_cls,)
+ base._types = base._subclasses # TODO: deprecate _types
+
+ (Document, EmbeddedDocument, DictField,
+ CachedReferenceField) = cls._import_classes()
+
+ if issubclass(new_class, Document):
+ new_class._collection = None
+
+ # Add class to the _document_registry
+ _document_registry[new_class._class_name] = new_class
+
+ # In Python 2, User-defined methods objects have special read-only
+ # attributes 'im_func' and 'im_self' which contain the function obj
+ # and class instance object respectively. With Python 3 these special
+ # attributes have been replaced by __func__ and __self__. The Blinker
+ # module continues to use im_func and im_self, so the code below
+ # copies __func__ into im_func and __self__ into im_self for
+ # classmethod objects in Document derived classes.
+ if PY3:
+ for key, val in new_class.__dict__.items():
+ if isinstance(val, classmethod):
+ f = val.__get__(new_class)
+ if hasattr(f, '__func__') and not hasattr(f, 'im_func'):
+ f.__dict__.update({'im_func': getattr(f, '__func__')})
+ if hasattr(f, '__self__') and not hasattr(f, 'im_self'):
+ f.__dict__.update({'im_self': getattr(f, '__self__')})
+
+ # Handle delete rules
+ for field in new_class._fields.itervalues():
+ f = field
+ f.owner_document = new_class
+ delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING)
+ if isinstance(f, CachedReferenceField):
+
+ if issubclass(new_class, EmbeddedDocument):
+ raise InvalidDocumentError(
+ "CachedReferenceFields is not allowed in EmbeddedDocuments")
+ if not f.document_type:
+ raise InvalidDocumentError(
+ "Document is not avaiable to sync")
+
+ if f.auto_sync:
+ f.start_listener()
+
+ f.document_type._cached_reference_fields.append(f)
+
+ if isinstance(f, ComplexBaseField) and hasattr(f, 'field'):
+ delete_rule = getattr(f.field,
+ 'reverse_delete_rule',
+ DO_NOTHING)
+ if isinstance(f, DictField) and delete_rule != DO_NOTHING:
+ msg = ("Reverse delete rules are not supported "
+ "for %s (field: %s)" %
+ (field.__class__.__name__, field.name))
+ raise InvalidDocumentError(msg)
+
+ f = field.field
+
+ if delete_rule != DO_NOTHING:
+ if issubclass(new_class, EmbeddedDocument):
+ msg = ("Reverse delete rules are not supported for "
+ "EmbeddedDocuments (field: %s)" % field.name)
+ raise InvalidDocumentError(msg)
+ f.document_type.register_delete_rule(new_class,
+ field.name, delete_rule)
+
+ if (field.name and hasattr(Document, field.name) and
+ EmbeddedDocument not in new_class.mro()):
+ msg = ("%s is a document method and not a valid "
+ "field name" % field.name)
+ raise InvalidDocumentError(msg)
+
+ return new_class
+
+ def add_to_class(self, name, value):
+ setattr(self, name, value)
+
+ @classmethod
+ def _get_bases(cls, bases):
+ if isinstance(bases, BasesTuple):
+ return bases
+ seen = []
+ bases = cls.__get_bases(bases)
+ unique_bases = (b for b in bases if not (b in seen or seen.append(b)))
+ return BasesTuple(unique_bases)
+
+ @classmethod
+ def __get_bases(cls, bases):
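+ # Depth-first walk of the full inheritance graph, skipping object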
+ for base in bases:
+ if base is object:
+ continue
+ yield base
+ for child_base in cls.__get_bases(base.__bases__):
+ yield child_base
+
+ @classmethod
+ def _import_classes(cls):
+ Document = _import_class('Document')
+ EmbeddedDocument = _import_class('EmbeddedDocument')
+ DictField = _import_class('DictField')
+ CachedReferenceField = _import_class('CachedReferenceField')
+ return (Document, EmbeddedDocument, DictField, CachedReferenceField)
+
+
+class TopLevelDocumentMetaclass(DocumentMetaclass):
+
+ """Metaclass for top-level documents (i.e. documents that have their own
+ collection in the database).
+ """
+
+ def __new__(cls, name, bases, attrs):
+ flattened_bases = cls._get_bases(bases)
+ super_new = super(TopLevelDocumentMetaclass, cls).__new__
+
+ # Set default _meta data if base class, otherwise get user defined meta
+ if (attrs.get('my_metaclass') == TopLevelDocumentMetaclass):
+ # defaults
+ attrs['_meta'] = {
+ 'abstract': True,
+ 'max_documents': None,
+ 'max_size': None,
+ 'ordering': [], # default ordering applied at runtime
+ 'indexes': [], # indexes to be ensured at runtime
+ 'id_field': None,
+ 'index_background': False,
+ 'index_drop_dups': False,
+ 'index_opts': None,
+ 'delete_rules': None,
+ 'allow_inheritance': None,
+ }
+ attrs['_is_base_cls'] = True
+ attrs['_meta'].update(attrs.get('meta', {}))
+ else:
+ attrs['_meta'] = attrs.get('meta', {})
+ # Explicitly set abstract to False unless already set
+ attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False)
+ attrs['_is_base_cls'] = False
+
+ # Set flag marking as document class - as opposed to an object mixin
+ attrs['_is_document'] = True
+
+ # Ensure queryset_class is inherited
+ if 'objects' in attrs:
+ manager = attrs['objects']
+ if hasattr(manager, 'queryset_class'):
+ attrs['_meta']['queryset_class'] = manager.queryset_class
+
+ # Clean up top level meta
+ if 'meta' in attrs:
+ del attrs['meta']
+
+ # Find the parent document class
+ parent_doc_cls = [b for b in flattened_bases
+ if b.__class__ == TopLevelDocumentMetaclass]
+ parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0]
+
+ # Prevent classes setting collection different to their parents
+ # If parent wasn't an abstract class
+ if (parent_doc_cls and 'collection' in attrs.get('_meta', {})
+ and not parent_doc_cls._meta.get('abstract', True)):
+ msg = "Trying to set a collection on a subclass (%s)" % name
+ warnings.warn(msg, SyntaxWarning)
+ del attrs['_meta']['collection']
+
+ # Ensure abstract documents have abstract bases
+ if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'):
+ if (parent_doc_cls and
+ not parent_doc_cls._meta.get('abstract', False)):
+ msg = "Abstract document cannot have non-abstract base"
+ raise ValueError(msg)
+ return super_new(cls, name, bases, attrs)
+
+ # Merge base class metas.
+ # Uses a special MetaDict that handles various merging rules
+ meta = MetaDict()
+ for base in flattened_bases[::-1]:
+ # Add any mixin metadata from plain objects
+ if hasattr(base, 'meta'):
+ meta.merge(base.meta)
+ elif hasattr(base, '_meta'):
+ meta.merge(base._meta)
+
+ # Set collection in the meta if its callable
+ if (getattr(base, '_is_document', False) and
+ not base._meta.get('abstract')):
+ collection = meta.get('collection', None)
+ if callable(collection):
+ meta['collection'] = collection(base)
+
+ meta.merge(attrs.get('_meta', {})) # Top level meta
+
+ # Only simple classes (direct subclasses of Document)
+ # may set allow_inheritance to False
+ simple_class = all([b._meta.get('abstract')
+ for b in flattened_bases if hasattr(b, '_meta')])
+ if (not simple_class and meta['allow_inheritance'] is False and
+ not meta['abstract']):
+ raise ValueError('Only direct subclasses of Document may set '
+ '"allow_inheritance" to False')
+
+ # Set default collection name
+ if 'collection' not in meta:
+ meta['collection'] = ''.join('_%s' % c if c.isupper() else c
+ for c in name).strip('_').lower()
+ attrs['_meta'] = meta
+
+ # Call super and get the new class
+ new_class = super_new(cls, name, bases, attrs)
+
+ meta = new_class._meta
+
+ # Set index specifications
+ meta['index_specs'] = new_class._build_index_specs(meta['indexes'])
+
+ # If collection is a callable - call it and set the value
+ collection = meta.get('collection')
+ if callable(collection):
+ new_class._meta['collection'] = collection(new_class)
+
+ # Provide a default queryset unless exists or one has been set
+ if 'objects' not in dir(new_class):
+ new_class.objects = QuerySetManager()
+
+ # Validate the fields and set primary key if needed
+ for field_name, field in new_class._fields.iteritems():
+ if field.primary_key:
+ # Ensure only one primary key is set
+ current_pk = new_class._meta.get('id_field')
+ if current_pk and current_pk != field_name:
+ raise ValueError('Cannot override primary key field')
+
+ # Set primary key
+ if not current_pk:
+ new_class._meta['id_field'] = field_name
+ new_class.id = field
+
+ # Set primary key if not defined by the document
+ new_class._auto_id_field = getattr(parent_doc_cls,
+ '_auto_id_field', False)
+ if not new_class._meta.get('id_field'):
+ new_class._auto_id_field = True
+ new_class._meta['id_field'] = 'id'
+ new_class._fields['id'] = ObjectIdField(db_field='_id')
+ new_class._fields['id'].name = 'id'
+ new_class.id = new_class._fields['id']
+
+ # Prepend id field to _fields_ordered
+ if 'id' in new_class._fields and 'id' not in new_class._fields_ordered:
+ new_class._fields_ordered = ('id', ) + new_class._fields_ordered
+
+ # Merge in exceptions with parent hierarchy
+ exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned)
+ module = attrs.get('__module__')
+ for exc in exceptions_to_merge:
+ name = exc.__name__
+ parents = tuple(getattr(base, name) for base in flattened_bases
+ if hasattr(base, name)) or (exc,)
+ # Create new exception and set to new_class
+ exception = type(name, parents, {'__module__': module})
+ setattr(new_class, name, exception)
+
+ return new_class
+
+
+class MetaDict(dict):
+
+ """Custom dictionary for meta classes.
+ Handles the merging of set indexes
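+
+ Example (illustrative): keys listed in ``_merge_options`` are
+ concatenated rather than overwritten::
+
+ m = MetaDict({'indexes': ['a']})
+ m.merge({'indexes': ['b'], 'ordering': ['x']})
+ # m == {'indexes': ['a', 'b'], 'ordering': ['x']}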
+ """
+ _merge_options = ('indexes',)
+
+ def merge(self, new_options):
+ for k, v in new_options.iteritems():
+ if k in self._merge_options:
+ self[k] = self.get(k, []) + v
+ else:
+ self[k] = v
+
+
+class BasesTuple(tuple):
+
+ """Special class to handle introspection of bases tuple in __new__"""
+ pass
diff --git a/awx/lib/site-packages/mongoengine/common.py b/awx/lib/site-packages/mongoengine/common.py
new file mode 100644
index 0000000000..3e63e98ee8
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/common.py
@@ -0,0 +1,55 @@
+_class_registry_cache = {}
+_field_list_cache = []
+
+
+def _import_class(cls_name):
+ """Cache mechanism for imports.
+
+ Due to complications of circular imports mongoengine needs to do lots of
+ inline imports in functions. This is inefficient as classes are
+ imported repeatedly throughout the mongoengine code. This is
+ compounded by some recursive functions requiring inline imports.
+
+ :mod:`mongoengine.common` provides a single point to import all these
+ classes. Circular imports aren't an issue as it dynamically imports the
+ class when first needed. Subsequent calls to the
+ :func:`~mongoengine.common._import_class` can then directly retrieve the
+ class from the :data:`mongoengine.common._class_registry_cache`.
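+
+ Example (illustrative)::
+
+ Document = _import_class('Document')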
+ """
+ if cls_name in _class_registry_cache:
+ return _class_registry_cache.get(cls_name)
+
+ doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument',
+ 'MapReduceDocument')
+
+ # Field Classes
+ if not _field_list_cache:
+ from mongoengine.fields import __all__ as fields
+ _field_list_cache.extend(fields)
+ from mongoengine.base.fields import __all__ as fields
+ _field_list_cache.extend(fields)
+
+ field_classes = _field_list_cache
+
+ queryset_classes = ('OperationError',)
+ deref_classes = ('DeReference',)
+
+ if cls_name in doc_classes:
+ from mongoengine import document as module
+ import_classes = doc_classes
+ elif cls_name in field_classes:
+ from mongoengine import fields as module
+ import_classes = field_classes
+ elif cls_name in queryset_classes:
+ from mongoengine import queryset as module
+ import_classes = queryset_classes
+ elif cls_name in deref_classes:
+ from mongoengine import dereference as module
+ import_classes = deref_classes
+ else:
+ raise ValueError('No import set for: %s' % cls_name)
+
+ for cls in import_classes:
+ _class_registry_cache[cls] = getattr(module, cls)
+
+ return _class_registry_cache.get(cls_name)
diff --git a/awx/lib/site-packages/mongoengine/connection.py b/awx/lib/site-packages/mongoengine/connection.py
new file mode 100644
index 0000000000..dcecdd9ad2
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/connection.py
@@ -0,0 +1,169 @@
+import pymongo
+from pymongo import MongoClient, MongoReplicaSetClient, uri_parser
+
+
+__all__ = ['ConnectionError', 'connect', 'register_connection',
+ 'DEFAULT_CONNECTION_NAME']
+
+
+DEFAULT_CONNECTION_NAME = 'default'
+
+
+class ConnectionError(Exception):
+ pass
+
+
+_connection_settings = {}
+_connections = {}
+_dbs = {}
+
+
+def register_connection(alias, name=None, host=None, port=None,
+ read_preference=False,
+ username=None, password=None, authentication_source=None,
+ **kwargs):
+ """Add a connection.
+
+ :param alias: the name that will be used to refer to this connection
+ throughout MongoEngine
+ :param name: the name of the specific database to use
+ :param host: the host name of the :program:`mongod` instance to connect to
+ :param port: the port that the :program:`mongod` instance is running on
+ :param read_preference: The read preference for the collection
+ (added in pymongo 2.1)
+ :param username: username to authenticate with
+ :param password: password to authenticate with
+ :param authentication_source: database to authenticate against
+ :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
+
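+ Example (illustrative; alias and database names are arbitrary)::
+
+ register_connection('analytics', name='analytics_db',
+ host='localhost', port=27017)
+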
+ """
+ global _connection_settings
+
+ conn_settings = {
+ 'name': name or 'test',
+ 'host': host or 'localhost',
+ 'port': port or 27017,
+ 'read_preference': read_preference,
+ 'username': username,
+ 'password': password,
+ 'authentication_source': authentication_source
+ }
+
+ # Handle uri style connections
+ if "://" in conn_settings['host']:
+ uri_dict = uri_parser.parse_uri(conn_settings['host'])
+ conn_settings.update({
+ 'name': uri_dict.get('database') or name,
+ 'username': uri_dict.get('username'),
+ 'password': uri_dict.get('password'),
+ 'read_preference': read_preference,
+ })
+ if "replicaSet" in conn_settings['host']:
+ conn_settings['replicaSet'] = True
+
+ # Deprecated parameters that should not be passed on
+ kwargs.pop('slaves', None)
+ kwargs.pop('is_slave', None)
+
+ conn_settings.update(kwargs)
+ _connection_settings[alias] = conn_settings
+
+
+def disconnect(alias=DEFAULT_CONNECTION_NAME):
+ global _connections
+ global _dbs
+
+ if alias in _connections:
+ get_connection(alias=alias).disconnect()
+ del _connections[alias]
+ if alias in _dbs:
+ del _dbs[alias]
+
+
+def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
+ global _connections
+ # Connect to the database if not already connected
+ if reconnect:
+ disconnect(alias)
+
+ if alias not in _connections:
+ if alias not in _connection_settings:
+ msg = 'Connection with alias "%s" has not been defined' % alias
+ if alias == DEFAULT_CONNECTION_NAME:
+ msg = 'You have not defined a default connection'
+ raise ConnectionError(msg)
+ conn_settings = _connection_settings[alias].copy()
+
+ conn_settings.pop('name', None)
+ conn_settings.pop('username', None)
+ conn_settings.pop('password', None)
+ conn_settings.pop('authentication_source', None)
+
+ connection_class = MongoClient
+ if 'replicaSet' in conn_settings:
+ conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
+ # Discard port since it can't be used on MongoReplicaSetClient
+ conn_settings.pop('port', None)
+ # Discard replicaSet if it is not a string
+ if not isinstance(conn_settings['replicaSet'], basestring):
+ conn_settings.pop('replicaSet', None)
+ connection_class = MongoReplicaSetClient
+
+ try:
+ connection = None
+ # check for shared connections
+ connection_settings_iterator = ((db_alias, settings.copy()) for db_alias, settings in _connection_settings.iteritems())
+ for db_alias, connection_settings in connection_settings_iterator:
+ connection_settings.pop('name', None)
+ connection_settings.pop('username', None)
+ connection_settings.pop('password', None)
+ if conn_settings == connection_settings and _connections.get(db_alias, None):
+ connection = _connections[db_alias]
+ break
+
+ _connections[alias] = connection if connection else connection_class(**conn_settings)
+ except Exception, e:
+ raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
+ return _connections[alias]
+
+
+def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
+ global _dbs
+ if reconnect:
+ disconnect(alias)
+
+ if alias not in _dbs:
+ conn = get_connection(alias)
+ conn_settings = _connection_settings[alias]
+ db = conn[conn_settings['name']]
+ # Authenticate if necessary
+ if conn_settings['username'] and conn_settings['password']:
+ db.authenticate(conn_settings['username'],
+ conn_settings['password'],
+ source=conn_settings['authentication_source'])
+ _dbs[alias] = db
+ return _dbs[alias]
+
+
+def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs):
+ """Connect to the database specified by the 'db' argument.
+
+ Connection settings may be provided here as well if the database is not
+ running on the default port on localhost. If authentication is needed,
+ provide username and password arguments as well.
+
+ Multiple databases are supported by using aliases. Provide a separate
+ `alias` to connect to a different instance of :program:`mongod`.
+
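+ Example (illustrative)::
+
+ connect('mydb') # default alias
+ connect('logs', alias='logging') # a second, separate connection
+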
+ .. versionchanged:: 0.6 - added multiple database support.
+ """
+ global _connections
+ if alias not in _connections:
+ register_connection(alias, db, **kwargs)
+
+ return get_connection(alias)
+
+
+# Support old naming convention
+_get_connection = get_connection
+_get_db = get_db
diff --git a/awx/lib/site-packages/mongoengine/context_managers.py b/awx/lib/site-packages/mongoengine/context_managers.py
new file mode 100644
index 0000000000..cc86006600
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/context_managers.py
@@ -0,0 +1,221 @@
+from mongoengine.common import _import_class
+from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db
+
+
+__all__ = ("switch_db", "switch_collection", "no_dereference",
+ "no_sub_classes", "query_counter")
+
+
+class switch_db(object):
+ """ switch_db alias context manager.
+
+ Example ::
+
+ # Register connections
+ register_connection('default', 'mongoenginetest')
+ register_connection('testdb-1', 'mongoenginetest2')
+
+ class Group(Document):
+ name = StringField()
+
+ Group(name="test").save() # Saves in the default db
+
+ with switch_db(Group, 'testdb-1') as Group:
+ Group(name="hello testdb!").save() # Saves in testdb-1
+
+ """
+
+ def __init__(self, cls, db_alias):
+ """ Construct the switch_db context manager
+
+ :param cls: the class to change the registered db
+ :param db_alias: the name of the specific database to use
+ """
+ self.cls = cls
+ self.collection = cls._get_collection()
+ self.db_alias = db_alias
+ self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)
+
+ def __enter__(self):
+ """ change the db_alias and clear the cached collection """
+ self.cls._meta["db_alias"] = self.db_alias
+ self.cls._collection = None
+ return self.cls
+
+ def __exit__(self, t, value, traceback):
+ """ Reset the db_alias and collection """
+ self.cls._meta["db_alias"] = self.ori_db_alias
+ self.cls._collection = self.collection
+
+
+class switch_collection(object):
+ """ switch_collection alias context manager.
+
+ Example ::
+
+ class Group(Document):
+ name = StringField()
+
+ Group(name="test").save() # Saves in the default db
+
+ with switch_collection(Group, 'group1') as Group:
+ Group(name="hello testdb!").save() # Saves in group1 collection
+
+ """
+
+ def __init__(self, cls, collection_name):
+ """ Construct the switch_collection context manager
+
+ :param cls: the class to change the registered db
+ :param collection_name: the name of the collection to use
+ """
+ self.cls = cls
+ self.ori_collection = cls._get_collection()
+ self.ori_get_collection_name = cls._get_collection_name
+ self.collection_name = collection_name
+
+ def __enter__(self):
+ """ change the _get_collection_name and clear the cached collection """
+
+ @classmethod
+ def _get_collection_name(cls):
+ return self.collection_name
+
+ self.cls._get_collection_name = _get_collection_name
+ self.cls._collection = None
+ return self.cls
+
+ def __exit__(self, t, value, traceback):
+ """ Reset the collection """
+ self.cls._collection = self.ori_collection
+ self.cls._get_collection_name = self.ori_get_collection_name
+
+
+class no_dereference(object):
+ """ no_dereference context manager.
+
+ Turns off all dereferencing in Documents for the duration of the context
+ manager::
+
+ with no_dereference(Group) as Group:
+ Group.objects.all()
+
+ """
+
+ def __init__(self, cls):
+ """ Construct the no_dereference context manager.
+
+ :param cls: the class to turn dereferencing off on
+ """
+ self.cls = cls
+
+ ReferenceField = _import_class('ReferenceField')
+ GenericReferenceField = _import_class('GenericReferenceField')
+ ComplexBaseField = _import_class('ComplexBaseField')
+
+ self.deref_fields = [k for k, v in self.cls._fields.iteritems()
+ if isinstance(v, (ReferenceField,
+ GenericReferenceField,
+ ComplexBaseField))]
+
+ def __enter__(self):
+ """ change the objects default and _auto_dereference values"""
+ for field in self.deref_fields:
+ self.cls._fields[field]._auto_dereference = False
+ return self.cls
+
+ def __exit__(self, t, value, traceback):
+ """ Reset the default and _auto_dereference values"""
+ for field in self.deref_fields:
+ self.cls._fields[field]._auto_dereference = True
+ return self.cls
+
+
+class no_sub_classes(object):
+ """ no_sub_classes context manager.
+
+ Only returns instances of this class and no sub (inherited) classes::
+
+ with no_sub_classes(Group) as Group:
+ Group.objects.all()
+
+ """
+
+ def __init__(self, cls):
+ """ Construct the no_sub_classes context manager.
+
+ :param cls: the class to turn querying sub classes on
+ """
+ self.cls = cls
+
+ def __enter__(self):
+ """ change the objects default and _auto_dereference values"""
+ self.cls._all_subclasses = self.cls._subclasses
+ self.cls._subclasses = (self.cls,)
+ return self.cls
+
+ def __exit__(self, t, value, traceback):
+ """ Reset the default and _auto_dereference values"""
+ self.cls._subclasses = self.cls._all_subclasses
+ delattr(self.cls, '_all_subclasses')
+ return self.cls
+
+
+class query_counter(object):
+ """ Query_counter context manager to get the number of queries. """
+
+ def __init__(self):
+ """ Construct the query_counter. """
+ self.counter = 0
+ self.db = get_db()
+
+ def __enter__(self):
+ """ On every with block we need to drop the profile collection. """
+ self.db.set_profiling_level(0)
+ self.db.system.profile.drop()
+ self.db.set_profiling_level(2)
+ return self
+
+ def __exit__(self, t, value, traceback):
+ """ Reset the profiling level. """
+ self.db.set_profiling_level(0)
+
+ def __eq__(self, value):
+ """ == Compare querycounter. """
+ counter = self._get_count()
+ return value == counter
+
+ def __ne__(self, value):
+ """ != Compare querycounter. """
+ return not self.__eq__(value)
+
+ def __lt__(self, value):
+ """ < Compare querycounter. """
+ return self._get_count() < value
+
+ def __le__(self, value):
+ """ <= Compare querycounter. """
+ return self._get_count() <= value
+
+ def __gt__(self, value):
+ """ > Compare querycounter. """
+ return self._get_count() > value
+
+ def __ge__(self, value):
+ """ >= Compare querycounter. """
+ return self._get_count() >= value
+
+ def __int__(self):
+ """ int representation. """
+ return self._get_count()
+
+ def __repr__(self):
+ """ repr query_counter as the number of queries. """
+ return u"%s" % self._get_count()
+
+ def _get_count(self):
+ """ Get the number of queries. """
+ ignore_query = {"ns": {"$ne": "%s.system.indexes" % self.db.name}}
+ count = self.db.system.profile.find(ignore_query).count() - self.counter
+ self.counter += 1
+ return count
diff --git a/awx/lib/site-packages/mongoengine/dereference.py b/awx/lib/site-packages/mongoengine/dereference.py
new file mode 100644
index 0000000000..415d56782f
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/dereference.py
@@ -0,0 +1,237 @@
+from bson import DBRef, SON
+
+from base import (
+ BaseDict, BaseList, EmbeddedDocumentList,
+ TopLevelDocumentMetaclass, get_document
+)
+from fields import (ReferenceField, ListField, DictField, MapField)
+from connection import get_db
+from queryset import QuerySet
+from document import Document, EmbeddedDocument
+
+
+class DeReference(object):
+
+ def __call__(self, items, max_depth=1, instance=None, name=None):
+ """
+ Cheaply dereferences the items to a set depth.
+ Also handles the conversion of complex data types.
+
+ :param items: The iterable (dict, list, queryset) to be dereferenced.
+ :param max_depth: The maximum depth to recurse to
+ :param instance: The owning instance used for tracking changes by
+ :class:`~mongoengine.base.ComplexBaseField`
+ :param name: The name of the field, used for tracking changes by
+ :class:`~mongoengine.base.ComplexBaseField`
+ """
+ if items is None or isinstance(items, basestring):
+ return items
+
+ # cheapest way to convert a queryset to a list
+ # list(queryset) uses a count() query to determine length
+ if isinstance(items, QuerySet):
+ items = [i for i in items]
+
+ self.max_depth = max_depth
+ doc_type = None
+
+ if instance and isinstance(instance, (Document, EmbeddedDocument,
+ TopLevelDocumentMetaclass)):
+ doc_type = instance._fields.get(name)
+ while hasattr(doc_type, 'field'):
+ doc_type = doc_type.field
+
+ if isinstance(doc_type, ReferenceField):
+ field = doc_type
+ doc_type = doc_type.document_type
+ is_list = not hasattr(items, 'items')
+
+ if is_list and all([i.__class__ == doc_type for i in items]):
+ return items
+ elif not is_list and all([i.__class__ == doc_type
+ for i in items.values()]):
+ return items
+ elif not field.dbref:
+ if not hasattr(items, 'items'):
+
+ def _get_items(items):
+ new_items = []
+ for v in items:
+ if isinstance(v, list):
+ new_items.append(_get_items(v))
+ elif not isinstance(v, (DBRef, Document)):
+ new_items.append(field.to_python(v))
+ else:
+ new_items.append(v)
+ return new_items
+
+ items = _get_items(items)
+ else:
+ items = dict([
+ (k, field.to_python(v))
+ if not isinstance(v, (DBRef, Document)) else (k, v)
+ for k, v in items.iteritems()]
+ )
+
+ self.reference_map = self._find_references(items)
+ self.object_map = self._fetch_objects(doc_type=doc_type)
+ return self._attach_objects(items, 0, instance, name)
+
+ def _find_references(self, items, depth=0):
+ """
+ Recursively finds all db references to be dereferenced
+
+ :param items: The iterable (dict, list, queryset)
+ :param depth: The current depth of recursion
+ """
+ reference_map = {}
+ if not items or depth >= self.max_depth:
+ return reference_map
+
+ # Determine the iterator to use
+ if not hasattr(items, 'items'):
+ iterator = enumerate(items)
+ else:
+ iterator = items.iteritems()
+
+ # Recursively find dbreferences
+ depth += 1
+ for k, item in iterator:
+ if isinstance(item, (Document, EmbeddedDocument)):
+ for field_name, field in item._fields.iteritems():
+ v = item._data.get(field_name, None)
+ if isinstance(v, (DBRef)):
+ reference_map.setdefault(field.document_type, []).append(v.id)
+ elif isinstance(v, (dict, SON)) and '_ref' in v:
+ reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id)
+ elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
+ field_cls = getattr(getattr(field, 'field', None), 'document_type', None)
+ references = self._find_references(v, depth)
+ for key, refs in references.iteritems():
+ if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)):
+ key = field_cls
+ reference_map.setdefault(key, []).extend(refs)
+ elif isinstance(item, (DBRef)):
+ reference_map.setdefault(item.collection, []).append(item.id)
+ elif isinstance(item, (dict, SON)) and '_ref' in item:
+ reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id)
+ elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth:
+ references = self._find_references(item, depth - 1)
+ for key, refs in references.iteritems():
+ reference_map.setdefault(key, []).extend(refs)
+
+ return reference_map
+
+ def _fetch_objects(self, doc_type=None):
+ """Fetch all references and convert to their document objects
+ """
+ object_map = {}
+ for collection, dbrefs in self.reference_map.iteritems():
+ keys = object_map.keys()
+ refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys]))
+ if hasattr(collection, 'objects'): # We have a document class for the refs
+ references = collection.objects.in_bulk(refs)
+ for key, doc in references.iteritems():
+ object_map[key] = doc
+ else: # Generic reference: use the refs data to convert to document
+ if isinstance(doc_type, (ListField, DictField, MapField,)):
+ continue
+
+ if doc_type:
+ references = doc_type._get_db()[collection].find({'_id': {'$in': refs}})
+ for ref in references:
+ doc = doc_type._from_son(ref)
+ object_map[doc.id] = doc
+ else:
+ references = get_db()[collection].find({'_id': {'$in': refs}})
+ for ref in references:
+ if '_cls' in ref:
+ doc = get_document(ref["_cls"])._from_son(ref)
+ elif doc_type is None:
+ doc = get_document(
+ ''.join(x.capitalize()
+ for x in collection.split('_')))._from_son(ref)
+ else:
+ doc = doc_type._from_son(ref)
+ object_map[doc.id] = doc
+ return object_map
+
+ def _attach_objects(self, items, depth=0, instance=None, name=None):
+ """
+ Recursively finds all db references to be dereferenced
+
+ :param items: The iterable (dict, list, queryset)
+ :param depth: The current depth of recursion
+ :param instance: The owning instance used for tracking changes by
+ :class:`~mongoengine.base.ComplexBaseField`
+ :param name: The name of the field, used for tracking changes by
+ :class:`~mongoengine.base.ComplexBaseField`
+ """
+ if not items:
+ if isinstance(items, (BaseDict, BaseList)):
+ return items
+
+ if instance:
+ if isinstance(items, dict):
+ return BaseDict(items, instance, name)
+ else:
+ return BaseList(items, instance, name)
+
+ if isinstance(items, (dict, SON)):
+ if '_ref' in items:
+ return self.object_map.get(items['_ref'].id, items)
+ elif '_cls' in items:
+ doc = get_document(items['_cls'])._from_son(items)
+ _cls = doc._data.pop('_cls', None)
+ del items['_cls']
+ doc._data = self._attach_objects(doc._data, depth, doc, None)
+ if _cls is not None:
+ doc._data['_cls'] = _cls
+ return doc
+
+ if not hasattr(items, 'items'):
+ is_list = True
+ list_type = BaseList
+ if isinstance(items, EmbeddedDocumentList):
+ list_type = EmbeddedDocumentList
+ as_tuple = isinstance(items, tuple)
+ iterator = enumerate(items)
+ data = []
+ else:
+ is_list = False
+ iterator = items.iteritems()
+ data = {}
+
+ depth += 1
+ for k, v in iterator:
+ if is_list:
+ data.append(v)
+ else:
+ data[k] = v
+
+ if k in self.object_map and not is_list:
+ data[k] = self.object_map[k]
+ elif isinstance(v, (Document, EmbeddedDocument)):
+ for field_name, field in v._fields.iteritems():
+ v = data[k]._data.get(field_name, None)
+ if isinstance(v, (DBRef)):
+ data[k]._data[field_name] = self.object_map.get(v.id, v)
+ elif isinstance(v, (dict, SON)) and '_ref' in v:
+ data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v)
+ elif isinstance(v, dict) and depth <= self.max_depth:
+ data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
+ elif isinstance(v, (list, tuple)) and depth <= self.max_depth:
+ data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name)
+ elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth:
+ item_name = '%s.%s' % (name, k) if name else name
+ data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name)
+ elif hasattr(v, 'id'):
+ data[k] = self.object_map.get(v.id, v)
+
+ if instance and name:
+ if is_list:
+ return tuple(data) if as_tuple else list_type(data, instance, name)
+ return BaseDict(data, instance, name)
+ depth += 1
+ return data
diff --git a/awx/lib/site-packages/mongoengine/django/__init__.py b/awx/lib/site-packages/mongoengine/django/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/awx/lib/site-packages/mongoengine/django/auth.py b/awx/lib/site-packages/mongoengine/django/auth.py
new file mode 100644
index 0000000000..0a309c4cd8
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/django/auth.py
@@ -0,0 +1,412 @@
+from mongoengine import *
+
+from django.utils.encoding import smart_str
+from django.contrib.auth.models import _user_has_perm, _user_get_all_permissions, _user_has_module_perms
+from django.db import models
+from django.contrib.contenttypes.models import ContentTypeManager
+from django.contrib import auth
+from django.contrib.auth.models import AnonymousUser
+from django.utils.translation import ugettext_lazy as _
+
+from .utils import datetime_now
+
+REDIRECT_FIELD_NAME = 'next'
+
+try:
+ from django.contrib.auth.hashers import check_password, make_password
+except ImportError:
+ """Handle older versions of Django"""
+ from django.utils.hashcompat import md5_constructor, sha_constructor
+
+ def get_hexdigest(algorithm, salt, raw_password):
+ raw_password, salt = smart_str(raw_password), smart_str(salt)
+ if algorithm == 'md5':
+ return md5_constructor(salt + raw_password).hexdigest()
+ elif algorithm == 'sha1':
+ return sha_constructor(salt + raw_password).hexdigest()
+ raise ValueError('Got unknown password algorithm type in password')
+
+ def check_password(raw_password, password):
+ algo, salt, hash = password.split('$')
+ return hash == get_hexdigest(algo, salt, raw_password)
+
+ def make_password(raw_password):
+ from random import random
+ algo = 'sha1'
+ salt = get_hexdigest(algo, str(random()), str(random()))[:5]
+ hash = get_hexdigest(algo, salt, raw_password)
+ return '%s$%s$%s' % (algo, salt, hash)
+
+
+class ContentType(Document):
+ name = StringField(max_length=100)
+ app_label = StringField(max_length=100)
+ model = StringField(max_length=100, verbose_name=_('python model class name'),
+ unique_with='app_label')
+ objects = ContentTypeManager()
+
+ class Meta:
+ verbose_name = _('content type')
+ verbose_name_plural = _('content types')
+ # db_table = 'django_content_type'
+ # ordering = ('name',)
+ # unique_together = (('app_label', 'model'),)
+
+ def __unicode__(self):
+ return self.name
+
+ def model_class(self):
+ "Returns the Python model class for this type of content."
+ from django.db import models
+ return models.get_model(self.app_label, self.model)
+
+ def get_object_for_this_type(self, **kwargs):
+ """
+ Returns an object of this type for the keyword arguments given.
+ Basically, this is a proxy around this object_type's get_object() model
+ method. The ObjectNotExist exception, if thrown, will not be caught,
+ so code that calls this method should catch it.
+ """
+ return self.model_class()._default_manager.using(self._state.db).get(**kwargs)
+
+ def natural_key(self):
+ return (self.app_label, self.model)
+
+
+class SiteProfileNotAvailable(Exception):
+ pass
+
+
+class PermissionManager(models.Manager):
+ def get_by_natural_key(self, codename, app_label, model):
+ return self.get(
+ codename=codename,
+ content_type=ContentType.objects.get_by_natural_key(app_label, model)
+ )
+
+
+class Permission(Document):
+ """The permissions system provides a way to assign permissions to specific
+ users and groups of users.
+
+ The permission system is used by the Django admin site, but may also be
+ useful in your own code. The Django admin site uses permissions as follows:
+
+ - The "add" permission limits the user's ability to view the "add"
+ form and add an object.
+ - The "change" permission limits a user's ability to view the change
+ list, view the "change" form and change an object.
+ - The "delete" permission limits the ability to delete an object.
+
+ Permissions are set globally per type of object, not per specific object
+ instance. It is possible to say "Mary may change news stories," but it's
+ not currently possible to say "Mary may change news stories, but only the
+ ones she created herself" or "Mary may only change news stories that have
+ a certain status or publication date."
+
+ Three basic permissions -- add, change and delete -- are automatically
+ created for each Django model.
+ """
+ name = StringField(max_length=50, verbose_name=_('username'))
+ content_type = ReferenceField(ContentType)
+ codename = StringField(max_length=100, verbose_name=_('codename'))
+ # FIXME: don't access field of the other class
+ # unique_with=['content_type__app_label', 'content_type__model'])
+
+ objects = PermissionManager()
+
+ class Meta:
+ verbose_name = _('permission')
+ verbose_name_plural = _('permissions')
+ # unique_together = (('content_type', 'codename'),)
+ # ordering = ('content_type__app_label', 'content_type__model', 'codename')
+
+ def __unicode__(self):
+ return u"%s | %s | %s" % (
+ unicode(self.content_type.app_label),
+ unicode(self.content_type),
+ unicode(self.name))
+
+ def natural_key(self):
+ return (self.codename,) + self.content_type.natural_key()
+ natural_key.dependencies = ['contenttypes.contenttype']
+
+
+class Group(Document):
+ """Groups are a generic way of categorizing users to apply permissions,
+ or some other label, to those users. A user can belong to any number of
+ groups.
+
+ A user in a group automatically has all the permissions granted to that
+ group. For example, if the group Site editors has the permission
+ can_edit_home_page, any user in that group will have that permission.
+
+ Beyond permissions, groups are a convenient way to categorize users to
+ apply some label, or extended functionality, to them. For example, you
+ could create a group 'Special users', and you could write code that would
+ do special things to those users -- such as giving them access to a
+ members-only portion of your site, or sending them members-only
+ e-mail messages.
+ """
+ name = StringField(max_length=80, unique=True, verbose_name=_('name'))
+ permissions = ListField(ReferenceField(Permission, verbose_name=_('permissions'), required=False))
+
+ class Meta:
+ verbose_name = _('group')
+ verbose_name_plural = _('groups')
+
+ def __unicode__(self):
+ return self.name
+
+
+class UserManager(models.Manager):
+ def create_user(self, username, email, password=None):
+ """
+ Creates and saves a User with the given username, e-mail and password.
+ """
+ now = datetime_now()
+
+ # Normalize the address by lowercasing the domain part of the email
+ # address.
+ try:
+ email_name, domain_part = email.strip().split('@', 1)
+ except ValueError:
+ pass
+ else:
+ email = '@'.join([email_name, domain_part.lower()])
+
+ user = self.model(username=username, email=email, is_staff=False,
+ is_active=True, is_superuser=False, last_login=now,
+ date_joined=now)
+
+ user.set_password(password)
+ user.save(using=self._db)
+ return user
+
+ def create_superuser(self, username, email, password):
+ u = self.create_user(username, email, password)
+ u.is_staff = True
+ u.is_active = True
+ u.is_superuser = True
+ u.save(using=self._db)
+ return u
+
+ def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
+ "Generates a random password with the given length and given allowed_chars"
+ # Note that default value of allowed_chars does not have "I" or letters
+ # that look like it -- just to avoid confusion.
+ from random import choice
+ return ''.join([choice(allowed_chars) for i in range(length)])
+
+
+class User(Document):
+ """A User document that aims to mirror most of the API specified by Django
+ at http://docs.djangoproject.com/en/dev/topics/auth/#users
+ """
+ username = StringField(max_length=30, required=True,
+ verbose_name=_('username'),
+ help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters"))
+
+ first_name = StringField(max_length=30,
+ verbose_name=_('first name'))
+
+ last_name = StringField(max_length=30,
+ verbose_name=_('last name'))
+ email = EmailField(verbose_name=_('e-mail address'))
+ password = StringField(max_length=128,
+ verbose_name=_('password'),
+ help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the change password form."))
+ is_staff = BooleanField(default=False,
+ verbose_name=_('staff status'),
+ help_text=_("Designates whether the user can log into this admin site."))
+ is_active = BooleanField(default=True,
+ verbose_name=_('active'),
+ help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts."))
+ is_superuser = BooleanField(default=False,
+ verbose_name=_('superuser status'),
+ help_text=_("Designates that this user has all permissions without explicitly assigning them."))
+ last_login = DateTimeField(default=datetime_now,
+ verbose_name=_('last login'))
+ date_joined = DateTimeField(default=datetime_now,
+ verbose_name=_('date joined'))
+
+ user_permissions = ListField(ReferenceField(Permission), verbose_name=_('user permissions'),
+ help_text=_('Permissions for the user.'))
+
+ USERNAME_FIELD = 'username'
+ REQUIRED_FIELDS = ['email']
+
+ meta = {
+ 'allow_inheritance': True,
+ 'indexes': [
+ {'fields': ['username'], 'unique': True, 'sparse': True}
+ ]
+ }
+
+ def __unicode__(self):
+ return self.username
+
+ def get_full_name(self):
+ """Returns the users first and last names, separated by a space.
+ """
+ full_name = u'%s %s' % (self.first_name or '', self.last_name or '')
+ return full_name.strip()
+
+ def is_anonymous(self):
+ return False
+
+ def is_authenticated(self):
+ return True
+
+ def set_password(self, raw_password):
+ """Sets the user's password - always use this rather than directly
+ assigning to :attr:`~mongoengine.django.auth.User.password` as the
+ password is hashed before storage.
+ """
+ self.password = make_password(raw_password)
+ self.save()
+ return self
+
+ def check_password(self, raw_password):
+ """Checks the user's password against a provided password - always use
+ this rather than directly comparing to
+ :attr:`~mongoengine.django.auth.User.password` as the password is
+ hashed before storage.
+ """
+ return check_password(raw_password, self.password)
+
+ @classmethod
+ def create_user(cls, username, password, email=None):
+ """Create (and save) a new user with the given username, password and
+ email address.
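+
+ Example (illustrative)::
+
+ user = User.create_user('bob', 's3cret', email='bob@example.com')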
+ """
+ now = datetime_now()
+
+ # Normalize the address by lowercasing the domain part of the email
+ # address.
+ if email is not None:
+ try:
+ email_name, domain_part = email.strip().split('@', 1)
+ except ValueError:
+ pass
+ else:
+ email = '@'.join([email_name, domain_part.lower()])
+
+ user = cls(username=username, email=email, date_joined=now)
+ user.set_password(password)
+ user.save()
+ return user
+
+ def get_group_permissions(self, obj=None):
+ """
+ Returns a list of permission strings that this user has through his/her
+ groups. This method queries all available auth backends. If an object
+ is passed in, only permissions matching this object are returned.
+ """
+ permissions = set()
+ for backend in auth.get_backends():
+ if hasattr(backend, "get_group_permissions"):
+ permissions.update(backend.get_group_permissions(self, obj))
+ return permissions
+
+ def get_all_permissions(self, obj=None):
+ return _user_get_all_permissions(self, obj)
+
+ def has_perm(self, perm, obj=None):
+ """
+ Returns True if the user has the specified permission. This method
+ queries all available auth backends, but returns immediately if any
+ backend returns True. Thus, a user who has permission from a single
+ auth backend is assumed to have permission in general. If an object is
+ provided, permissions for this specific object are checked.
+ """
+
+ # Active superusers have all permissions.
+ if self.is_active and self.is_superuser:
+ return True
+
+ # Otherwise we need to check the backends.
+ return _user_has_perm(self, perm, obj)
+
+ def has_module_perms(self, app_label):
+ """
+ Returns True if the user has any permissions in the given app label.
+ Uses pretty much the same logic as has_perm, above.
+ """
+ # Active superusers have all permissions.
+ if self.is_active and self.is_superuser:
+ return True
+
+ return _user_has_module_perms(self, app_label)
+
+ def email_user(self, subject, message, from_email=None):
+ "Sends an e-mail to this User."
+ from django.core.mail import send_mail
+ send_mail(subject, message, from_email, [self.email])
+
+ def get_profile(self):
+ """
+ Returns site-specific profile for this user. Raises
+ SiteProfileNotAvailable if this site does not allow profiles.
+ """
+ if not hasattr(self, '_profile_cache'):
+ from django.conf import settings
+ if not getattr(settings, 'AUTH_PROFILE_MODULE', False):
+ raise SiteProfileNotAvailable('You need to set AUTH_PROFILE_MO'
+ 'DULE in your project settings')
+ try:
+ app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.')
+ except ValueError:
+ raise SiteProfileNotAvailable('app_label and model_name should'
+ ' be separated by a dot in the AUTH_PROFILE_MODULE set'
+ 'ting')
+
+ try:
+ model = models.get_model(app_label, model_name)
+ if model is None:
+ raise SiteProfileNotAvailable('Unable to load the profile '
+ 'model, check AUTH_PROFILE_MODULE in your project sett'
+ 'ings')
+ self._profile_cache = model._default_manager.using(self._state.db).get(user__id__exact=self.id)
+ self._profile_cache.user = self
+ except (ImportError, ImproperlyConfigured):
+ raise SiteProfileNotAvailable
+ return self._profile_cache
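+
+ # For example (app and model names are hypothetical): with
+ # AUTH_PROFILE_MODULE = 'accounts.UserProfile' in settings.py,
+ # get_profile() loads the accounts.UserProfile model and returns the
+ # instance whose user id matches this user's id.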
+
+
+class MongoEngineBackend(object):
+ """Authenticate using MongoEngine and mongoengine.django.auth.User.
+ """
+
+ supports_object_permissions = False
+ supports_anonymous_user = False
+ supports_inactive_user = False
+ _user_doc = False
+
+ def authenticate(self, username=None, password=None):
+ user = self.user_document.objects(username=username).first()
+ if user:
+ if password and user.check_password(password):
+ backend = auth.get_backends()[0]
+ user.backend = "%s.%s" % (backend.__module__, backend.__class__.__name__)
+ return user
+ return None
+
+ def get_user(self, user_id):
+ return self.user_document.objects.with_id(user_id)
+
+ @property
+ def user_document(self):
+ if self._user_doc is False:
+ from .mongo_auth.models import get_user_document
+ self._user_doc = get_user_document()
+ return self._user_doc
+
+def get_user(userid):
+ """Returns a User object from an id (User.id). Django's equivalent takes
+ request, but taking an id instead leaves it up to the developer to store
+ the id in any way they want (session, signed cookie, etc.)
+ """
+ if not userid:
+ return AnonymousUser()
+ return MongoEngineBackend().get_user(userid) or AnonymousUser()
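+
+# A minimal wiring sketch (assumes the caller keeps the user id in a signed
+# cookie; get_signed_cookie is Django's HttpRequest helper):
+#
+# def current_user(request):
+#     userid = request.get_signed_cookie('userid', default=None)
+#     return get_user(userid)  # falls back to AnonymousUser()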
diff --git a/awx/lib/site-packages/mongoengine/django/mongo_auth/__init__.py b/awx/lib/site-packages/mongoengine/django/mongo_auth/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/awx/lib/site-packages/mongoengine/django/mongo_auth/models.py b/awx/lib/site-packages/mongoengine/django/mongo_auth/models.py
new file mode 100644
index 0000000000..ad4ceff8df
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/django/mongo_auth/models.py
@@ -0,0 +1,119 @@
+from django.conf import settings
+from django.contrib.auth.hashers import make_password
+from django.contrib.auth.models import UserManager
+from django.core.exceptions import ImproperlyConfigured
+from django.db import models
+try:
+ from django.utils.module_loading import import_module
+except ImportError:
+ """Handle older versions of Django"""
+ from django.utils.importlib import import_module
+from django.utils.translation import ugettext_lazy as _
+
+
+__all__ = (
+ 'get_user_document',
+)
+
+
+MONGOENGINE_USER_DOCUMENT = getattr(
+ settings, 'MONGOENGINE_USER_DOCUMENT', 'mongoengine.django.auth.User')
+
+
+def get_user_document():
+ """Get the user document class used for authentication.
+
+ This is the class defined in settings.MONGOENGINE_USER_DOCUMENT, which
+ defaults to `mongoengine.django.auth.User`.
+
+ """
+
+ name = MONGOENGINE_USER_DOCUMENT
+ dot = name.rindex('.')
+ module = import_module(name[:dot])
+ return getattr(module, name[dot + 1:])
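+
+# For example (a hypothetical custom document): with
+# MONGOENGINE_USER_DOCUMENT = 'myapp.documents.CustomUser' in settings.py,
+# get_user_document() imports myapp.documents and returns its CustomUser
+# class.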
+
+
+class MongoUserManager(UserManager):
+ """A User manager wich allows the use of MongoEngine documents in Django.
+
+ To use the manager, you must tell django.contrib.auth to use MongoUser as
+ the user model. In you settings.py, you need:
+
+ INSTALLED_APPS = (
+ ...
+ 'django.contrib.auth',
+ 'mongoengine.django.mongo_auth',
+ ...
+ )
+ AUTH_USER_MODEL = 'mongo_auth.MongoUser'
+
+ Django will use the model object to access the custom Manager, which will
+ replace the original queryset with MongoEngine querysets.
+
+ By default, mongoengine.django.auth.User will be used to store users. You
+ can specify another document class in MONGOENGINE_USER_DOCUMENT in your
+ settings.py.
+
+ The User Document class has the same requirements as a standard custom user
+ model: https://docs.djangoproject.com/en/dev/topics/auth/customizing/
+
+ In particular, the User Document class must define USERNAME_FIELD and
+ REQUIRED_FIELDS.
+
+ `AUTH_USER_MODEL` has been added in Django 1.5.
+
+ """
+
+ def contribute_to_class(self, model, name):
+ super(MongoUserManager, self).contribute_to_class(model, name)
+ self.dj_model = self.model
+ self.model = get_user_document()
+
+ self.dj_model.USERNAME_FIELD = self.model.USERNAME_FIELD
+ username = models.CharField(_('username'), max_length=30, unique=True)
+ username.contribute_to_class(self.dj_model, self.dj_model.USERNAME_FIELD)
+
+ self.dj_model.REQUIRED_FIELDS = self.model.REQUIRED_FIELDS
+ for name in self.dj_model.REQUIRED_FIELDS:
+ field = models.CharField(_(name), max_length=30)
+ field.contribute_to_class(self.dj_model, name)
+
+
+ def get(self, *args, **kwargs):
+ try:
+ return self.get_query_set().get(*args, **kwargs)
+ except self.model.DoesNotExist:
+ # ModelBackend expects this exception
+ raise self.dj_model.DoesNotExist
+
+ @property
+ def db(self):
+ raise NotImplementedError
+
+ def get_empty_query_set(self):
+ return self.model.objects.none()
+
+ def get_query_set(self):
+ return self.model.objects
+
+
+class MongoUser(models.Model):
+ """"Dummy user model for Django.
+
+ MongoUser is used to replace Django's UserManager with MongoUserManager.
+ The actual user document class is mongoengine.django.auth.User or any
+ other document class specified in MONGOENGINE_USER_DOCUMENT.
+
+ To get the user document class, use `get_user_document()`.
+
+ """
+
+ objects = MongoUserManager()
+
+ class Meta:
+ app_label = 'mongo_auth'
+
+ def set_password(self, password):
+ """Doesn't do anything, but works around the issue with Django 1.6."""
+ make_password(password)
diff --git a/awx/lib/site-packages/mongoengine/django/sessions.py b/awx/lib/site-packages/mongoengine/django/sessions.py
new file mode 100644
index 0000000000..f260951beb
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/django/sessions.py
@@ -0,0 +1,124 @@
+from bson import json_util
+from django.conf import settings
+from django.contrib.sessions.backends.base import SessionBase, CreateError
+from django.core.exceptions import SuspiciousOperation
+try:
+ from django.utils.encoding import force_unicode
+except ImportError:
+ from django.utils.encoding import force_text as force_unicode
+
+from mongoengine.document import Document
+from mongoengine import fields
+from mongoengine.queryset import OperationError
+from mongoengine.connection import DEFAULT_CONNECTION_NAME
+
+from .utils import datetime_now
+
+
+MONGOENGINE_SESSION_DB_ALIAS = getattr(
+ settings, 'MONGOENGINE_SESSION_DB_ALIAS',
+ DEFAULT_CONNECTION_NAME)
+
+# a setting for the name of the collection used to store sessions
+MONGOENGINE_SESSION_COLLECTION = getattr(
+ settings, 'MONGOENGINE_SESSION_COLLECTION',
+ 'django_session')
+
+# a setting for whether session data is stored encoded or not
+MONGOENGINE_SESSION_DATA_ENCODE = getattr(
+ settings, 'MONGOENGINE_SESSION_DATA_ENCODE',
+ True)
+
+
+class MongoSession(Document):
+ session_key = fields.StringField(primary_key=True, max_length=40)
+ session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \
+ else fields.DictField()
+ expire_date = fields.DateTimeField()
+
+ meta = {
+ 'collection': MONGOENGINE_SESSION_COLLECTION,
+ 'db_alias': MONGOENGINE_SESSION_DB_ALIAS,
+ 'allow_inheritance': False,
+ 'indexes': [
+ {
+ 'fields': ['expire_date'],
+ 'expireAfterSeconds': 0
+ }
+ ]
+ }
+
+ def get_decoded(self):
+ return SessionStore().decode(self.session_data)
+
+
+class SessionStore(SessionBase):
+ """A MongoEngine-based session store for Django.
+ """
+
+ def _get_session(self, *args, **kwargs):
+ sess = super(SessionStore, self)._get_session(*args, **kwargs)
+ if sess.get('_auth_user_id', None):
+ sess['_auth_user_id'] = str(sess.get('_auth_user_id'))
+ return sess
+
+ def load(self):
+ try:
+ s = MongoSession.objects(session_key=self.session_key,
+ expire_date__gt=datetime_now)[0]
+ if MONGOENGINE_SESSION_DATA_ENCODE:
+ return self.decode(force_unicode(s.session_data))
+ else:
+ return s.session_data
+ except (IndexError, SuspiciousOperation):
+ self.create()
+ return {}
+
+ def exists(self, session_key):
+ return bool(MongoSession.objects(session_key=session_key).first())
+
+ def create(self):
+ while True:
+ self._session_key = self._get_new_session_key()
+ try:
+ self.save(must_create=True)
+ except CreateError:
+ continue
+ self.modified = True
+ self._session_cache = {}
+ return
+
+ def save(self, must_create=False):
+ if self.session_key is None:
+ self._session_key = self._get_new_session_key()
+ s = MongoSession(session_key=self.session_key)
+ if MONGOENGINE_SESSION_DATA_ENCODE:
+ s.session_data = self.encode(self._get_session(no_load=must_create))
+ else:
+ s.session_data = self._get_session(no_load=must_create)
+ s.expire_date = self.get_expiry_date()
+ try:
+ s.save(force_insert=must_create)
+ except OperationError:
+ if must_create:
+ raise CreateError
+ raise
+
+ def delete(self, session_key=None):
+ if session_key is None:
+ if self.session_key is None:
+ return
+ session_key = self.session_key
+ MongoSession.objects(session_key=session_key).delete()
+
+
+class BSONSerializer(object):
+ """
+ Serializer that can handle BSON types (eg ObjectId).
+ """
+ def dumps(self, obj):
+ return json_util.dumps(obj, separators=(',', ':')).encode('ascii')
+
+ def loads(self, data):
+ return json_util.loads(data.decode('ascii'))
+
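+# Usage sketch: point Django at this session engine and serializer from
+# settings.py (SESSION_ENGINE / SESSION_SERIALIZER are standard Django
+# settings; the dotted paths assume this module's import location):
+#
+# SESSION_ENGINE = 'mongoengine.django.sessions'
+# SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer'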
diff --git a/awx/lib/site-packages/mongoengine/django/shortcuts.py b/awx/lib/site-packages/mongoengine/django/shortcuts.py
new file mode 100644
index 0000000000..9cc8370b00
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/django/shortcuts.py
@@ -0,0 +1,47 @@
+from mongoengine.queryset import QuerySet
+from mongoengine.base import BaseDocument
+from mongoengine.errors import ValidationError
+
+def _get_queryset(cls):
+ """Inspired by django.shortcuts.*"""
+ if isinstance(cls, QuerySet):
+ return cls
+ else:
+ return cls.objects
+
+def get_document_or_404(cls, *args, **kwargs):
+ """
+ Uses get() to return a document, or raises an Http404 exception if the
+ document does not exist.
+
+ cls may be a Document or QuerySet object. All other passed
+ arguments and keyword arguments are used in the get() query.
+
+ Note: Like with get(), a MultipleObjectsReturned will be raised if more
+ than one object is found.
+
+ Inspired by django.shortcuts.*
+ """
+ queryset = _get_queryset(cls)
+ try:
+ return queryset.get(*args, **kwargs)
+ except (queryset._document.DoesNotExist, ValidationError):
+ from django.http import Http404
+ raise Http404('No %s matches the given query.' % queryset._document._class_name)
+
+def get_list_or_404(cls, *args, **kwargs):
+ """
+ Uses filter() to return a list of documents, or raises an Http404
+ exception if the list is empty.
+
+ cls may be a Document or QuerySet object. All other passed
+ arguments and keyword arguments are used in the filter() query.
+
+ Inspired by django.shortcuts.*
+ """
+ queryset = _get_queryset(cls)
+ obj_list = list(queryset.filter(*args, **kwargs))
+ if not obj_list:
+ from django.http import Http404
+ raise Http404('No %s matches the given query.' % queryset._document._class_name)
+ return obj_list
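+
+# Illustrative sketch in a Django view (Article and its fields are
+# hypothetical):
+#
+# def article_detail(request, slug):
+#     article = get_document_or_404(Article, slug=slug)
+#     ...
+#
+# def published_articles(request):
+#     articles = get_list_or_404(Article, published=True)
+#     ...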
diff --git a/awx/lib/site-packages/mongoengine/django/storage.py b/awx/lib/site-packages/mongoengine/django/storage.py
new file mode 100644
index 0000000000..9df6f9e80c
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/django/storage.py
@@ -0,0 +1,112 @@
+import os
+import itertools
+import urlparse
+
+from mongoengine import *
+from django.conf import settings
+from django.core.files.storage import Storage
+from django.core.exceptions import ImproperlyConfigured
+
+
+class FileDocument(Document):
+ """A document used to store a single file in GridFS.
+ """
+ file = FileField()
+
+
+class GridFSStorage(Storage):
+ """A custom storage backend to store files in GridFS
+ """
+
+ def __init__(self, base_url=None):
+
+ if base_url is None:
+ base_url = settings.MEDIA_URL
+ self.base_url = base_url
+ self.document = FileDocument
+ self.field = 'file'
+
+ def delete(self, name):
+ """Deletes the specified file from the storage system.
+ """
+ if self.exists(name):
+ doc = self._get_doc_with_name(name)
+ field = getattr(doc, self.field)
+ field.delete() # Delete the file contents from GridFS
+ doc.delete() # Delete the FileDocument itself
+
+ def exists(self, name):
+ """Returns True if a file referened by the given name already exists in the
+ storage system, or False if the name is available for a new file.
+ """
+ doc = self._get_doc_with_name(name)
+ if doc:
+ field = getattr(doc, self.field)
+ return bool(field.name)
+ else:
+ return False
+
+ def listdir(self, path=None):
+ """Lists the contents of the specified path, returning a 2-tuple of lists;
+ the first item being directories, the second item being files.
+ """
+ def name(doc):
+ return getattr(doc, self.field).name
+ docs = self.document.objects
+ return [], [name(d) for d in docs if name(d)]
+
+ def size(self, name):
+ """Returns the total size, in bytes, of the file specified by name.
+ """
+ doc = self._get_doc_with_name(name)
+ if doc:
+ return getattr(doc, self.field).length
+ else:
+ raise ValueError("No such file or directory: '%s'" % name)
+
+ def url(self, name):
+ """Returns an absolute URL where the file's contents can be accessed
+ directly by a web browser.
+ """
+ if self.base_url is None:
+ raise ValueError("This file is not accessible via a URL.")
+ return urlparse.urljoin(self.base_url, name).replace('\\', '/')
+
+ def _get_doc_with_name(self, name):
+ """Find the documents in the store with the given name
+ """
+ docs = self.document.objects
+ doc = [d for d in docs if hasattr(getattr(d, self.field), 'name') and getattr(d, self.field).name == name]
+ if doc:
+ return doc[0]
+ else:
+ return None
+
+ def _open(self, name, mode='rb'):
+ doc = self._get_doc_with_name(name)
+ if doc:
+ return getattr(doc, self.field)
+ else:
+ raise ValueError("No file found with the name '%s'." % name)
+
+ def get_available_name(self, name):
+ """Returns a filename that's free on the target storage system, and
+ available for new content to be written to.
+ """
+ file_root, file_ext = os.path.splitext(name)
+ # If the filename already exists, add an underscore and a number (before
+ # the file extension, if one exists) to the filename until the generated
+ # filename doesn't exist.
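+ # For example, if 'photo.jpg' is taken this produces 'photo_1.jpg',
+ # then 'photo_2.jpg', and so on.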
+ count = itertools.count(1)
+ while self.exists(name):
+ # file_ext includes the dot.
+ name = os.path.join("%s_%s%s" % (file_root, count.next(), file_ext))
+
+ return name
+
+ def _save(self, name, content):
+ doc = self.document()
+ getattr(doc, self.field).put(content, filename=name)
+ doc.save()
+
+ return name
diff --git a/awx/lib/site-packages/mongoengine/django/tests.py b/awx/lib/site-packages/mongoengine/django/tests.py
new file mode 100644
index 0000000000..b130acc824
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/django/tests.py
@@ -0,0 +1,31 @@
+#coding: utf-8
+
+from unittest import TestCase
+
+from mongoengine import connect
+from mongoengine.connection import get_db
+
+
+class MongoTestCase(TestCase):
+ """
+ TestCase class that clears the collections between tests
+ """
+
+ @property
+ def db_name(self):
+ from django.conf import settings
+ return 'test_%s' % getattr(settings, 'MONGO_DATABASE_NAME', 'dummy')
+
+ def __init__(self, methodName='runTest'):
+ connect(self.db_name)
+ self.db = get_db()
+ super(MongoTestCase, self).__init__(methodName)
+
+ def dropCollections(self):
+ for collection in self.db.collection_names():
+ if collection.startswith('system.'):
+ continue
+ self.db.drop_collection(collection)
+
+ def tearDown(self):
+ self.dropCollections()
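+
+# A minimal usage sketch (MyDocument is a hypothetical Document subclass):
+# each test runs against a clean database because tearDown drops every
+# non-system collection.
+#
+# class MyDocumentTest(MongoTestCase):
+#     def test_save(self):
+#         MyDocument(name='x').save()
+#         self.assertEqual(MyDocument.objects.count(), 1)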
diff --git a/awx/lib/site-packages/mongoengine/django/utils.py b/awx/lib/site-packages/mongoengine/django/utils.py
new file mode 100644
index 0000000000..d3ef8a4b77
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/django/utils.py
@@ -0,0 +1,6 @@
+try:
+ # django >= 1.4
+ from django.utils.timezone import now as datetime_now
+except ImportError:
+ from datetime import datetime
+ datetime_now = datetime.now
diff --git a/awx/lib/site-packages/mongoengine/document.py b/awx/lib/site-packages/mongoengine/document.py
new file mode 100644
index 0000000000..eea5dabc1a
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/document.py
@@ -0,0 +1,869 @@
+import warnings
+
+import hashlib
+import pymongo
+import re
+
+from pymongo.read_preferences import ReadPreference
+from bson import ObjectId
+from bson.dbref import DBRef
+from mongoengine import signals
+from mongoengine.common import _import_class
+from mongoengine.base import (
+ DocumentMetaclass,
+ TopLevelDocumentMetaclass,
+ BaseDocument,
+ BaseDict,
+ BaseList,
+ EmbeddedDocumentList,
+ ALLOW_INHERITANCE,
+ get_document
+)
+from mongoengine.errors import ValidationError, InvalidQueryError, InvalidDocumentError
+from mongoengine.queryset import (OperationError, NotUniqueError,
+ QuerySet, transform)
+from mongoengine.connection import get_db, DEFAULT_CONNECTION_NAME
+from mongoengine.context_managers import switch_db, switch_collection
+
+__all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument',
+ 'DynamicEmbeddedDocument', 'OperationError',
+ 'InvalidCollectionError', 'NotUniqueError', 'MapReduceDocument')
+
+
+def includes_cls(fields):
+ """ Helper function used for ensuring and comparing indexes
+ """
+
+ first_field = None
+ if len(fields):
+ if isinstance(fields[0], basestring):
+ first_field = fields[0]
+ elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
+ first_field = fields[0][0]
+ return first_field == '_cls'
+
+
+class InvalidCollectionError(Exception):
+ pass
+
+
+class EmbeddedDocument(BaseDocument):
+
+ """A :class:`~mongoengine.Document` that isn't stored in its own
+ collection. :class:`~mongoengine.EmbeddedDocument`\ s should be used as
+ fields on :class:`~mongoengine.Document`\ s through the
+ :class:`~mongoengine.EmbeddedDocumentField` field type.
+
+ A :class:`~mongoengine.EmbeddedDocument` subclass may itself be subclassed
+ to create a specialised version of the embedded document that will be
+ stored in the same collection. To facilitate this behaviour a `_cls`
+ field is added to documents (hidden through the MongoEngine interface).
+ To disable this behaviour and remove the dependence on the presence of
+ `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
+ dictionary.
+ """
+
+ __slots__ = ('_instance')
+
+ # The __metaclass__ attribute is removed by 2to3 when running with Python3
+ # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+ my_metaclass = DocumentMetaclass
+ __metaclass__ = DocumentMetaclass
+
+ def __init__(self, *args, **kwargs):
+ super(EmbeddedDocument, self).__init__(*args, **kwargs)
+ self._instance = None
+ self._changed_fields = []
+
+ def __eq__(self, other):
+ if isinstance(other, self.__class__):
+ return self._data == other._data
+ return False
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ def save(self, *args, **kwargs):
+ self._instance.save(*args, **kwargs)
+
+ def reload(self, *args, **kwargs):
+ self._instance.reload(*args, **kwargs)
+
+
+class Document(BaseDocument):
+
+ """The base class used for defining the structure and properties of
+ collections of documents stored in MongoDB. Inherit from this class, and
+ add fields as class attributes to define a document's structure.
+ Individual documents may then be created by making instances of the
+ :class:`~mongoengine.Document` subclass.
+
+ By default, the MongoDB collection used to store documents created using a
+ :class:`~mongoengine.Document` subclass will be the name of the subclass
+ converted to lowercase. A different collection may be specified by
+ providing :attr:`collection` to the :attr:`meta` dictionary in the class
+ definition.
+
+ A :class:`~mongoengine.Document` subclass may itself be subclassed to
+ create a specialised version of the document that will be stored in the
+ same collection. To facilitate this behaviour a `_cls`
+ field is added to documents (hidden through the MongoEngine interface).
+ To disable this behaviour and remove the dependence on the presence of
+ `_cls` set :attr:`allow_inheritance` to ``False`` in the :attr:`meta`
+ dictionary.
+
+ A :class:`~mongoengine.Document` may use a **Capped Collection** by
+ specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
+ dictionary. :attr:`max_documents` is the maximum number of documents that
+ is allowed to be stored in the collection, and :attr:`max_size` is the
+ maximum size of the collection in bytes. If :attr:`max_size` is not
+ specified and :attr:`max_documents` is, :attr:`max_size` defaults to
+ 10000000 bytes (10MB).
+
+ Indexes may be created by specifying :attr:`indexes` in the :attr:`meta`
+ dictionary. The value should be a list of field names or tuples of field
+ names. Index direction may be specified by prefixing the field names with
+ a **+** or **-** sign.
+
+ Automatic index creation can be disabled by specifying
+ :attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
+ False then indexes will not be created by MongoEngine. This is useful in
+ production systems where index creation is performed as part of a
+ deployment system.
+
+ By default, _cls will be added to the start of every index (that
+ doesn't contain a list) if allow_inheritance is True. This can be
+ disabled by either setting cls to False on the specific index or
+ by setting index_cls to False on the meta dictionary for the document.
+ """
+
+ # The __metaclass__ attribute is removed by 2to3 when running with Python3
+ # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+ my_metaclass = TopLevelDocumentMetaclass
+ __metaclass__ = TopLevelDocumentMetaclass
+
+ __slots__ = ('__objects')
+
+ def pk():
+ """Primary key alias
+ """
+
+ def fget(self):
+ return getattr(self, self._meta['id_field'])
+
+ def fset(self, value):
+ return setattr(self, self._meta['id_field'], value)
+ return property(fget, fset)
+ pk = pk()
+
+ @classmethod
+ def _get_db(cls):
+ """Some Model using other db_alias"""
+ return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME))
+
+ @classmethod
+ def _get_collection(cls):
+ """Returns the collection for the document."""
+ if not hasattr(cls, '_collection') or cls._collection is None:
+ db = cls._get_db()
+ collection_name = cls._get_collection_name()
+ # Create collection as a capped collection if specified
+ if cls._meta['max_size'] or cls._meta['max_documents']:
+ # Get max document limit and max byte size from meta
+ max_size = cls._meta['max_size'] or 10000000 # 10MB default
+ max_documents = cls._meta['max_documents']
+
+ if collection_name in db.collection_names():
+ cls._collection = db[collection_name]
+ # The collection already exists, check if its capped
+ # options match the specified capped options
+ options = cls._collection.options()
+ if options.get('max') != max_documents or \
+ options.get('size') != max_size:
+ msg = (('Cannot create collection "%s" as a capped '
+ 'collection as it already exists')
+ % cls._collection)
+ raise InvalidCollectionError(msg)
+ else:
+ # Create the collection as a capped collection
+ opts = {'capped': True, 'size': max_size}
+ if max_documents:
+ opts['max'] = max_documents
+ cls._collection = db.create_collection(
+ collection_name, **opts
+ )
+ else:
+ cls._collection = db[collection_name]
+ if cls._meta.get('auto_create_index', True):
+ cls.ensure_indexes()
+ return cls._collection
+
+ def modify(self, query={}, **update):
+ """Perform an atomic update of the document in the database and reload
+ the document object using the updated version.
+
+ Returns True if the document has been updated or False if the document
+ in the database doesn't match the query.
+
+ .. note:: All unsaved changes that have been made to the document are
+ discarded if the method returns True.
+
+ :param query: the update will be performed only if the document in the
+ database matches the query
+ :param update: Django-style update keyword arguments
+ """
+
+ if self.pk is None:
+ raise InvalidDocumentError("The document does not have a primary key.")
+
+ id_field = self._meta["id_field"]
+ query = query.copy() if isinstance(query, dict) else query.to_query(self)
+
+ if id_field not in query:
+ query[id_field] = self.pk
+ elif query[id_field] != self.pk:
+ raise InvalidQueryError("Invalid document modify query: it must modify only this document.")
+
+ updated = self._qs(**query).modify(new=True, **update)
+ if updated is None:
+ return False
+
+ for field in self._fields_ordered:
+ setattr(self, field, self._reload(field, updated[field]))
+
+ self._changed_fields = updated._changed_fields
+ self._created = False
+
+ return True
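+
+ # Illustrative sketch (assumes a hypothetical IntField named 'version'
+ # on the document): an optimistic-concurrency style update.
+ #
+ # if doc.modify(query={'version': doc.version},
+ #               inc__version=1, set__state='done'):
+ #     ...  # the update matched and doc has been reloaded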
+
+ def save(self, force_insert=False, validate=True, clean=True,
+ write_concern=None, cascade=None, cascade_kwargs=None,
+ _refs=None, save_condition=None, **kwargs):
+ """Save the :class:`~mongoengine.Document` to the database. If the
+ document already exists, it will be updated, otherwise it will be
+ created.
+
+ :param force_insert: only try to create a new document, don't allow
+ updates of existing documents
+ :param validate: validates the document; set to ``False`` to skip.
+ :param clean: call the document clean method, requires `validate` to be
+ True.
+ :param write_concern: Extra keyword arguments are passed down to
+ :meth:`~pymongo.collection.Collection.save` OR
+ :meth:`~pymongo.collection.Collection.insert`
+ which will be used as options for the resultant
+ ``getLastError`` command. For example,
+ ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
+ wait until at least two servers have recorded the write and
+ will force an fsync on the primary server.
+ :param cascade: Sets the flag for cascading saves. You can set a
+ default by setting "cascade" in the document __meta__
+ :param cascade_kwargs: (optional) kwargs dictionary to be passed through
+ to cascading saves. Implies ``cascade=True``.
+ :param _refs: A list of processed references used in cascading saves
+ :param save_condition: only perform save if matching record in db
+ satisfies condition(s) (e.g., version number)
+
+ .. versionchanged:: 0.5
+ In existing documents it only saves changed fields using
+ set / unset. Saves are cascaded and any
+ :class:`~bson.dbref.DBRef` objects that have changes are
+ saved as well.
+ .. versionchanged:: 0.6
+ Added cascading saves
+ .. versionchanged:: 0.8
+ Cascade saves are optional and default to False. For fine-grained
+ control you can enable them per document by setting
+ meta['cascade'] = True. You can also pass different kwargs to
+ the cascade save using cascade_kwargs, which overwrites the
+ existing kwargs with custom values.
+ .. versionchanged:: 0.8.5
+ Optional save_condition that only overwrites existing documents
+ if the condition is satisfied in the current db record.
+ """
+ signals.pre_save.send(self.__class__, document=self)
+
+ if validate:
+ self.validate(clean=clean)
+
+ if write_concern is None:
+ write_concern = {"w": 1}
+
+ doc = self.to_mongo()
+
+ created = ('_id' not in doc or self._created or force_insert)
+
+ signals.pre_save_post_validation.send(self.__class__, document=self,
+ created=created)
+
+ try:
+ collection = self._get_collection()
+ if self._meta.get('auto_create_index', True):
+ self.ensure_indexes()
+ if created:
+ if force_insert:
+ object_id = collection.insert(doc, **write_concern)
+ else:
+ object_id = collection.save(doc, **write_concern)
+ else:
+ object_id = doc['_id']
+ updates, removals = self._delta()
+ # Need to add shard key to query, or you get an error
+ if save_condition is not None:
+ select_dict = transform.query(self.__class__,
+ **save_condition)
+ else:
+ select_dict = {}
+ select_dict['_id'] = object_id
+ shard_key = self.__class__._meta.get('shard_key', tuple())
+ for k in shard_key:
+ actual_key = self._db_field_map.get(k, k)
+ select_dict[actual_key] = doc[actual_key]
+
+ def is_new_object(last_error):
+ if last_error is not None:
+ updated = last_error.get("updatedExisting")
+ if updated is not None:
+ return not updated
+ return created
+
+ update_query = {}
+
+ if updates:
+ update_query["$set"] = updates
+ if removals:
+ update_query["$unset"] = removals
+ if updates or removals:
+ upsert = save_condition is None
+ last_error = collection.update(select_dict, update_query,
+ upsert=upsert, **write_concern)
+ created = is_new_object(last_error)
+
+ if cascade is None:
+ cascade = self._meta.get(
+ 'cascade', False) or cascade_kwargs is not None
+
+ if cascade:
+ kwargs = {
+ "force_insert": force_insert,
+ "validate": validate,
+ "write_concern": write_concern,
+ "cascade": cascade
+ }
+ if cascade_kwargs: # Allow granular control over cascades
+ kwargs.update(cascade_kwargs)
+ kwargs['_refs'] = _refs
+ self.cascade_save(**kwargs)
+ except pymongo.errors.DuplicateKeyError, err:
+ message = u'Tried to save duplicate unique keys (%s)'
+ raise NotUniqueError(message % unicode(err))
+ except pymongo.errors.OperationFailure, err:
+ message = 'Could not save document (%s)'
+ if re.match('^E1100[01] duplicate key', unicode(err)):
+ # E11000 - duplicate key error index
+ # E11001 - duplicate key on update
+ message = u'Tried to save duplicate unique keys (%s)'
+ raise NotUniqueError(message % unicode(err))
+ raise OperationError(message % unicode(err))
+ id_field = self._meta['id_field']
+ if created or id_field not in self._meta.get('shard_key', []):
+ self[id_field] = self._fields[id_field].to_python(object_id)
+
+ signals.post_save.send(self.__class__, document=self, created=created)
+ self._clear_changed_fields()
+ self._created = False
+ return self
+
+ def cascade_save(self, *args, **kwargs):
+ """Recursively saves any references /
+ generic references on an objects"""
+ _refs = kwargs.get('_refs', []) or []
+
+ ReferenceField = _import_class('ReferenceField')
+ GenericReferenceField = _import_class('GenericReferenceField')
+
+ for name, cls in self._fields.items():
+ if not isinstance(cls, (ReferenceField,
+ GenericReferenceField)):
+ continue
+
+ ref = self._data.get(name)
+ if not ref or isinstance(ref, DBRef):
+ continue
+
+ if not getattr(ref, '_changed_fields', True):
+ continue
+
+ ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data))
+ if ref and ref_id not in _refs:
+ _refs.append(ref_id)
+ kwargs["_refs"] = _refs
+ ref.save(**kwargs)
+ ref._changed_fields = []
+
+ @property
+ def _qs(self):
+ """
+ Returns the queryset to use for updating / reloading / deletions
+ """
+ if not hasattr(self, '__objects'):
+ self.__objects = QuerySet(self, self._get_collection())
+ return self.__objects
+
+ @property
+ def _object_key(self):
+ """Dict to identify object in collection
+ """
+ select_dict = {'pk': self.pk}
+ shard_key = self.__class__._meta.get('shard_key', tuple())
+ for k in shard_key:
+ select_dict[k] = getattr(self, k)
+ return select_dict
+
+ def update(self, **kwargs):
+ """Performs an update on the :class:`~mongoengine.Document`
+ A convenience wrapper to :meth:`~mongoengine.QuerySet.update`.
+
+ Raises :class:`OperationError` if called on an object that has not yet
+ been saved.
+ """
+ if not self.pk:
+ if kwargs.get('upsert', False):
+ query = self.to_mongo()
+ if "_cls" in query:
+ del(query["_cls"])
+ return self._qs.filter(**query).update_one(**kwargs)
+ else:
+ raise OperationError(
+ 'attempt to update a document not yet saved')
+
+ # Need to add shard key to query, or you get an error
+ return self._qs.filter(**self._object_key).update_one(**kwargs)
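+
+ # e.g. (hypothetical field names):
+ # user.update(set__email='new@example.com', inc__login_count=1)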
+
+ def delete(self, **write_concern):
+ """Delete the :class:`~mongoengine.Document` from the database. This
+ will only take effect if the document has been previously saved.
+
+ :param write_concern: Extra keyword arguments are passed down which
+ will be used as options for the resultant
+ ``getLastError`` command. For example,
+ ``delete(..., write_concern={w: 2, fsync: True})`` will
+ wait until at least two servers have recorded the write and
+ will force an fsync on the primary server.
+ """
+ signals.pre_delete.send(self.__class__, document=self)
+
+ try:
+ self._qs.filter(
+ **self._object_key).delete(write_concern=write_concern, _from_doc_delete=True)
+ except pymongo.errors.OperationFailure, err:
+ message = u'Could not delete document (%s)' % err.message
+ raise OperationError(message)
+ signals.post_delete.send(self.__class__, document=self)
+
+ def switch_db(self, db_alias):
+ """
+ Temporarily switch the database for a document instance.
+
+ Only really useful for archiving off data and calling `save()`::
+
+ user = User.objects.get(id=user_id)
+ user.switch_db('archive-db')
+ user.save()
+
+ :param str db_alias: The database alias to use for saving the document
+
+ .. seealso::
+ Use :class:`~mongoengine.context_managers.switch_collection`
+ if you need to read from another collection
+ """
+ with switch_db(self.__class__, db_alias) as cls:
+ collection = cls._get_collection()
+ db = cls._get_db()
+ self._get_collection = lambda: collection
+ self._get_db = lambda: db
+ self._collection = collection
+ self._created = True
+ self.__objects = self._qs
+ self.__objects._collection_obj = collection
+ return self
+
+ def switch_collection(self, collection_name):
+ """
+ Temporarily switch the collection for a document instance.
+
+ Only really useful for archiving off data and calling `save()`::
+
+ user = User.objects.get(id=user_id)
+ user.switch_collection('old-users')
+ user.save()
+
+ :param str collection_name: The database alias to use for saving the
+ document
+
+ .. seealso::
+ Use :class:`~mongoengine.context_managers.switch_db`
+ if you need to read from another database
+ """
+ with switch_collection(self.__class__, collection_name) as cls:
+ collection = cls._get_collection()
+ self._get_collection = lambda: collection
+ self._collection = collection
+ self._created = True
+ self.__objects = self._qs
+ self.__objects._collection_obj = collection
+ return self
+
+ def select_related(self, max_depth=1):
+ """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
+ a maximum depth in order to cut down the number of queries to MongoDB.
+
+ .. versionadded:: 0.5
+ """
+ DeReference = _import_class('DeReference')
+ DeReference()([self], max_depth + 1)
+ return self
+
+ def reload(self, *fields, **kwargs):
+ """Reloads all attributes from the database.
+
+ :param fields: (optional) args list of fields to reload
+ :param max_depth: (optional) depth of dereferencing to follow
+
+ .. versionadded:: 0.1.2
+ .. versionchanged:: 0.6 Now chainable
+ .. versionchanged:: 0.9 Can provide specific fields to reload
+ """
+ max_depth = 1
+ if fields and isinstance(fields[0], int):
+ max_depth = fields[0]
+ fields = fields[1:]
+ elif "max_depth" in kwargs:
+ max_depth = kwargs["max_depth"]
+
+ if not self.pk:
+ raise self.DoesNotExist("Document does not exist")
+ obj = self._qs.read_preference(ReadPreference.PRIMARY).filter(
+ **self._object_key).only(*fields).limit(1
+ ).select_related(max_depth=max_depth)
+
+ if obj:
+ obj = obj[0]
+ else:
+ raise self.DoesNotExist("Document does not exist")
+
+ for field in self._fields_ordered:
+ if not fields or field in fields:
+ try:
+ setattr(self, field, self._reload(field, obj[field]))
+ except KeyError:
+ # If field is removed from the database while the object
+ # is in memory, a reload would cause a KeyError
+ # i.e. obj.update(unset__field=1) followed by obj.reload()
+ delattr(self, field)
+
+ self._changed_fields = obj._changed_fields
+ self._created = False
+ return self
+
+ def _reload(self, key, value):
+ """Used by :meth:`~mongoengine.Document.reload` to ensure the
+ correct instance is linked to self.
+ """
+ if isinstance(value, BaseDict):
+ value = [(k, self._reload(k, v)) for k, v in value.items()]
+ value = BaseDict(value, self, key)
+ elif isinstance(value, EmbeddedDocumentList):
+ value = [self._reload(key, v) for v in value]
+ value = EmbeddedDocumentList(value, self, key)
+ elif isinstance(value, BaseList):
+ value = [self._reload(key, v) for v in value]
+ value = BaseList(value, self, key)
+ elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)):
+ value._instance = None
+ value._changed_fields = []
+ return value
+
+ def to_dbref(self):
+ """Returns an instance of :class:`~bson.dbref.DBRef` useful in
+ `__raw__` queries."""
+ if not self.pk:
+ msg = "Only saved documents can have a valid dbref"
+ raise OperationError(msg)
+ return DBRef(self.__class__._get_collection_name(), self.pk)
+
+ @classmethod
+ def register_delete_rule(cls, document_cls, field_name, rule):
+ """This method registers the delete rules to apply when removing this
+ object.
+ """
+ classes = [get_document(class_name)
+ for class_name in cls._subclasses
+ if class_name != cls.__name__] + [cls]
+ documents = [get_document(class_name)
+ for class_name in document_cls._subclasses
+ if class_name != document_cls.__name__] + [document_cls]
+
+ for cls in classes:
+ for document_cls in documents:
+ delete_rules = cls._meta.get('delete_rules') or {}
+ delete_rules[(document_cls, field_name)] = rule
+ cls._meta['delete_rules'] = delete_rules
+
+ @classmethod
+ def drop_collection(cls):
+ """Drops the entire collection associated with this
+ :class:`~mongoengine.Document` type from the database.
+ """
+ cls._collection = None
+ db = cls._get_db()
+ db.drop_collection(cls._get_collection_name())
+
+ @classmethod
+ def ensure_index(cls, key_or_list, drop_dups=False, background=False,
+ **kwargs):
+ """Ensure that the given indexes are in place.
+
+ :param key_or_list: a single index key or a list of index keys (to
+ construct a multi-field index); keys may be prefixed with a **+**
+ or a **-** to determine the index ordering
+ """
+ index_spec = cls._build_index_spec(key_or_list)
+ index_spec = index_spec.copy()
+ fields = index_spec.pop('fields')
+ index_spec['drop_dups'] = drop_dups
+ index_spec['background'] = background
+ index_spec.update(kwargs)
+
+ return cls._get_collection().ensure_index(fields, **index_spec)
+
+ @classmethod
+ def ensure_indexes(cls):
+ """Checks the document meta data and ensures all the indexes exist.
+
+ Global defaults can be set in the meta - see :doc:`guide/defining-documents`
+
+ .. note:: You can disable automatic index creation by setting
+ `auto_create_index` to False in the documents meta data
+ """
+ background = cls._meta.get('index_background', False)
+ drop_dups = cls._meta.get('index_drop_dups', False)
+ index_opts = cls._meta.get('index_opts') or {}
+ index_cls = cls._meta.get('index_cls', True)
+
+ collection = cls._get_collection()
+ # 746: when connection is via mongos, the read preference is not necessarily an indication that
+ # this code runs on a secondary
+ if not collection.is_mongos and collection.read_preference > 1:
+ return
+
+ # determine if an index which we are creating includes
+ # _cls as its first field; if so, we can avoid creating
+ # an extra index on _cls, as mongodb will use the existing
+ # index to service queries against _cls
+ cls_indexed = False
+
+ # Ensure document-defined indexes are created
+ if cls._meta['index_specs']:
+ index_spec = cls._meta['index_specs']
+ for spec in index_spec:
+ spec = spec.copy()
+ fields = spec.pop('fields')
+ cls_indexed = cls_indexed or includes_cls(fields)
+ opts = index_opts.copy()
+ opts.update(spec)
+ collection.ensure_index(fields, background=background,
+ drop_dups=drop_dups, **opts)
+
+ # If _cls is being used (for polymorphism), it needs an index,
+ # only if another index doesn't begin with _cls
+ if (index_cls and not cls_indexed and
+ cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
+ collection.ensure_index('_cls', background=background,
+ **index_opts)
+
+ @classmethod
+ def list_indexes(cls, go_up=True, go_down=True):
+ """ Lists all of the indexes that should be created for given
+ collection. It includes all the indexes from super- and sub-classes.
+ """
+
+ if cls._meta.get('abstract'):
+ return []
+
+ # get all the base classes, subclasses and siblings
+ classes = []
+
+ def get_classes(cls):
+
+ if (cls not in classes and
+ isinstance(cls, TopLevelDocumentMetaclass)):
+ classes.append(cls)
+
+ for base_cls in cls.__bases__:
+ if (isinstance(base_cls, TopLevelDocumentMetaclass) and
+ base_cls != Document and
+ not base_cls._meta.get('abstract') and
+ base_cls._get_collection().full_name == cls._get_collection().full_name and
+ base_cls not in classes):
+ classes.append(base_cls)
+ get_classes(base_cls)
+ for subclass in cls.__subclasses__():
+ if (isinstance(subclass, TopLevelDocumentMetaclass) and
+ subclass._get_collection().full_name == cls._get_collection().full_name and
+ subclass not in classes):
+ classes.append(subclass)
+ get_classes(subclass)
+
+ get_classes(cls)
+
+ # get the indexes spec for all of the gathered classes
+ def get_indexes_spec(cls):
+ indexes = []
+
+ if cls._meta['index_specs']:
+ index_spec = cls._meta['index_specs']
+ for spec in index_spec:
+ spec = spec.copy()
+ fields = spec.pop('fields')
+ indexes.append(fields)
+ return indexes
+
+ indexes = []
+ for cls in classes:
+ for index in get_indexes_spec(cls):
+ if index not in indexes:
+ indexes.append(index)
+
+ # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed
+ if [(u'_id', 1)] not in indexes:
+ indexes.append([(u'_id', 1)])
+ if (cls._meta.get('index_cls', True) and
+ cls._meta.get('allow_inheritance', ALLOW_INHERITANCE) is True):
+ indexes.append([(u'_cls', 1)])
+
+ return indexes
+
+ @classmethod
+ def compare_indexes(cls):
+ """ Compares the indexes defined in MongoEngine with the ones existing
+ in the database. Returns any missing/extra indexes.
+ """
+
+ required = cls.list_indexes()
+ existing = [info['key']
+ for info in cls._get_collection().index_information().values()]
+ missing = [index for index in required if index not in existing]
+ extra = [index for index in existing if index not in required]
+
+ # if { _cls: 1 } is missing, make sure it's *really* necessary
+ if [(u'_cls', 1)] in missing:
+ cls_obsolete = False
+ for index in existing:
+ if includes_cls(index) and index not in extra:
+ cls_obsolete = True
+ break
+ if cls_obsolete:
+ missing.remove([(u'_cls', 1)])
+
+ return {'missing': missing, 'extra': extra}
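+
+ # Illustrative result shape ('title' is a hypothetical indexed field
+ # declared in meta but absent from the database):
+ #
+ # {'missing': [[(u'title', 1)]], 'extra': []}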
+
+
+class DynamicDocument(Document):
+
+ """A Dynamic Document class allowing flexible, expandable and uncontrolled
+ schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
+ way as an ordinary document but has expando style properties. Any data
+ passed or set against the :class:`~mongoengine.DynamicDocument` that is
+ not a field is automatically converted into a
+ :class:`~mongoengine.fields.DynamicField` and data can be attributed to that
+ field.
+
+ .. note::
+
+ There is one caveat on Dynamic Documents: fields cannot start with `_`
+ """
+
+ # The __metaclass__ attribute is removed by 2to3 when running with Python3
+ # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+ my_metaclass = TopLevelDocumentMetaclass
+ __metaclass__ = TopLevelDocumentMetaclass
+
+ _dynamic = True
+
+ def __delattr__(self, *args, **kwargs):
+ """Deletes the attribute by setting to None and allowing _delta to unset
+ it"""
+ field_name = args[0]
+ if field_name in self._dynamic_fields:
+ setattr(self, field_name, None)
+ else:
+ super(DynamicDocument, self).__delattr__(*args, **kwargs)
+
+
+class DynamicEmbeddedDocument(EmbeddedDocument):
+
+ """A Dynamic Embedded Document class allowing flexible, expandable and
+ uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more
+ information about dynamic documents.
+ """
+
+ # The __metaclass__ attribute is removed by 2to3 when running with Python3
+ # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+ my_metaclass = DocumentMetaclass
+ __metaclass__ = DocumentMetaclass
+
+ _dynamic = True
+
+ def __delattr__(self, *args, **kwargs):
+ """Deletes the attribute by setting to None and allowing _delta to unset
+ it"""
+ field_name = args[0]
+ if field_name in self._fields:
+ default = self._fields[field_name].default
+ if callable(default):
+ default = default()
+ setattr(self, field_name, default)
+ else:
+ setattr(self, field_name, None)
+
+
+class MapReduceDocument(object):
+
+ """A document returned from a map/reduce query.
+
+ :param collection: An instance of :class:`~pymongo.Collection`
+ :param key: Document/result key, often an instance of
+ :class:`~bson.objectid.ObjectId`. If supplied as
+ an ``ObjectId`` found in the given ``collection``,
+ the object can be accessed via the ``object`` property.
+ :param value: The result(s) for this key.
+
+ .. versionadded:: 0.3
+ """
+
+ def __init__(self, document, collection, key, value):
+ self._document = document
+ self._collection = collection
+ self.key = key
+ self.value = value
+
+ @property
+ def object(self):
+ """Lazy-load the object referenced by ``self.key``. ``self.key``
+ should be the ``primary_key``.
+ """
+ id_field = self._document()._meta['id_field']
+ id_field_type = type(id_field)
+
+ if not isinstance(self.key, id_field_type):
+ try:
+ self.key = id_field_type(self.key)
+ except:
+ raise Exception("Could not cast key as %s" %
+ id_field_type.__name__)
+
+ if not hasattr(self, "_key_object"):
+ self._key_object = self._document.objects.with_id(self.key)
+ return self._key_object
+ return self._key_object
diff --git a/awx/lib/site-packages/mongoengine/errors.py b/awx/lib/site-packages/mongoengine/errors.py
new file mode 100644
index 0000000000..6cde7771e5
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/errors.py
@@ -0,0 +1,131 @@
+from collections import defaultdict
+
+from mongoengine.python_support import txt_type
+
+
+__all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError',
+ 'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError',
+ 'OperationError', 'NotUniqueError', 'FieldDoesNotExist',
+ 'ValidationError')
+
+
+class NotRegistered(Exception):
+ pass
+
+
+class InvalidDocumentError(Exception):
+ pass
+
+
+class LookUpError(AttributeError):
+ pass
+
+
+class DoesNotExist(Exception):
+ pass
+
+
+class MultipleObjectsReturned(Exception):
+ pass
+
+
+class InvalidQueryError(Exception):
+ pass
+
+
+class OperationError(Exception):
+ pass
+
+
+class NotUniqueError(OperationError):
+ pass
+
+
+class FieldDoesNotExist(Exception):
+ pass
+
+
+class ValidationError(AssertionError):
+ """Validation exception.
+
+ May represent an error validating a field or a
+ document containing fields with validation errors.
+
+ :ivar errors: A dictionary of errors for fields within this
+ document or list, or None if the error is for an
+ individual field.
+ """
+
+ errors = {}
+ field_name = None
+ _message = None
+
+ def __init__(self, message="", **kwargs):
+ self.errors = kwargs.get('errors', {})
+ self.field_name = kwargs.get('field_name')
+ self.message = message
+
+ def __str__(self):
+ return txt_type(self.message)
+
+ def __repr__(self):
+ return '%s(%s,)' % (self.__class__.__name__, self.message)
+
+ def __getattribute__(self, name):
+ message = super(ValidationError, self).__getattribute__(name)
+ if name == 'message':
+ if self.field_name:
+ message = '%s' % message
+ if self.errors:
+ message = '%s(%s)' % (message, self._format_errors())
+ return message
+
+ def _get_message(self):
+ return self._message
+
+ def _set_message(self, message):
+ self._message = message
+
+ message = property(_get_message, _set_message)
+
+ def to_dict(self):
+ """Returns a dictionary of all errors within a document
+
+ Keys are field names or list indices and values are the
+ validation error messages, or a nested dictionary of
+ errors for an embedded document or list.
+ """
+
+ def build_dict(source):
+ errors_dict = {}
+ if not source:
+ return errors_dict
+ if isinstance(source, dict):
+ for field_name, error in source.iteritems():
+ errors_dict[field_name] = build_dict(error)
+ elif isinstance(source, ValidationError) and source.errors:
+ return build_dict(source.errors)
+ else:
+ return unicode(source)
+ return errors_dict
+ if not self.errors:
+ return {}
+ return build_dict(self.errors)
+
+ def _format_errors(self):
+ """Returns a string listing all errors within a document"""
+
+ def generate_key(value, prefix=''):
+ if isinstance(value, list):
+ value = ' '.join([generate_key(k) for k in value])
+ if isinstance(value, dict):
+ value = ' '.join(
+ [generate_key(v, k) for k, v in value.iteritems()])
+
+ results = "%s.%s" % (prefix, value) if prefix else value
+ return results
+
+ error_dict = defaultdict(list)
+ for k, v in self.to_dict().iteritems():
+ error_dict[generate_key(v)].append(k)
+ return ' '.join(["%s: %s" % (k, v) for k, v in error_dict.iteritems()])
diff --git a/awx/lib/site-packages/mongoengine/fields.py b/awx/lib/site-packages/mongoengine/fields.py
new file mode 100644
index 0000000000..3c1db4ac38
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/fields.py
@@ -0,0 +1,2006 @@
+import datetime
+import decimal
+import itertools
+import re
+import time
+import urllib2
+import uuid
+import warnings
+from operator import itemgetter
+
+try:
+ import dateutil
+except ImportError:
+ dateutil = None
+else:
+ import dateutil.parser
+
+import pymongo
+import gridfs
+from bson import Binary, DBRef, SON, ObjectId
+
+from mongoengine.errors import ValidationError
+from mongoengine.python_support import (PY3, bin_type, txt_type,
+ str_types, StringIO)
+from base import (BaseField, ComplexBaseField, ObjectIdField, GeoJsonBaseField,
+ get_document, BaseDocument)
+from queryset import DO_NOTHING, QuerySet
+from document import Document, EmbeddedDocument
+from connection import get_db, DEFAULT_CONNECTION_NAME
+
+try:
+ from PIL import Image, ImageOps
+except ImportError:
+ Image = None
+ ImageOps = None
+
+__all__ = [
+ 'StringField', 'URLField', 'EmailField', 'IntField', 'LongField',
+ 'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField',
+ 'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField',
+ 'GenericEmbeddedDocumentField', 'DynamicField', 'ListField',
+ 'SortedListField', 'EmbeddedDocumentListField', 'DictField',
+ 'MapField', 'ReferenceField', 'CachedReferenceField',
+ 'GenericReferenceField', 'BinaryField', 'GridFSError', 'GridFSProxy',
+ 'FileField', 'ImageGridFsProxy', 'ImproperlyConfigured', 'ImageField',
+ 'GeoPointField', 'PointField', 'LineStringField', 'PolygonField',
+ 'SequenceField', 'UUIDField', 'MultiPointField', 'MultiLineStringField',
+ 'MultiPolygonField', 'GeoJsonBaseField']
+
+
+RECURSIVE_REFERENCE_CONSTANT = 'self'
+
+
+class StringField(BaseField):
+
+ """A unicode string field.
+ """
+
+ def __init__(self, regex=None, max_length=None, min_length=None, **kwargs):
+ self.regex = re.compile(regex) if regex else None
+ self.max_length = max_length
+ self.min_length = min_length
+ super(StringField, self).__init__(**kwargs)
+
+ def to_python(self, value):
+ if isinstance(value, unicode):
+ return value
+ try:
+ value = value.decode('utf-8')
+ except:
+ pass
+ return value
+
+ def validate(self, value):
+ if not isinstance(value, basestring):
+ self.error('StringField only accepts string values')
+
+ if self.max_length is not None and len(value) > self.max_length:
+ self.error('String value is too long')
+
+ if self.min_length is not None and len(value) < self.min_length:
+ self.error('String value is too short')
+
+ if self.regex is not None and self.regex.match(value) is None:
+ self.error('String value did not match validation regex')
+
+ def lookup_member(self, member_name):
+ return None
+
+ def prepare_query_value(self, op, value):
+ if not isinstance(op, basestring):
+ return value
+
+ if op.lstrip('i') in ('startswith', 'endswith', 'contains', 'exact'):
+ flags = 0
+ if op.startswith('i'):
+ flags = re.IGNORECASE
+ op = op.lstrip('i')
+
+ regex = r'%s'
+ if op == 'startswith':
+ regex = r'^%s'
+ elif op == 'endswith':
+ regex = r'%s$'
+ elif op == 'exact':
+ regex = r'^%s$'
+
+ # escape unsafe characters which could lead to a re.error
+ value = re.escape(value)
+ value = re.compile(regex % value, flags)
+ return value
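+
+ # For example, name__istartswith='jo' compiles the value to the
+ # case-insensitive regex ^jo, which is served as a $regex query
+ # ('name' is a hypothetical field).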
+
+
+class URLField(StringField):
+
+ """A field that validates input as an URL.
+
+ .. versionadded:: 0.3
+ """
+
+ _URL_REGEX = re.compile(
+ r'^(?:http|ftp)s?://' # http:// or https://
+ # domain...
+ r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'
+ r'localhost|' # localhost...
+ r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
+ r'(?::\d+)?' # optional port
+ r'(?:/?|[/?]\S+)$', re.IGNORECASE)
+
+ def __init__(self, verify_exists=False, url_regex=None, **kwargs):
+ self.verify_exists = verify_exists
+ self.url_regex = url_regex or self._URL_REGEX
+ super(URLField, self).__init__(**kwargs)
+
+ def validate(self, value):
+ if not self.url_regex.match(value):
+ self.error('Invalid URL: %s' % value)
+ return
+
+ if self.verify_exists:
+ warnings.warn(
+ "The URLField verify_exists argument has intractable security "
+ "and performance issues. Accordingly, it has been deprecated.",
+ DeprecationWarning)
+ try:
+ request = urllib2.Request(value)
+ urllib2.urlopen(request)
+ except Exception, e:
+ self.error('This URL appears to be a broken link: %s' % e)
+
+
+class EmailField(StringField):
+
+ """A field that validates input as an E-Mail-Address.
+
+ .. versionadded:: 0.4
+ """
+
+ EMAIL_REGEX = re.compile(
+ # dot-atom
+ r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"
+ # quoted-string
+ r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"'
+ # domain (max length of an ICANN TLD is 22 characters)
+ r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,22}$', re.IGNORECASE
+ )
+
+ def validate(self, value):
+ if not EmailField.EMAIL_REGEX.match(value):
+ self.error('Invalid Mail-address: %s' % value)
+ super(EmailField, self).validate(value)
+
+
+class IntField(BaseField):
+
+ """An 32-bit integer field.
+ """
+
+ def __init__(self, min_value=None, max_value=None, **kwargs):
+ self.min_value, self.max_value = min_value, max_value
+ super(IntField, self).__init__(**kwargs)
+
+ def to_python(self, value):
+ try:
+ value = int(value)
+ except ValueError:
+ pass
+ return value
+
+ def validate(self, value):
+ try:
+ value = int(value)
+ except:
+ self.error('%s could not be converted to int' % value)
+
+ if self.min_value is not None and value < self.min_value:
+ self.error('Integer value is too small')
+
+ if self.max_value is not None and value > self.max_value:
+ self.error('Integer value is too large')
+
+ def prepare_query_value(self, op, value):
+ if value is None:
+ return value
+
+ return int(value)
+
+
+class LongField(BaseField):
+
+ """An 64-bit integer field.
+ """
+
+ def __init__(self, min_value=None, max_value=None, **kwargs):
+ self.min_value, self.max_value = min_value, max_value
+ super(LongField, self).__init__(**kwargs)
+
+ def to_python(self, value):
+ try:
+ value = long(value)
+ except ValueError:
+ pass
+ return value
+
+ def validate(self, value):
+ try:
+ value = long(value)
+ except:
+ self.error('%s could not be converted to long' % value)
+
+ if self.min_value is not None and value < self.min_value:
+ self.error('Long value is too small')
+
+ if self.max_value is not None and value > self.max_value:
+ self.error('Long value is too large')
+
+ def prepare_query_value(self, op, value):
+ if value is None:
+ return value
+
+ return long(value)
+
+
+class FloatField(BaseField):
+
+ """An floating point number field.
+ """
+
+ def __init__(self, min_value=None, max_value=None, **kwargs):
+ self.min_value, self.max_value = min_value, max_value
+ super(FloatField, self).__init__(**kwargs)
+
+ def to_python(self, value):
+ try:
+ value = float(value)
+ except ValueError:
+ pass
+ return value
+
+ def validate(self, value):
+ if isinstance(value, int):
+ value = float(value)
+ if not isinstance(value, float):
+ self.error('FloatField only accepts float values')
+
+ if self.min_value is not None and value < self.min_value:
+ self.error('Float value is too small')
+
+ if self.max_value is not None and value > self.max_value:
+ self.error('Float value is too large')
+
+ def prepare_query_value(self, op, value):
+ if value is None:
+ return value
+
+ return float(value)
+
+
+class DecimalField(BaseField):
+
+ """A fixed-point decimal number field.
+
+ .. versionchanged:: 0.8
+ .. versionadded:: 0.3
+ """
+
+ def __init__(self, min_value=None, max_value=None, force_string=False,
+ precision=2, rounding=decimal.ROUND_HALF_UP, **kwargs):
+ """
+ :param min_value: Validation rule for the minimum acceptable value.
+ :param max_value: Validation rule for the maximum acceptable value.
+ :param force_string: Store as a string.
+ :param precision: Number of decimal places to store.
+ :param rounding: The rounding rule from the python decimal library:
+
+ - decimal.ROUND_CEILING (towards Infinity)
+ - decimal.ROUND_DOWN (towards zero)
+ - decimal.ROUND_FLOOR (towards -Infinity)
+ - decimal.ROUND_HALF_DOWN (to nearest with ties going towards zero)
+ - decimal.ROUND_HALF_EVEN (to nearest with ties going to nearest even integer)
+ - decimal.ROUND_HALF_UP (to nearest with ties going away from zero)
+ - decimal.ROUND_UP (away from zero)
+ - decimal.ROUND_05UP (away from zero if last digit after rounding towards zero would have been 0 or 5; otherwise towards zero)
+
+ Defaults to: ``decimal.ROUND_HALF_UP``
+
+ """
+ self.min_value = min_value
+ self.max_value = max_value
+ self.force_string = force_string
+ self.precision = precision
+ self.rounding = rounding
+
+ super(DecimalField, self).__init__(**kwargs)
+
+ def to_python(self, value):
+ if value is None:
+ return value
+
+ # Convert to string for python 2.6 before casting to Decimal
+ try:
+ value = decimal.Decimal("%s" % value)
+ except decimal.InvalidOperation:
+ return value
+ return value.quantize(decimal.Decimal(".%s" % ("0" * self.precision)), rounding=self.rounding)
+
+ def to_mongo(self, value, use_db_field=True):
+ if value is None:
+ return value
+ if self.force_string:
+ return unicode(value)
+ return float(self.to_python(value))
+
+ def validate(self, value):
+ if not isinstance(value, decimal.Decimal):
+ if not isinstance(value, basestring):
+ value = unicode(value)
+ try:
+ value = decimal.Decimal(value)
+ except Exception, exc:
+ self.error('Could not convert value to decimal: %s' % exc)
+
+ if self.min_value is not None and value < self.min_value:
+ self.error('Decimal value is too small')
+
+ if self.max_value is not None and value > self.max_value:
+ self.error('Decimal value is too large')
+
+ def prepare_query_value(self, op, value):
+ return self.to_mongo(value)
+
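+ # Illustrative usage sketch (not part of this module; the values shown are
+ # hypothetical): precision and rounding apply on conversion, and
+ # force_string controls the stored representation.
+ #
+ #     f = DecimalField(precision=2, rounding=decimal.ROUND_HALF_UP)
+ #     f.to_python('19.995')                            # Decimal('20.00')
+ #     DecimalField(force_string=True).to_mongo('1.5')  # u'1.5'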
+
+class BooleanField(BaseField):
+
+ """A boolean field type.
+
+ .. versionadded:: 0.1.2
+ """
+
+ def to_python(self, value):
+ try:
+ value = bool(value)
+ except ValueError:
+ pass
+ return value
+
+ def validate(self, value):
+ if not isinstance(value, bool):
+ self.error('BooleanField only accepts boolean values')
+
+
+class DateTimeField(BaseField):
+
+ """A datetime field.
+
+ Uses the python-dateutil library to parse dates if it is available,
+ falling back to time.strptime otherwise. Note: python-dateutil's parser
+ is fully featured and, when installed, can convert varying date formats
+ into valid python datetime objects.
+
+ Note: Microseconds are rounded to the nearest millisecond.
+ Pre UTC microsecond support is effectively broken.
+ Use :class:`~mongoengine.fields.ComplexDateTimeField` if you
+ need accurate microsecond support.
+ """
+
+ def validate(self, value):
+ new_value = self.to_mongo(value)
+ if not isinstance(new_value, (datetime.datetime, datetime.date)):
+ self.error(u'cannot parse date "%s"' % value)
+
+ def to_mongo(self, value):
+ if value is None:
+ return value
+ if isinstance(value, datetime.datetime):
+ return value
+ if isinstance(value, datetime.date):
+ return datetime.datetime(value.year, value.month, value.day)
+ if callable(value):
+ return value()
+
+ if not isinstance(value, basestring):
+ return None
+
+ # Attempt to parse a datetime:
+ if dateutil:
+ try:
+ return dateutil.parser.parse(value)
+ except (TypeError, ValueError):
+ return None
+
+ # split usecs, because they are not recognized by strptime.
+ if '.' in value:
+ try:
+ value, usecs = value.split('.')
+ usecs = int(usecs)
+ except ValueError:
+ return None
+ else:
+ usecs = 0
+ kwargs = {'microsecond': usecs}
+ try: # Seconds are optional, so try converting seconds first.
+ return datetime.datetime(*time.strptime(value,
+ '%Y-%m-%d %H:%M:%S')[:6], **kwargs)
+ except ValueError:
+ try: # Try without seconds.
+ return datetime.datetime(*time.strptime(value,
+ '%Y-%m-%d %H:%M')[:5], **kwargs)
+ except ValueError: # Try without hour/minutes/seconds.
+ try:
+ return datetime.datetime(*time.strptime(value,
+ '%Y-%m-%d')[:3], **kwargs)
+ except ValueError:
+ return None
+
+ def prepare_query_value(self, op, value):
+ return self.to_mongo(value)
+
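+ # Illustrative sketch (not part of this module): how string values are
+ # coerced by to_mongo, via dateutil when installed or strptime otherwise.
+ #
+ #     f = DateTimeField()
+ #     f.to_mongo('2014-06-24 12:30:59')  # datetime(2014, 6, 24, 12, 30, 59)
+ #     f.to_mongo('2014-06-24')           # datetime(2014, 6, 24, 0, 0)
+ #     f.to_mongo('not a date')           # None, so validate() raises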
+
+class ComplexDateTimeField(StringField):
+
+ """
+ ComplexDateTimeField handles microseconds exactly instead of rounding
+ like DateTimeField does.
+
+ Derives from a StringField so you can do `gte` and `lte` filtering by
+ using lexicographical comparison when filtering / sorting strings.
+
+ The stored string has the following format:
+
+ YYYY,MM,DD,HH,MM,SS,NNNNNN
+
+ Where NNNNNN is the number of microseconds of the represented `datetime`.
+ The `,` as the separator can be easily modified by passing the `separator`
+ keyword when initializing the field.
+
+ .. versionadded:: 0.5
+ """
+
+ def __init__(self, separator=',', **kwargs):
+ self.names = ['year', 'month', 'day', 'hour', 'minute', 'second',
+ 'microsecond']
+ self.separator = separator
+ super(ComplexDateTimeField, self).__init__(**kwargs)
+
+ def _leading_zero(self, number):
+ """
+ Converts the given number to a string.
+
+ If it has only one digit, a leading zero is added so that it always
+ has at least two digits.
+ """
+ if int(number) < 10:
+ return "0%s" % number
+ else:
+ return str(number)
+
+ def _convert_from_datetime(self, val):
+ """
+ Convert a `datetime` object to a string representation (which will be
+ stored in MongoDB). This is the reverse function of
+ `_convert_from_string`.
+
+ >>> a = datetime(2011, 6, 8, 20, 26, 24, 192284)
+ >>> ComplexDateTimeField()._convert_from_datetime(a)
+ '2011,06,08,20,26,24,192284'
+ """
+ data = []
+ for name in self.names:
+ data.append(self._leading_zero(getattr(val, name)))
+ return self.separator.join(data)
+
+ def _convert_from_string(self, data):
+ """
+ Convert a string representation to a `datetime` object (the object you
+ will manipulate). This is the reverse function of
+ `_convert_from_datetime`.
+
+ >>> a = '2011,06,08,20,26,24,192284'
+ >>> ComplexDateTimeField()._convert_from_string(a)
+ datetime.datetime(2011, 6, 8, 20, 26, 24, 192284)
+ """
+ data = data.split(self.separator)
+ data = map(int, data)
+ values = {}
+ for i in range(7):
+ values[self.names[i]] = data[i]
+ return datetime.datetime(**values)
+
+ def __get__(self, instance, owner):
+ data = super(ComplexDateTimeField, self).__get__(instance, owner)
+ if data is None:
+ return None if self.null else datetime.datetime.now()
+ if isinstance(data, datetime.datetime):
+ return data
+ return self._convert_from_string(data)
+
+ def __set__(self, instance, value):
+ value = self._convert_from_datetime(value) if value else value
+ return super(ComplexDateTimeField, self).__set__(instance, value)
+
+ def validate(self, value):
+ value = self.to_python(value)
+ if not isinstance(value, datetime.datetime):
+ self.error('Only datetime objects may be used in a '
+ 'ComplexDateTimeField')
+
+ def to_python(self, value):
+ original_value = value
+ try:
+ return self._convert_from_string(value)
+ except Exception:
+ return original_value
+
+ def to_mongo(self, value):
+ value = self.to_python(value)
+ return self._convert_from_datetime(value)
+
+ def prepare_query_value(self, op, value):
+ return self._convert_from_datetime(value)
+
+
+class EmbeddedDocumentField(BaseField):
+
+ """An embedded document field - with a declared document_type.
+ Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
+ """
+
+ def __init__(self, document_type, **kwargs):
+ if not isinstance(document_type, basestring):
+ if not issubclass(document_type, EmbeddedDocument):
+ self.error('Invalid embedded document class provided to an '
+ 'EmbeddedDocumentField')
+ self.document_type_obj = document_type
+ super(EmbeddedDocumentField, self).__init__(**kwargs)
+
+ @property
+ def document_type(self):
+ if isinstance(self.document_type_obj, basestring):
+ if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
+ self.document_type_obj = self.owner_document
+ else:
+ self.document_type_obj = get_document(self.document_type_obj)
+ return self.document_type_obj
+
+ def to_python(self, value):
+ if not isinstance(value, self.document_type):
+ return self.document_type._from_son(value)
+ return value
+
+ def to_mongo(self, value, use_db_field=True, fields=[]):
+ if not isinstance(value, self.document_type):
+ return value
+ return self.document_type.to_mongo(value, use_db_field,
+ fields=fields)
+
+ def validate(self, value, clean=True):
+ """Make sure that the document instance is an instance of the
+ EmbeddedDocument subclass provided when the document was defined.
+ """
+ # Using isinstance also works for subclasses of self.document
+ if not isinstance(value, self.document_type):
+ self.error('Invalid embedded document instance provided to an '
+ 'EmbeddedDocumentField')
+ self.document_type.validate(value, clean)
+
+ def lookup_member(self, member_name):
+ return self.document_type._fields.get(member_name)
+
+ def prepare_query_value(self, op, value):
+ return self.to_mongo(value)
+
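+ # Illustrative sketch (not part of this module; the document classes are
+ # hypothetical):
+ #
+ #     class Comment(EmbeddedDocument):
+ #         content = StringField()
+ #
+ #     class Page(Document):
+ #         comment = EmbeddedDocumentField(Comment)
+ #
+ #     Page(comment=Comment(content='hello')).save()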
+
+class GenericEmbeddedDocumentField(BaseField):
+
+ """A generic embedded document field - allows any
+ :class:`~mongoengine.EmbeddedDocument` to be stored.
+
+ Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`.
+
+ .. note ::
+ You can use the choices param to limit the acceptable
+ EmbeddedDocument types
+ """
+
+ def prepare_query_value(self, op, value):
+ return self.to_mongo(value)
+
+ def to_python(self, value):
+ if isinstance(value, dict):
+ doc_cls = get_document(value['_cls'])
+ value = doc_cls._from_son(value)
+
+ return value
+
+ def validate(self, value, clean=True):
+ if not isinstance(value, EmbeddedDocument):
+ self.error('Invalid embedded document instance provided to a '
+ 'GenericEmbeddedDocumentField')
+
+ value.validate(clean=clean)
+
+ def to_mongo(self, document, use_db_field=True):
+ if document is None:
+ return None
+
+ data = document.to_mongo(use_db_field)
+ if '_cls' not in data:
+ data['_cls'] = document._class_name
+ return data
+
+
+class DynamicField(BaseField):
+
+ """A truly dynamic field type capable of handling different and varying
+ types of data.
+
+ Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data"""
+
+ def to_mongo(self, value):
+ """Convert a Python type to a MongoDB compatible type.
+ """
+
+ if isinstance(value, basestring):
+ return value
+
+ if hasattr(value, 'to_mongo'):
+ cls = value.__class__
+ val = value.to_mongo()
+ # If it's a Document that's not inherited, add _cls
+ if (isinstance(value, Document)):
+ val = {"_ref": value.to_dbref(), "_cls": cls.__name__}
+ if (isinstance(value, EmbeddedDocument)):
+ val['_cls'] = cls.__name__
+ return val
+
+ if not isinstance(value, (dict, list, tuple)):
+ return value
+
+ is_list = False
+ if not hasattr(value, 'items'):
+ is_list = True
+ value = dict([(k, v) for k, v in enumerate(value)])
+
+ data = {}
+ for k, v in value.iteritems():
+ data[k] = self.to_mongo(v)
+
+ value = data
+ if is_list: # Convert back to a list
+ value = [v for k, v in sorted(data.iteritems(), key=itemgetter(0))]
+ return value
+
+ def to_python(self, value):
+ if isinstance(value, dict) and '_cls' in value:
+ doc_cls = get_document(value['_cls'])
+ if '_ref' in value:
+ value = doc_cls._get_db().dereference(value['_ref'])
+ return doc_cls._from_son(value)
+
+ return super(DynamicField, self).to_python(value)
+
+ def lookup_member(self, member_name):
+ return member_name
+
+ def prepare_query_value(self, op, value):
+ if isinstance(value, basestring):
+ from mongoengine.fields import StringField
+ return StringField().prepare_query_value(op, value)
+ return self.to_mongo(value)
+
+ def validate(self, value, clean=True):
+ if hasattr(value, "validate"):
+ value.validate(clean=clean)
+
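+ # Illustrative sketch (not part of this module): DynamicDocument uses this
+ # field type for undeclared attributes; referenced documents are wrapped
+ # as {'_ref': ..., '_cls': ...} by to_mongo. Names are hypothetical.
+ #
+ #     class Profile(DynamicDocument):
+ #         pass
+ #
+ #     Profile(nickname='ed', scores=[1, 2, 3]).save()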
+
+class ListField(ComplexBaseField):
+
+ """A list field that wraps a standard field, allowing multiple instances
+ of the field to be used as a list in the database.
+
+ If using with ReferenceFields see: :ref:`one-to-many-with-listfields`
+
+ .. note::
+ Required means it cannot be empty - as the default for ListFields is []
+ """
+
+ def __init__(self, field=None, **kwargs):
+ self.field = field
+ kwargs.setdefault('default', lambda: [])
+ super(ListField, self).__init__(**kwargs)
+
+ def validate(self, value):
+ """Make sure that a list of valid fields is being used.
+ """
+ if (not isinstance(value, (list, tuple, QuerySet)) or
+ isinstance(value, basestring)):
+ self.error('Only lists and tuples may be used in a list field')
+ super(ListField, self).validate(value)
+
+ def prepare_query_value(self, op, value):
+ if self.field:
+ if op in ('set', 'unset') and (not isinstance(value, basestring)
+ and not isinstance(value, BaseDocument)
+ and hasattr(value, '__iter__')):
+ return [self.field.prepare_query_value(op, v) for v in value]
+ return self.field.prepare_query_value(op, value)
+ return super(ListField, self).prepare_query_value(op, value)
+
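+ # Illustrative sketch (not part of this module; names are hypothetical):
+ #
+ #     class Page(Document):
+ #         tags = ListField(StringField(max_length=50))
+ #
+ #     Page(tags=['mongodb', 'mongoengine']).save()
+ #     Page.objects(tags='mongodb')  # membership queries match single items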
+
+class EmbeddedDocumentListField(ListField):
+ """A :class:`~mongoengine.ListField` designed specially to hold a list of
+ embedded documents to provide additional query helpers.
+
+ .. note::
+ The only valid list values are subclasses of
+ :class:`~mongoengine.EmbeddedDocument`.
+
+ .. versionadded:: 0.9
+
+ """
+
+ def __init__(self, document_type, *args, **kwargs):
+ """
+ :param document_type: The type of
+ :class:`~mongoengine.EmbeddedDocument` the list will hold.
+ :param args: Arguments passed directly into the parent
+ :class:`~mongoengine.ListField`.
+ :param kwargs: Keyword arguments passed directly into the parent
+ :class:`~mongoengine.ListField`.
+ """
+ super(EmbeddedDocumentListField, self).__init__(
+ field=EmbeddedDocumentField(document_type), **kwargs
+ )
+
+
+class SortedListField(ListField):
+
+ """A ListField that sorts the contents of its list before writing to
+ the database in order to ensure that a sorted list is always
+ retrieved.
+
+ .. warning::
+ There is a potential race condition when handling lists. If you set /
+ save the whole list then other processes trying to save the whole list
+ as well could overwrite changes. The safest way to append to a list is
+ to perform a push operation.
+
+ .. versionadded:: 0.4
+ .. versionchanged:: 0.6 - added reverse keyword
+ """
+
+ _ordering = None
+ _order_reverse = False
+
+ def __init__(self, field, **kwargs):
+ if 'ordering' in kwargs.keys():
+ self._ordering = kwargs.pop('ordering')
+ if 'reverse' in kwargs.keys():
+ self._order_reverse = kwargs.pop('reverse')
+ super(SortedListField, self).__init__(field, **kwargs)
+
+ def to_mongo(self, value):
+ value = super(SortedListField, self).to_mongo(value)
+ if self._ordering is not None:
+ return sorted(value, key=itemgetter(self._ordering),
+ reverse=self._order_reverse)
+ return sorted(value, reverse=self._order_reverse)
+
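+ # Illustrative sketch (not part of this module): `ordering` names the key
+ # used to sort list items on save. Names are hypothetical.
+ #
+ #     class Page(Document):
+ #         comments = SortedListField(DictField(), ordering='votes',
+ #                                    reverse=True)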
+
+def key_not_string(d):
+ """ Helper function to recursively determine if any key in a dictionary is
+ not a string.
+ """
+ for k, v in d.items():
+ if not isinstance(k, basestring) or (isinstance(v, dict) and key_not_string(v)):
+ return True
+ return False
+
+
+def key_has_dot_or_dollar(d):
+ """ Helper function to recursively determine if any key in a dictionary
+ contains a dot or a dollar sign.
+ """
+ for k, v in d.items():
+ if ('.' in k or '$' in k) or (isinstance(v, dict) and key_has_dot_or_dollar(v)):
+ return True
+ return False
+
+
+class DictField(ComplexBaseField):
+
+ """A dictionary field that wraps a standard Python dictionary. This is
+ similar to an embedded document, but the structure is not defined.
+
+ .. note::
+ Required means it cannot be empty - as the default for DictFields is {}
+
+ .. versionadded:: 0.3
+ .. versionchanged:: 0.5 - Can now handle complex / varying types of data
+ """
+
+ def __init__(self, basecls=None, field=None, *args, **kwargs):
+ self.field = field
+ self.basecls = basecls or BaseField
+ if not issubclass(self.basecls, BaseField):
+ self.error('basecls must be a subclass of BaseField')
+ kwargs.setdefault('default', lambda: {})
+ super(DictField, self).__init__(*args, **kwargs)
+
+ def validate(self, value):
+ """Make sure that a list of valid fields is being used.
+ """
+ if not isinstance(value, dict):
+ self.error('Only dictionaries may be used in a DictField')
+
+ if key_not_string(value):
+ msg = ("Invalid dictionary key - documents must "
+ "have only string keys")
+ self.error(msg)
+ if key_has_dot_or_dollar(value):
+ self.error('Invalid dictionary key name - keys may not contain "."'
+ ' or "$" characters')
+ super(DictField, self).validate(value)
+
+ def lookup_member(self, member_name):
+ return DictField(basecls=self.basecls, db_field=member_name)
+
+ def prepare_query_value(self, op, value):
+ match_operators = ['contains', 'icontains', 'startswith',
+ 'istartswith', 'endswith', 'iendswith',
+ 'exact', 'iexact']
+
+ if op in match_operators and isinstance(value, basestring):
+ return StringField().prepare_query_value(op, value)
+
+ if hasattr(self.field, 'field'):
+ if op in ('set', 'unset') and isinstance(value, dict):
+ return dict(
+ (k, self.field.prepare_query_value(op, v))
+ for k, v in value.items())
+ return self.field.prepare_query_value(op, value)
+
+ return super(DictField, self).prepare_query_value(op, value)
+
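+ # Illustrative sketch (not part of this module; names are hypothetical).
+ # Keys must be strings without '.' or '$' (see validate above).
+ #
+ #     class Page(Document):
+ #         info = DictField()
+ #
+ #     Page(info={'author': 'ed', 'views': 5}).save()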
+
+class MapField(DictField):
+
+ """A field that maps a name to a specified field type. Similar to
+ a DictField, except the 'value' of each item must match the specified
+ field type.
+
+ .. versionadded:: 0.5
+ """
+
+ def __init__(self, field=None, *args, **kwargs):
+ if not isinstance(field, BaseField):
+ self.error('Argument to MapField constructor must be a valid '
+ 'field')
+ super(MapField, self).__init__(field=field, *args, **kwargs)
+
+
+class ReferenceField(BaseField):
+
+ """A reference to a document that will be automatically dereferenced on
+ access (lazily).
+
+ Use the `reverse_delete_rule` to handle what should happen if the document
+ the field is referencing is deleted. EmbeddedDocuments, DictFields and
+ MapFields do not support reverse_delete_rule, and an `InvalidDocumentError`
+ will be raised if you try to set it on one of these Document / Field types.
+
+ The options are:
+
+ * DO_NOTHING - don't do anything (default).
+ * NULLIFY - Updates the reference to null.
+ * CASCADE - Deletes the documents associated with the reference.
+ * DENY - Prevent the deletion of the reference object.
+ * PULL - Pull the reference from a :class:`~mongoengine.fields.ListField`
+ of references
+
+ Alternative syntax for registering delete rules (useful when implementing
+ bi-directional delete rules)
+
+ .. code-block:: python
+
+ class Bar(Document):
+ content = StringField()
+ foo = ReferenceField('Foo')
+
+ Bar.register_delete_rule(Foo, 'bar', NULLIFY)
+
+ .. note ::
+ `reverse_delete_rule` does not trigger pre / post delete signals.
+
+ .. versionchanged:: 0.5 added `reverse_delete_rule`
+ """
+
+ def __init__(self, document_type, dbref=False,
+ reverse_delete_rule=DO_NOTHING, **kwargs):
+ """Initialises the Reference Field.
+
+ :param dbref: Store the reference as :class:`~pymongo.dbref.DBRef`
+ or as the :class:`~pymongo.objectid.ObjectId`.
+ :param reverse_delete_rule: Determines what to do when the referring
+ object is deleted
+ """
+ if not isinstance(document_type, basestring):
+ if not issubclass(document_type, (Document, basestring)):
+ self.error('Argument to ReferenceField constructor must be a '
+ 'document class or a string')
+
+ self.dbref = dbref
+ self.document_type_obj = document_type
+ self.reverse_delete_rule = reverse_delete_rule
+ super(ReferenceField, self).__init__(**kwargs)
+
+ @property
+ def document_type(self):
+ if isinstance(self.document_type_obj, basestring):
+ if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
+ self.document_type_obj = self.owner_document
+ else:
+ self.document_type_obj = get_document(self.document_type_obj)
+ return self.document_type_obj
+
+ def __get__(self, instance, owner):
+ """Descriptor to allow lazy dereferencing.
+ """
+ if instance is None:
+ # Document class being used rather than a document object
+ return self
+
+ # Get value from document instance if available
+ value = instance._data.get(self.name)
+ self._auto_dereference = instance._fields[self.name]._auto_dereference
+ # Dereference DBRefs
+ if self._auto_dereference and isinstance(value, DBRef):
+ value = self.document_type._get_db().dereference(value)
+ if value is not None:
+ instance._data[self.name] = self.document_type._from_son(value)
+
+ return super(ReferenceField, self).__get__(instance, owner)
+
+ def to_mongo(self, document):
+ if isinstance(document, DBRef):
+ if not self.dbref:
+ return document.id
+ return document
+
+ id_field_name = self.document_type._meta['id_field']
+ id_field = self.document_type._fields[id_field_name]
+
+ if isinstance(document, Document):
+ # We need the id from the saved object to create the DBRef
+ id_ = document.pk
+ if id_ is None:
+ self.error('You can only reference documents once they have'
+ ' been saved to the database')
+ else:
+ id_ = document
+
+ id_ = id_field.to_mongo(id_)
+ if self.dbref:
+ collection = self.document_type._get_collection_name()
+ return DBRef(collection, id_)
+
+ return id_
+
+ def to_python(self, value):
+ """Convert a MongoDB-compatible type to a Python type.
+ """
+ if (not self.dbref and
+ not isinstance(value, (DBRef, Document, EmbeddedDocument))):
+ collection = self.document_type._get_collection_name()
+ value = DBRef(collection, self.document_type.id.to_python(value))
+ return value
+
+ def prepare_query_value(self, op, value):
+ if value is None:
+ return None
+ return self.to_mongo(value)
+
+ def validate(self, value):
+
+ if not isinstance(value, (self.document_type, DBRef)):
+ self.error("A ReferenceField only accepts DBRef or documents")
+
+ if isinstance(value, Document) and value.id is None:
+ self.error('You can only reference documents once they have been '
+ 'saved to the database')
+
+ def lookup_member(self, member_name):
+ return self.document_type._fields.get(member_name)
+
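+ # Illustrative sketch (not part of this module): a NULLIFY delete rule.
+ # Names are hypothetical.
+ #
+ #     class Org(Document):
+ #         name = StringField()
+ #
+ #     class Employee(Document):
+ #         org = ReferenceField(Org, reverse_delete_rule=NULLIFY)
+ #
+ #     # Deleting an Org unsets `org` on any Employee that referenced it.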
+
+class CachedReferenceField(BaseField):
+
+ """
+ A reference field that caches selected fields of the referenced
+ document, providing pseudo-joins.
+
+ .. versionadded:: 0.9
+ """
+
+ def __init__(self, document_type, fields=[], auto_sync=True, **kwargs):
+ """Initialises the Cached Reference Field.
+
+ :param fields: A list of fields to be cached in the document.
+ :param auto_sync: if True, cached fields are kept in sync automatically
+ when the referenced document is saved.
+ """
+ if not isinstance(document_type, basestring) and \
+ not issubclass(document_type, (Document, basestring)):
+
+ self.error('Argument to CachedReferenceField constructor must be a'
+ ' document class or a string')
+
+ self.auto_sync = auto_sync
+ self.document_type_obj = document_type
+ self.fields = fields
+ super(CachedReferenceField, self).__init__(**kwargs)
+
+ def start_listener(self):
+ from mongoengine import signals
+ signals.post_save.connect(self.on_document_pre_save,
+ sender=self.document_type)
+
+ def on_document_pre_save(self, sender, document, created, **kwargs):
+ if not created:
+ update_kwargs = dict(
+ ('set__%s__%s' % (self.name, k), v)
+ for k, v in document._delta()[0].items()
+ if k in self.fields)
+
+ if update_kwargs:
+ filter_kwargs = {}
+ filter_kwargs[self.name] = document
+
+ self.owner_document.objects(
+ **filter_kwargs).update(**update_kwargs)
+
+ def to_python(self, value):
+ if isinstance(value, dict):
+ collection = self.document_type._get_collection_name()
+ value = DBRef(
+ collection, self.document_type.id.to_python(value['_id']))
+
+ return value
+
+ @property
+ def document_type(self):
+ if isinstance(self.document_type_obj, basestring):
+ if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
+ self.document_type_obj = self.owner_document
+ else:
+ self.document_type_obj = get_document(self.document_type_obj)
+ return self.document_type_obj
+
+ def __get__(self, instance, owner):
+ if instance is None:
+ # Document class being used rather than a document object
+ return self
+
+ # Get value from document instance if available
+ value = instance._data.get(self.name)
+ self._auto_dereference = instance._fields[self.name]._auto_dereference
+ # Dereference DBRefs
+ if self._auto_dereference and isinstance(value, DBRef):
+ value = self.document_type._get_db().dereference(value)
+ if value is not None:
+ instance._data[self.name] = self.document_type._from_son(value)
+
+ return super(CachedReferenceField, self).__get__(instance, owner)
+
+ def to_mongo(self, document):
+ id_field_name = self.document_type._meta['id_field']
+ id_field = self.document_type._fields[id_field_name]
+
+ if isinstance(document, Document):
+ # We need the id from the saved object to create the DBRef
+ id_ = document.pk
+ if id_ is None:
+ self.error('You can only reference documents once they have'
+ ' been saved to the database')
+ else:
+ self.error('A CachedReferenceField only accepts documents')
+
+ value = SON((
+ ("_id", id_field.to_mongo(id_)),
+ ))
+
+ value.update(dict(document.to_mongo(fields=self.fields)))
+ return value
+
+ def prepare_query_value(self, op, value):
+ if value is None:
+ return None
+
+ if isinstance(value, Document):
+ if value.pk is None:
+ self.error('You can only reference documents once they have'
+ ' been saved to the database')
+ return {'_id': value.pk}
+
+ raise NotImplementedError
+
+ def validate(self, value):
+
+ if not isinstance(value, (self.document_type)):
+ self.error("A CachedReferenceField only accepts documents")
+
+ if isinstance(value, Document) and value.id is None:
+ self.error('You can only reference documents once they have been '
+ 'saved to the database')
+
+ def lookup_member(self, member_name):
+ return self.document_type._fields.get(member_name)
+
+ def sync_all(self):
+ """
+ Sync all cached fields on demand.
+ Caution: this operation may be slow, as it issues one update per
+ referenced document.
+ """
+ update_key = 'set__%s' % self.name
+
+ for doc in self.document_type.objects:
+ filter_kwargs = {}
+ filter_kwargs[self.name] = doc
+
+ update_kwargs = {}
+ update_kwargs[update_key] = doc
+
+ self.owner_document.objects(
+ **filter_kwargs).update(**update_kwargs)
+
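+ # Illustrative sketch (not part of this module): cache the referenced
+ # document's `name` so queries can filter on it without dereferencing.
+ # Names are hypothetical.
+ #
+ #     class Animal(Document):
+ #         name = StringField()
+ #
+ #     class Sighting(Document):
+ #         animal = CachedReferenceField(Animal, fields=['name'])
+ #
+ #     Sighting.objects(animal__name='Leopard')  # served from the cache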
+
+class GenericReferenceField(BaseField):
+
+ """A reference to *any* :class:`~mongoengine.document.Document` subclass
+ that will be automatically dereferenced on access (lazily).
+
+ .. note ::
+ * Any documents used as a generic reference must be registered in the
+ document registry. Importing the model will automatically register
+ it.
+
+ * You can use the choices param to limit the acceptable Document types
+
+ .. versionadded:: 0.3
+ """
+
+ def __get__(self, instance, owner):
+ if instance is None:
+ return self
+
+ value = instance._data.get(self.name)
+
+ self._auto_dereference = instance._fields[self.name]._auto_dereference
+ if self._auto_dereference and isinstance(value, (dict, SON)):
+ instance._data[self.name] = self.dereference(value)
+
+ return super(GenericReferenceField, self).__get__(instance, owner)
+
+ def validate(self, value):
+ if not isinstance(value, (Document, DBRef, dict, SON)):
+ self.error('GenericReferences can only contain documents')
+
+ if isinstance(value, (dict, SON)):
+ if '_ref' not in value or '_cls' not in value:
+ self.error('GenericReferences can only contain documents')
+
+ # We need the id from the saved object to create the DBRef
+ elif isinstance(value, Document) and value.id is None:
+ self.error('You can only reference documents once they have been'
+ ' saved to the database')
+
+ def dereference(self, value):
+ doc_cls = get_document(value['_cls'])
+ reference = value['_ref']
+ doc = doc_cls._get_db().dereference(reference)
+ if doc is not None:
+ doc = doc_cls._from_son(doc)
+ return doc
+
+ def to_mongo(self, document, use_db_field=True):
+ if document is None:
+ return None
+
+ if isinstance(document, (dict, SON)):
+ return document
+
+ id_field_name = document.__class__._meta['id_field']
+ id_field = document.__class__._fields[id_field_name]
+
+ if isinstance(document, Document):
+ # We need the id from the saved object to create the DBRef
+ id_ = document.id
+ if id_ is None:
+ self.error('You can only reference documents once they have'
+ ' been saved to the database')
+ else:
+ id_ = document
+
+ id_ = id_field.to_mongo(id_)
+ collection = document._get_collection_name()
+ ref = DBRef(collection, id_)
+ return SON((
+ ('_cls', document._class_name),
+ ('_ref', ref)
+ ))
+
+ def prepare_query_value(self, op, value):
+ if value is None:
+ return None
+
+ return self.to_mongo(value)
+
+
+class BinaryField(BaseField):
+
+ """A binary data field.
+ """
+
+ def __init__(self, max_bytes=None, **kwargs):
+ self.max_bytes = max_bytes
+ super(BinaryField, self).__init__(**kwargs)
+
+ def __set__(self, instance, value):
+ """Handle bytearrays in python 3.1"""
+ if PY3 and isinstance(value, bytearray):
+ value = bin_type(value)
+ return super(BinaryField, self).__set__(instance, value)
+
+ def to_mongo(self, value):
+ return Binary(value)
+
+ def validate(self, value):
+ if not isinstance(value, (bin_type, txt_type, Binary)):
+ self.error("BinaryField only accepts instances of "
+ "(%s, %s, Binary)" % (
+ bin_type.__name__, txt_type.__name__))
+
+ if self.max_bytes is not None and len(value) > self.max_bytes:
+ self.error('Binary value is too long')
+
+
+class GridFSError(Exception):
+ pass
+
+
+class GridFSProxy(object):
+
+ """Proxy object to handle writing and reading of files to and from GridFS
+
+ .. versionadded:: 0.4
+ .. versionchanged:: 0.5 - added optional size param to read
+ .. versionchanged:: 0.6 - added collection name param
+ """
+
+ _fs = None
+
+ def __init__(self, grid_id=None, key=None,
+ instance=None,
+ db_alias=DEFAULT_CONNECTION_NAME,
+ collection_name='fs'):
+ self.grid_id = grid_id # Store GridFS id for file
+ self.key = key
+ self.instance = instance
+ self.db_alias = db_alias
+ self.collection_name = collection_name
+ self.newfile = None # Used for partial writes
+ self.gridout = None
+
+ def __getattr__(self, name):
+ attrs = ('_fs', 'grid_id', 'key', 'instance', 'db_alias',
+ 'collection_name', 'newfile', 'gridout')
+ if name in attrs:
+ return self.__getattribute__(name)
+ obj = self.get()
+ if hasattr(obj, name):
+ return getattr(obj, name)
+ raise AttributeError(name)
+
+ def __get__(self, instance, value):
+ return self
+
+ def __nonzero__(self):
+ return bool(self.grid_id)
+
+ def __getstate__(self):
+ # Copy the dict so pickling does not clear _fs on the live instance
+ self_dict = self.__dict__.copy()
+ self_dict['_fs'] = None
+ return self_dict
+
+ def __copy__(self):
+ copied = GridFSProxy()
+ copied.__dict__.update(self.__getstate__())
+ return copied
+
+ def __deepcopy__(self, memo):
+ return self.__copy__()
+
+ def __repr__(self):
+ return '<%s: %s>' % (self.__class__.__name__, self.grid_id)
+
+ def __str__(self):
+ name = getattr(
+ self.get(), 'filename', self.grid_id) if self.get() else '(no file)'
+ return '<%s: %s>' % (self.__class__.__name__, name)
+
+ def __eq__(self, other):
+ if isinstance(other, GridFSProxy):
+ return ((self.grid_id == other.grid_id) and
+ (self.collection_name == other.collection_name) and
+ (self.db_alias == other.db_alias))
+ else:
+ return False
+
+ @property
+ def fs(self):
+ if not self._fs:
+ self._fs = gridfs.GridFS(
+ get_db(self.db_alias), self.collection_name)
+ return self._fs
+
+ def get(self, id=None):
+ if id:
+ self.grid_id = id
+ if self.grid_id is None:
+ return None
+ try:
+ if self.gridout is None:
+ self.gridout = self.fs.get(self.grid_id)
+ return self.gridout
+ except Exception:
+ # File has been deleted
+ return None
+
+ def new_file(self, **kwargs):
+ self.newfile = self.fs.new_file(**kwargs)
+ self.grid_id = self.newfile._id
+ self._mark_as_changed()
+
+ def put(self, file_obj, **kwargs):
+ if self.grid_id:
+ raise GridFSError('This document already has a file. Either delete '
+ 'it or call replace to overwrite it')
+ self.grid_id = self.fs.put(file_obj, **kwargs)
+ self._mark_as_changed()
+
+ def write(self, string):
+ if self.grid_id:
+ if not self.newfile:
+ raise GridFSError('This document already has a file. Either '
+ 'delete it or call replace to overwrite it')
+ else:
+ self.new_file()
+ self.newfile.write(string)
+
+ def writelines(self, lines):
+ if not self.newfile:
+ self.new_file()
+ self.grid_id = self.newfile._id
+ self.newfile.writelines(lines)
+
+ def read(self, size=-1):
+ gridout = self.get()
+ if gridout is None:
+ return None
+ else:
+ try:
+ return gridout.read(size)
+ except Exception:
+ return ""
+
+ def delete(self):
+ # Delete file from GridFS, FileField still remains
+ self.fs.delete(self.grid_id)
+ self.grid_id = None
+ self.gridout = None
+ self._mark_as_changed()
+
+ def replace(self, file_obj, **kwargs):
+ self.delete()
+ self.put(file_obj, **kwargs)
+
+ def close(self):
+ if self.newfile:
+ self.newfile.close()
+
+ def _mark_as_changed(self):
+ """Inform the instance that `self.key` has been changed"""
+ if self.instance:
+ self.instance._mark_as_changed(self.key)
+
+
+class FileField(BaseField):
+
+ """A GridFS storage field.
+
+ .. versionadded:: 0.4
+ .. versionchanged:: 0.5 added optional size param for read
+ .. versionchanged:: 0.6 added db_alias for multidb support
+ """
+ proxy_class = GridFSProxy
+
+ def __init__(self,
+ db_alias=DEFAULT_CONNECTION_NAME,
+ collection_name="fs", **kwargs):
+ super(FileField, self).__init__(**kwargs)
+ self.collection_name = collection_name
+ self.db_alias = db_alias
+
+ def __get__(self, instance, owner):
+ if instance is None:
+ return self
+
+ # Check if a file already exists for this model
+ grid_file = instance._data.get(self.name)
+ if not isinstance(grid_file, self.proxy_class):
+ grid_file = self.get_proxy_obj(key=self.name, instance=instance)
+ instance._data[self.name] = grid_file
+
+ if not grid_file.key:
+ grid_file.key = self.name
+ grid_file.instance = instance
+ return grid_file
+
+ def __set__(self, instance, value):
+ key = self.name
+ if ((hasattr(value, 'read') and not
+ isinstance(value, GridFSProxy)) or isinstance(value, str_types)):
+ # using "FileField() = file/string" notation
+ grid_file = instance._data.get(self.name)
+ # If a file already exists, delete it
+ if grid_file:
+ try:
+ grid_file.delete()
+ except Exception:
+ pass
+
+ # Create a new proxy object as we don't already have one
+ instance._data[key] = self.get_proxy_obj(
+ key=key, instance=instance)
+ instance._data[key].put(value)
+ else:
+ instance._data[key] = value
+
+ instance._mark_as_changed(key)
+
+ def get_proxy_obj(self, key, instance, db_alias=None, collection_name=None):
+ if db_alias is None:
+ db_alias = self.db_alias
+ if collection_name is None:
+ collection_name = self.collection_name
+
+ return self.proxy_class(key=key, instance=instance,
+ db_alias=db_alias,
+ collection_name=collection_name)
+
+ def to_mongo(self, value):
+ # Store the GridFS file id in MongoDB
+ if isinstance(value, self.proxy_class) and value.grid_id is not None:
+ return value.grid_id
+ return None
+
+ def to_python(self, value):
+ if value is not None:
+ return self.proxy_class(value,
+ collection_name=self.collection_name,
+ db_alias=self.db_alias)
+
+ def validate(self, value):
+ if value.grid_id is not None:
+ if not isinstance(value, self.proxy_class):
+ self.error('FileField only accepts GridFSProxy values')
+ if not isinstance(value.grid_id, ObjectId):
+ self.error('Invalid GridFSProxy value')
+
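+ # Illustrative sketch (not part of this module): writing and reading via
+ # the GridFSProxy. Names and the file path are hypothetical.
+ #
+ #     class Animal(Document):
+ #         photo = FileField()
+ #
+ #     marmot = Animal()
+ #     marmot.photo.put(open('marmot.jpg', 'rb'), content_type='image/jpeg')
+ #     marmot.save()
+ #     data = marmot.photo.read()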
+
+class ImageGridFsProxy(GridFSProxy):
+
+ """
+ Proxy for ImageField
+
+ .. versionadded:: 0.6
+ """
+
+ def put(self, file_obj, **kwargs):
+ """
+ Insert an image into the database, applying the field's
+ properties (size, thumbnail_size).
+ """
+ field = self.instance._fields[self.key]
+ # Handle nested fields
+ if hasattr(field, 'field') and isinstance(field.field, FileField):
+ field = field.field
+
+ try:
+ img = Image.open(file_obj)
+ img_format = img.format
+ except Exception, e:
+ raise ValidationError('Invalid image: %s' % e)
+
+ # Save as a progressive JPEG only when explicitly requested via the
+ # `progressive` keyword argument and the image is a JPEG
+ progressive = (kwargs.get('progressive') is True and
+ img_format == 'JPEG')
+
+ if (field.size and (img.size[0] > field.size['width'] or
+ img.size[1] > field.size['height'])):
+ size = field.size
+
+ if size['force']:
+ img = ImageOps.fit(img,
+ (size['width'],
+ size['height']),
+ Image.ANTIALIAS)
+ else:
+ img.thumbnail((size['width'],
+ size['height']),
+ Image.ANTIALIAS)
+
+ thumbnail = None
+ if field.thumbnail_size:
+ size = field.thumbnail_size
+
+ if size['force']:
+ thumbnail = ImageOps.fit(
+ img, (size['width'], size['height']), Image.ANTIALIAS)
+ else:
+ thumbnail = img.copy()
+ thumbnail.thumbnail((size['width'],
+ size['height']),
+ Image.ANTIALIAS)
+
+ if thumbnail:
+ thumb_id = self._put_thumbnail(thumbnail, img_format, progressive)
+ else:
+ thumb_id = None
+
+ w, h = img.size
+
+ io = StringIO()
+ img.save(io, img_format, progressive=progressive)
+ io.seek(0)
+
+ return super(ImageGridFsProxy, self).put(io,
+ width=w,
+ height=h,
+ format=img_format,
+ thumbnail_id=thumb_id,
+ **kwargs)
+
+ def delete(self, *args, **kwargs):
+ # deletes thumbnail
+ out = self.get()
+ if out and out.thumbnail_id:
+ self.fs.delete(out.thumbnail_id)
+
+ return super(ImageGridFsProxy, self).delete(*args, **kwargs)
+
+ def _put_thumbnail(self, thumbnail, format, progressive, **kwargs):
+ w, h = thumbnail.size
+
+ io = StringIO()
+ thumbnail.save(io, format, progressive=progressive)
+ io.seek(0)
+
+ return self.fs.put(io, width=w,
+ height=h,
+ format=format,
+ **kwargs)
+
+ @property
+ def size(self):
+ """
+ Return the (width, height) of the image.
+ """
+ out = self.get()
+ if out:
+ return out.width, out.height
+
+ @property
+ def format(self):
+ """
+ Return the format of the image,
+ e.g. PNG, JPEG, GIF.
+ """
+ out = self.get()
+ if out:
+ return out.format
+
+ @property
+ def thumbnail(self):
+ """
+ Return a gridfs.grid_file.GridOut
+ representing a thumbnail of the image.
+ """
+ out = self.get()
+ if out and out.thumbnail_id:
+ return self.fs.get(out.thumbnail_id)
+
+ def write(self, *args, **kwargs):
+ raise RuntimeError("Please use \"put\" method instead")
+
+ def writelines(self, *args, **kwargs):
+ raise RuntimeError("Please use \"put\" method instead")
+
+
+class ImproperlyConfigured(Exception):
+ pass
+
+
+class ImageField(FileField):
+
+ """
+ An image file storage field.
+
+ @size (width, height, force):
+ maximum size at which to store images; larger images are
+ automatically resized, e.g. size=(800, 600, True)
+
+ @thumbnail_size (width, height, force):
+ size at which to generate a thumbnail
+
+ .. versionadded:: 0.6
+ """
+ proxy_class = ImageGridFsProxy
+
+ def __init__(self, size=None, thumbnail_size=None,
+ collection_name='images', **kwargs):
+ if not Image:
+ raise ImproperlyConfigured("PIL library was not found")
+
+ params_size = ('width', 'height', 'force')
+ extra_args = dict(size=size, thumbnail_size=thumbnail_size)
+ for att_name, att in extra_args.items():
+ value = None
+ if isinstance(att, (tuple, list)):
+ if PY3:
+ value = dict(itertools.zip_longest(params_size, att,
+ fillvalue=None))
+ else:
+ value = dict(map(None, params_size, att))
+
+ setattr(self, att_name, value)
+
+ super(ImageField, self).__init__(
+ collection_name=collection_name,
+ **kwargs)
+
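+ # Illustrative sketch (not part of this module): oversized images are
+ # resized on put() and a thumbnail is generated when thumbnail_size is
+ # set. Names are hypothetical.
+ #
+ #     class Profile(Document):
+ #         avatar = ImageField(size=(800, 600, True),
+ #                             thumbnail_size=(100, 100, True))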
+
+class SequenceField(BaseField):
+
+ """Provides a sequential counter see:
+ http://www.mongodb.org/display/DOCS/Object+IDs#ObjectIDs-SequenceNumbers
+
+ .. note::
+
+ Although traditional databases often use increasing sequence
+ numbers for primary keys. In MongoDB, the preferred approach is to
+ use Object IDs instead. The concept is that in a very large
+ cluster of machines, it is easier to create an object ID than have
+ global, uniformly increasing sequence numbers.
+
+ Use any callable as `value_decorator` to transform calculated counter into
+ any value suitable for your needs, e.g. string or hexadecimal
+ representation of the default integer counter value.
+
+ .. versionadded:: 0.5
+
+ .. versionchanged:: 0.8 added `value_decorator`
+ """
+
+ _auto_gen = True
+ COLLECTION_NAME = 'mongoengine.counters'
+ VALUE_DECORATOR = int
+
+ def __init__(self, collection_name=None, db_alias=None, sequence_name=None,
+ value_decorator=None, *args, **kwargs):
+ self.collection_name = collection_name or self.COLLECTION_NAME
+ self.db_alias = db_alias or DEFAULT_CONNECTION_NAME
+ self.sequence_name = sequence_name
+ self.value_decorator = (callable(value_decorator) and
+ value_decorator or self.VALUE_DECORATOR)
+ return super(SequenceField, self).__init__(*args, **kwargs)
+
+ def generate(self):
+ """
+ Generate and Increment the counter
+ """
+ sequence_name = self.get_sequence_name()
+ sequence_id = "%s.%s" % (sequence_name, self.name)
+ collection = get_db(alias=self.db_alias)[self.collection_name]
+ counter = collection.find_and_modify(query={"_id": sequence_id},
+ update={"$inc": {"next": 1}},
+ new=True,
+ upsert=True)
+ return self.value_decorator(counter['next'])
+
+ def set_next_value(self, value):
+ """Helper method to set the next sequence value"""
+ sequence_name = self.get_sequence_name()
+ sequence_id = "%s.%s" % (sequence_name, self.name)
+ collection = get_db(alias=self.db_alias)[self.collection_name]
+ counter = collection.find_and_modify(query={"_id": sequence_id},
+ update={"$set": {"next": value}},
+ new=True,
+ upsert=True)
+ return self.value_decorator(counter['next'])
+
+ def get_next_value(self):
+ """Helper method to get the next value for previewing.
+
+ .. warning:: There is no guarantee this will be the next value
+ as it is only fixed on set.
+ """
+ sequence_name = self.get_sequence_name()
+ sequence_id = "%s.%s" % (sequence_name, self.name)
+ collection = get_db(alias=self.db_alias)[self.collection_name]
+ data = collection.find_one({"_id": sequence_id})
+
+ if data:
+ return self.value_decorator(data['next'] + 1)
+
+ return self.value_decorator(1)
+
+ def get_sequence_name(self):
+ if self.sequence_name:
+ return self.sequence_name
+ owner = self.owner_document
+ if issubclass(owner, Document):
+ return owner._get_collection_name()
+ else:
+ return ''.join('_%s' % c if c.isupper() else c
+ for c in owner._class_name).strip('_').lower()
+
+ def __get__(self, instance, owner):
+ value = super(SequenceField, self).__get__(instance, owner)
+ if value is None and instance._initialised:
+ value = self.generate()
+ instance._data[self.name] = value
+ instance._mark_as_changed(self.name)
+
+ return value
+
+ def __set__(self, instance, value):
+
+ if value is None and instance._initialised:
+ value = self.generate()
+
+ return super(SequenceField, self).__set__(instance, value)
+
+ def prepare_query_value(self, op, value):
+ """
+ This method is overridden in order to convert the query value into the
+ required type. We need to do this to be able to successfully compare
+ query values passed as strings; the base implementation returns the
+ value as-is.
+ """
+ return self.value_decorator(value)
+
+ def to_python(self, value):
+ if value is None:
+ value = self.generate()
+ return value
+
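+ # Illustrative sketch (not part of this module): counters live in the
+ # `mongoengine.counters` collection and increment atomically through
+ # find_and_modify. Names are hypothetical.
+ #
+ #     class Ticket(Document):
+ #         number = SequenceField(value_decorator=lambda n: '%08d' % n)
+ #
+ #     Ticket().save().number  # '00000001', then '00000002', ...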
+
+class UUIDField(BaseField):
+
+ """A UUID field.
+
+ .. versionadded:: 0.6
+ """
+ _binary = None
+
+ def __init__(self, binary=True, **kwargs):
+ """
+ Store UUID data in the database
+
+ :param binary: if False store as a string.
+
+ .. versionchanged:: 0.8.0
+ .. versionchanged:: 0.6.19
+ """
+ self._binary = binary
+ super(UUIDField, self).__init__(**kwargs)
+
+ def to_python(self, value):
+ if not self._binary:
+ original_value = value
+ try:
+ if not isinstance(value, basestring):
+ value = unicode(value)
+ return uuid.UUID(value)
+ except Exception:
+ return original_value
+ return value
+
+ def to_mongo(self, value):
+ if not self._binary:
+ return unicode(value)
+ elif isinstance(value, basestring):
+ return uuid.UUID(value)
+ return value
+
+ def prepare_query_value(self, op, value):
+ if value is None:
+ return None
+ return self.to_mongo(value)
+
+ def validate(self, value):
+ if not isinstance(value, uuid.UUID):
+ if not isinstance(value, basestring):
+ value = str(value)
+ try:
+ value = uuid.UUID(value)
+ except Exception, exc:
+ self.error('Could not convert to UUID: %s' % exc)
+
+
+class GeoPointField(BaseField):
+
+ """A list storing a longitude and latitude coordinate.
+
+ .. note:: this represents a generic point in a 2D plane and is a legacy
+ way of representing a geo point. It admits 2d indexes but not the
+ "2dsphere" indexes available in MongoDB >= 2.4, which are more natural
+ for modeling geospatial points. See :ref:`geospatial-indexes`
+
+ .. versionadded:: 0.4
+ """
+
+ _geo_index = pymongo.GEO2D
+
+ def validate(self, value):
+ """Make sure that a geo-value is of type (x, y)
+ """
+ if not isinstance(value, (list, tuple)):
+ self.error('GeoPointField can only accept tuples or lists '
+ 'of (x, y)')
+
+ if not len(value) == 2:
+ self.error("Value (%s) must be a two-dimensional point" %
+ repr(value))
+ elif (not isinstance(value[0], (float, int)) or
+ not isinstance(value[1], (float, int))):
+ self.error(
+ "Both values (%s) in point must be float or int" % repr(value))
+
+
+class PointField(GeoJsonBaseField):
+
+ """A GeoJSON field storing a longitude and latitude coordinate.
+
+ The data is represented as:
+
+ .. code-block:: js
+
+ { "type" : "Point" ,
+ "coordinates" : [x, y]}
+
+ You can either pass a dict with the full information or a list
+ to set the value.
+
+ Requires mongodb >= 2.4
+
+ .. versionadded:: 0.8
+ """
+ _type = "Point"
+
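+ # Illustrative sketch (not part of this module): either form below stores
+ # the same GeoJSON point. Names are hypothetical.
+ #
+ #     class Place(Document):
+ #         location = PointField()
+ #
+ #     Place(location=[40.33, 18.13]).save()
+ #     Place(location={'type': 'Point', 'coordinates': [40.33, 18.13]}).save()
+ #     Place.objects(location__near=[40.33, 18.13])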
+
+class LineStringField(GeoJsonBaseField):
+
+ """A GeoJSON field storing a line of longitude and latitude coordinates.
+
+ The data is represented as:
+
+ .. code-block:: js
+
+ { "type" : "LineString" ,
+ "coordinates" : [[x1, y1], [x1, y1] ... [xn, yn]]}
+
+ You can either pass a dict with the full information or a list of points.
+
+ Requires mongodb >= 2.4
+
+ .. versionadded:: 0.8
+ """
+ _type = "LineString"
+
+
+class PolygonField(GeoJsonBaseField):
+
+ """A GeoJSON field storing a polygon of longitude and latitude coordinates.
+
+ The data is represented as:
+
+ .. code-block:: js
+
+ { "type" : "Polygon" ,
+ "coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]],
+ [[x1, y1], [x1, y1] ... [xn, yn]]}
+
+ You can either pass a dict with the full information or a list
+ of LineStrings. The first LineString being the outside and the rest being
+ holes.
+
+ Requires mongodb >= 2.4
+
+ .. versionadded:: 0.8
+ """
+ _type = "Polygon"
+
+
+class MultiPointField(GeoJsonBaseField):
+
+ """A GeoJSON field storing a list of Points.
+
+ The data is represented as:
+
+ .. code-block:: js
+
+ { "type" : "MultiPoint" ,
+ "coordinates" : [[x1, y1], [x2, y2]]}
+
+ You can either pass a dict with the full information or a list
+ to set the value.
+
+ Requires mongodb >= 2.6
+
+ .. versionadded:: 0.9
+ """
+ _type = "MultiPoint"
+
+
+class MultiLineStringField(GeoJsonBaseField):
+
+ """A GeoJSON field storing a list of LineStrings.
+
+ The data is represented as:
+
+ .. code-block:: js
+
+ { "type" : "MultiLineString" ,
+ "coordinates" : [[[x1, y1], [x1, y1] ... [xn, yn]],
+ [[x1, y1], [x1, y1] ... [xn, yn]]]}
+
+ You can either pass a dict with the full information or a list of points.
+
+ Requires mongodb >= 2.6
+
+ .. versionadded:: 0.9
+ """
+ _type = "MultiLineString"
+
+
+class MultiPolygonField(GeoJsonBaseField):
+
+ """A GeoJSON field storing list of Polygons.
+
+ The data is represented as:
+
+ .. code-block:: js
+
+ { "type" : "MultiPolygon" ,
+ "coordinates" : [[
+ [[x1, y1], [x1, y1] ... [xn, yn]],
+ [[x1, y1], [x1, y1] ... [xn, yn]]
+ ], [
+ [[x1, y1], [x1, y1] ... [xn, yn]],
+ [[x1, y1], [x1, y1] ... [xn, yn]]
+ ]
+ }
+
+ You can either pass a dict with the full information or a list
+ of Polygons.
+
+ Requires mongodb >= 2.6
+
+ .. versionadded:: 0.9
+ """
+ _type = "MultiPolygon"
diff --git a/awx/lib/site-packages/mongoengine/python_support.py b/awx/lib/site-packages/mongoengine/python_support.py
new file mode 100644
index 0000000000..2c4df00c2f
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/python_support.py
@@ -0,0 +1,29 @@
+"""Helper functions and types to aid with Python 2.5 - 3 support."""
+
+import sys
+
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ import codecs
+ from io import BytesIO as StringIO
+ # return s converted to binary. b('test') should be equivalent to b'test'
+ def b(s):
+ return codecs.latin_1_encode(s)[0]
+
+ bin_type = bytes
+ txt_type = str
+else:
+ try:
+ from cStringIO import StringIO
+ except ImportError:
+ from StringIO import StringIO
+
+ # Conversion to binary only necessary in Python 3
+ def b(s):
+ return s
+
+ bin_type = str
+ txt_type = unicode
+
+str_types = (bin_type, txt_type)
diff --git a/awx/lib/site-packages/mongoengine/queryset/__init__.py b/awx/lib/site-packages/mongoengine/queryset/__init__.py
new file mode 100644
index 0000000000..026a7acdd5
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/queryset/__init__.py
@@ -0,0 +1,11 @@
+from mongoengine.errors import (DoesNotExist, MultipleObjectsReturned,
+ InvalidQueryError, OperationError,
+ NotUniqueError)
+from mongoengine.queryset.field_list import *
+from mongoengine.queryset.manager import *
+from mongoengine.queryset.queryset import *
+from mongoengine.queryset.transform import *
+from mongoengine.queryset.visitor import *
+
+__all__ = (field_list.__all__ + manager.__all__ + queryset.__all__ +
+ transform.__all__ + visitor.__all__)
diff --git a/awx/lib/site-packages/mongoengine/queryset/base.py b/awx/lib/site-packages/mongoengine/queryset/base.py
new file mode 100644
index 0000000000..7ffb9976f8
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/queryset/base.py
@@ -0,0 +1,1750 @@
+from __future__ import absolute_import
+
+import copy
+import itertools
+import operator
+import pprint
+import re
+import warnings
+
+from bson import SON
+from bson.code import Code
+from bson import json_util
+import pymongo
+import pymongo.errors
+from pymongo.common import validate_read_preference
+
+from mongoengine import signals
+from mongoengine.connection import get_db
+from mongoengine.context_managers import switch_db
+from mongoengine.common import _import_class
+from mongoengine.base.common import get_document
+from mongoengine.errors import (OperationError, NotUniqueError,
+ InvalidQueryError, LookUpError)
+from mongoengine.queryset import transform
+from mongoengine.queryset.field_list import QueryFieldList
+from mongoengine.queryset.visitor import Q, QNode
+
+
+__all__ = ('BaseQuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL')
+
+# Delete rules
+DO_NOTHING = 0
+NULLIFY = 1
+CASCADE = 2
+DENY = 3
+PULL = 4
+
+RE_TYPE = type(re.compile(''))
+
+
+class BaseQuerySet(object):
+
+ """A set of results returned from a query. Wraps a MongoDB cursor,
+ providing :class:`~mongoengine.Document` objects as the results.
+ """
+ __dereference = False
+ _auto_dereference = True
+
+ def __init__(self, document, collection):
+ self._document = document
+ self._collection_obj = collection
+ self._mongo_query = None
+ self._query_obj = Q()
+ self._initial_query = {}
+ self._where_clause = None
+ self._loaded_fields = QueryFieldList()
+ self._ordering = None
+ self._snapshot = False
+ self._timeout = True
+ self._class_check = True
+ self._slave_okay = False
+ self._read_preference = None
+ self._iter = False
+ self._scalar = []
+ self._none = False
+ self._as_pymongo = False
+ self._as_pymongo_coerce = False
+ self._search_text = None
+
+ # If inheritance is allowed, only return instances and instances of
+ # subclasses of the class being used
+ if document._meta.get('allow_inheritance') is True:
+ if len(self._document._subclasses) == 1:
+ self._initial_query = {"_cls": self._document._subclasses[0]}
+ else:
+ self._initial_query = {
+ "_cls": {"$in": self._document._subclasses}}
+ self._loaded_fields = QueryFieldList(always_include=['_cls'])
+ self._cursor_obj = None
+ self._limit = None
+ self._skip = None
+ self._hint = -1 # Using -1 as None is a valid value for hint
+ self.only_fields = []
+ self._max_time_ms = None
+
+ def __call__(self, q_obj=None, class_check=True, slave_okay=False,
+ read_preference=None, **query):
+ """Filter the selected documents by calling the
+ :class:`~mongoengine.queryset.QuerySet` with a query.
+
+ :param q_obj: a :class:`~mongoengine.queryset.Q` object to be used in
+ the query; the :class:`~mongoengine.queryset.QuerySet` is filtered
+ multiple times with different :class:`~mongoengine.queryset.Q`
+ objects, only the last one will be used
+ :param class_check: If set to False bypass class name check when
+ querying collection
+ :param slave_okay: if True, allows this query to be run against a
+ replica secondary.
+ :param read_preference: if set, overrides connection-level
+ read_preference from `ReplicaSetConnection`.
+ :param query: Django-style query keyword arguments
+ """
+ query = Q(**query)
+ if q_obj:
+ # make sure proper query object is passed
+ if not isinstance(q_obj, QNode):
+ msg = ("Not a query object: %s. "
+ "Did you intend to use key=value?" % q_obj)
+ raise InvalidQueryError(msg)
+ query &= q_obj
+
+ if read_preference is None:
+ queryset = self.clone()
+ else:
+ # Use the clone provided when setting read_preference
+ queryset = self.read_preference(read_preference)
+
+ queryset._query_obj &= query
+ queryset._mongo_query = None
+ queryset._cursor_obj = None
+ queryset._class_check = class_check
+
+ return queryset
+
+ def __getitem__(self, key):
+ """Support skip and limit using getitem and slicing syntax.
+ """
+ queryset = self.clone()
+
+ # Slice provided
+ if isinstance(key, slice):
+ try:
+ queryset._cursor_obj = queryset._cursor[key]
+ queryset._skip, queryset._limit = key.start, key.stop
+ if key.start and key.stop:
+ queryset._limit = key.stop - key.start
+ except IndexError, err:
+ # PyMongo raises an error if key.start == key.stop, catch it,
+ # bin it, kill it.
+ start = key.start or 0
+ if start >= 0 and key.stop >= 0 and key.step is None:
+ if start == key.stop:
+ queryset.limit(0)
+ queryset._skip = key.start
+ queryset._limit = key.stop - start
+ return queryset
+ raise err
+ # Allow further QuerySet modifications to be performed
+ return queryset
+ # Integer index provided
+ elif isinstance(key, int):
+ if queryset._scalar:
+ return queryset._get_scalar(
+ queryset._document._from_son(queryset._cursor[key],
+ _auto_dereference=self._auto_dereference,
+ only_fields=self.only_fields))
+
+ if queryset._as_pymongo:
+ return queryset._get_as_pymongo(queryset._cursor[key])
+ return queryset._document._from_son(queryset._cursor[key],
+ _auto_dereference=self._auto_dereference, only_fields=self.only_fields)
+
+ raise AttributeError
+
+ def __iter__(self):
+ raise NotImplementedError
+
+ def _has_data(self):
+ """ Retrieves whether cursor has any data. """
+
+ queryset = self.order_by()
+ return False if queryset.first() is None else True
+
+ def __nonzero__(self):
+ """ Avoid to open all records in an if stmt in Py2. """
+
+ return self._has_data()
+
+ def __bool__(self):
+ """ Avoid to open all records in an if stmt in Py3. """
+
+ return self._has_data()
+
+ # Core functions
+
+ def all(self):
+ """Returns all documents."""
+ return self.__call__()
+
+ def filter(self, *q_objs, **query):
+ """An alias of :meth:`~mongoengine.queryset.QuerySet.__call__`
+ """
+ return self.__call__(*q_objs, **query)
+
+ def search_text(self, text, language=None):
+ """
+ Start a text search, using text indexes.
+ Requires: MongoDB server version 2.6+.
+
+ :param language: The language that determines the list of stop words
+ for the search and the rules for the stemmer and tokenizer.
+ If not specified, the search uses the default language of the index.
+ For supported languages, see the MongoDB "Text Search Languages"
+ documentation.
+ """
+ queryset = self.clone()
+ if queryset._search_text:
+ raise OperationError(
+ "It is not possible to use search_text two times.")
+
+ query_kwargs = SON({'$search': text})
+ if language:
+ query_kwargs['$language'] = language
+
+ queryset._query_obj &= Q(__raw__={'$text': query_kwargs})
+ queryset._mongo_query = None
+ queryset._cursor_obj = None
+ queryset._search_text = text
+
+ return queryset
+
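+ # Illustrative sketch (not part of this module): search_text requires a
+ # text index on the queried collection. Names are hypothetical.
+ #
+ #     class News(Document):
+ #         content = StringField()
+ #         meta = {'indexes': [{'fields': ['$content']}]}
+ #
+ #     News.objects.search_text('mongo', language='english')
+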
+ def get(self, *q_objs, **query):
+ """Retrieve the the matching object raising
+ :class:`~mongoengine.queryset.MultipleObjectsReturned` or
+ `DocumentName.MultipleObjectsReturned` exception if multiple results
+ and :class:`~mongoengine.queryset.DoesNotExist` or
+ `DocumentName.DoesNotExist` if no results are found.
+
+ .. versionadded:: 0.3
+ """
+ queryset = self.clone()
+ queryset = queryset.order_by().limit(2)
+ queryset = queryset.filter(*q_objs, **query)
+
+ try:
+ result = queryset.next()
+ except StopIteration:
+ msg = ("%s matching query does not exist."
+ % queryset._document._class_name)
+ raise queryset._document.DoesNotExist(msg)
+ try:
+ queryset.next()
+ except StopIteration:
+ return result
+
+ queryset.rewind()
+ message = u'%d items returned, instead of 1' % queryset.count()
+ raise queryset._document.MultipleObjectsReturned(message)
+
+ def create(self, **kwargs):
+ """Create new object. Returns the saved object instance.
+
+ .. versionadded:: 0.4
+ """
+ return self._document(**kwargs).save()
+
+ def get_or_create(self, write_concern=None, auto_save=True,
+ *q_objs, **query):
+ """Retrieve unique object or create, if it doesn't exist. Returns a
+ tuple of ``(object, created)``, where ``object`` is the retrieved or
+ created object and ``created`` is a boolean specifying whether a new
+ object was created. Raises
+ :class:`~mongoengine.queryset.MultipleObjectsReturned` or
+ `DocumentName.MultipleObjectsReturned` if multiple results are found.
+ A new document will be created if the document doesn't exist; a
+ dictionary of default values for the new document may be provided as a
+ keyword argument called :attr:`defaults`.
+
+ .. note:: This requires two separate operations and therefore a
+ race condition exists. Because there are no transactions in
+ mongoDB other approaches should be investigated, to ensure you
+ don't accidentally duplicate data when using this method. This is
+ now scheduled to be removed before 1.0
+
+ :param write_concern: optional extra keyword arguments used if we
+ have to create a new document.
+ Passes any write_concern onto :meth:`~mongoengine.Document.save`
+
+        :param auto_save: whether to save the new object automatically when
+            it is not found.
+
+ .. deprecated:: 0.8
+ .. versionchanged:: 0.6 - added `auto_save`
+ .. versionadded:: 0.3
+ """
+ msg = ("get_or_create is scheduled to be deprecated. The approach is "
+ "flawed without transactions. Upserts should be preferred.")
+ warnings.warn(msg, DeprecationWarning)
+
+        defaults = query.pop('defaults', {})
+
+ try:
+ doc = self.get(*q_objs, **query)
+ return doc, False
+ except self._document.DoesNotExist:
+ query.update(defaults)
+ doc = self._document(**query)
+
+ if auto_save:
+ doc.save(write_concern=write_concern)
+ return doc, True
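+
+    # Editor's note: illustrative only (``User`` is a hypothetical document);
+    # given the race condition noted above, an upsert via
+    # ``update_one(..., upsert=True)`` is the safer alternative:
+    #
+    #     user, created = User.objects.get_or_create(
+    #         email='a@example.com', defaults={'name': 'Anon'})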
+
+ def first(self):
+ """Retrieve the first object matching the query.
+ """
+ queryset = self.clone()
+ try:
+ result = queryset[0]
+ except IndexError:
+ result = None
+ return result
+
+ def insert(self, doc_or_docs, load_bulk=True, write_concern=None):
+ """bulk insert documents
+
+ :param docs_or_doc: a document or list of documents to be inserted
+ :param load_bulk (optional): If True returns the list of document
+ instances
+ :param write_concern: Extra keyword arguments are passed down to
+ :meth:`~pymongo.collection.Collection.insert`
+ which will be used as options for the resultant
+ ``getLastError`` command. For example,
+ ``insert(..., {w: 2, fsync: True})`` will wait until at least
+ two servers have recorded the write and will force an fsync on
+ each server being written to.
+
+ By default returns document instances, set ``load_bulk`` to False to
+ return just ``ObjectIds``
+
+ .. versionadded:: 0.5
+ """
+ Document = _import_class('Document')
+
+ if write_concern is None:
+ write_concern = {}
+
+ docs = doc_or_docs
+ return_one = False
+ if isinstance(docs, Document) or issubclass(docs.__class__, Document):
+ return_one = True
+ docs = [docs]
+
+ raw = []
+ for doc in docs:
+ if not isinstance(doc, self._document):
+ msg = ("Some documents inserted aren't instances of %s"
+ % str(self._document))
+ raise OperationError(msg)
+ if doc.pk and not doc._created:
+ msg = "Some documents have ObjectIds use doc.update() instead"
+ raise OperationError(msg)
+ raw.append(doc.to_mongo())
+
+ signals.pre_bulk_insert.send(self._document, documents=docs)
+ try:
+ ids = self._collection.insert(raw, **write_concern)
+ except pymongo.errors.DuplicateKeyError, err:
+ message = 'Could not save document (%s)'
+ raise NotUniqueError(message % unicode(err))
+ except pymongo.errors.OperationFailure, err:
+ message = 'Could not save document (%s)'
+ if re.match('^E1100[01] duplicate key', unicode(err)):
+ # E11000 - duplicate key error index
+ # E11001 - duplicate key on update
+ message = u'Tried to save duplicate unique keys (%s)'
+ raise NotUniqueError(message % unicode(err))
+ raise OperationError(message % unicode(err))
+
+ if not load_bulk:
+ signals.post_bulk_insert.send(
+ self._document, documents=docs, loaded=False)
+ return return_one and ids[0] or ids
+
+ documents = self.in_bulk(ids)
+ results = []
+ for obj_id in ids:
+ results.append(documents.get(obj_id))
+ signals.post_bulk_insert.send(
+ self._document, documents=results, loaded=True)
+ return return_one and results[0] or results
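+
+    # Editor's note: usage sketch (``Comment`` is hypothetical):
+    #
+    #     instances = Comment.objects.insert([Comment(text='a'),
+    #                                         Comment(text='b')])
+    #     # or, to get just the ObjectIds back:
+    #     ids = Comment.objects.insert([Comment(text='c')], load_bulk=False)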
+
+ def count(self, with_limit_and_skip=False):
+ """Count the selected elements in the query.
+
+ :param with_limit_and_skip (optional): take any :meth:`limit` or
+ :meth:`skip` that has been applied to this cursor into account when
+ getting the count
+ """
+        if (self._limit == 0 and with_limit_and_skip) or self._none:
+ return 0
+ return self._cursor.count(with_limit_and_skip=with_limit_and_skip)
+
+ def delete(self, write_concern=None, _from_doc_delete=False):
+ """Delete the documents matched by the query.
+
+ :param write_concern: Extra keyword arguments are passed down which
+ will be used as options for the resultant
+ ``getLastError`` command. For example,
+ ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
+ wait until at least two servers have recorded the write and
+ will force an fsync on the primary server.
+        :param _from_doc_delete: True when called from a document's delete
+            method; the delete signals have already been triggered, so they
+            are not fired again here.
+
+        :returns: the number of deleted documents
+ """
+ queryset = self.clone()
+ doc = queryset._document
+
+ if write_concern is None:
+ write_concern = {}
+
+ # Handle deletes where skips or limits have been applied or
+ # there is an untriggered delete signal
+ has_delete_signal = signals.signals_available and (
+ signals.pre_delete.has_receivers_for(self._document) or
+ signals.post_delete.has_receivers_for(self._document))
+
+ call_document_delete = (queryset._skip or queryset._limit or
+ has_delete_signal) and not _from_doc_delete
+
+ if call_document_delete:
+ cnt = 0
+ for doc in queryset:
+ doc.delete(write_concern=write_concern)
+ cnt += 1
+ return cnt
+
+ delete_rules = doc._meta.get('delete_rules') or {}
+ # Check for DENY rules before actually deleting/nullifying any other
+ # references
+ for rule_entry in delete_rules:
+ document_cls, field_name = rule_entry
+ if document_cls._meta.get('abstract'):
+ continue
+ rule = doc._meta['delete_rules'][rule_entry]
+ if rule == DENY and document_cls.objects(
+ **{field_name + '__in': self}).count() > 0:
+ msg = ("Could not delete document (%s.%s refers to it)"
+ % (document_cls.__name__, field_name))
+ raise OperationError(msg)
+
+ for rule_entry in delete_rules:
+ document_cls, field_name = rule_entry
+ if document_cls._meta.get('abstract'):
+ continue
+ rule = doc._meta['delete_rules'][rule_entry]
+ if rule == CASCADE:
+ ref_q = document_cls.objects(**{field_name + '__in': self})
+ ref_q_count = ref_q.count()
+                if ref_q_count > 0:
+ ref_q.delete(write_concern=write_concern)
+ elif rule == NULLIFY:
+ document_cls.objects(**{field_name + '__in': self}).update(
+ write_concern=write_concern, **{'unset__%s' % field_name: 1})
+ elif rule == PULL:
+ document_cls.objects(**{field_name + '__in': self}).update(
+ write_concern=write_concern,
+ **{'pull_all__%s' % field_name: self})
+
+ result = queryset._collection.remove(queryset._query, **write_concern)
+ return result["n"]
+
+ def update(self, upsert=False, multi=True, write_concern=None,
+ full_result=False, **update):
+ """Perform an atomic update on the fields matched by the query.
+
+ :param upsert: Any existing document with that "_id" is overwritten.
+ :param multi: Update multiple documents.
+ :param write_concern: Extra keyword arguments are passed down which
+ will be used as options for the resultant
+ ``getLastError`` command. For example,
+ ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
+ wait until at least two servers have recorded the write and
+ will force an fsync on the primary server.
+ :param full_result: Return the full result rather than just the number
+ updated.
+ :param update: Django-style update keyword arguments
+
+ .. versionadded:: 0.2
+ """
+ if not update and not upsert:
+ raise OperationError("No update parameters, would remove data")
+
+ if write_concern is None:
+ write_concern = {}
+
+ queryset = self.clone()
+ query = queryset._query
+ update = transform.update(queryset._document, **update)
+
+ # If doing an atomic upsert on an inheritable class
+ # then ensure we add _cls to the update operation
+ if upsert and '_cls' in query:
+ if '$set' in update:
+ update["$set"]["_cls"] = queryset._document._class_name
+ else:
+ update["$set"] = {"_cls": queryset._document._class_name}
+ try:
+ result = queryset._collection.update(query, update, multi=multi,
+ upsert=upsert, **write_concern)
+ if full_result:
+ return result
+ elif result:
+ return result['n']
+ except pymongo.errors.DuplicateKeyError, err:
+ raise NotUniqueError(u'Update failed (%s)' % unicode(err))
+ except pymongo.errors.OperationFailure, err:
+ if unicode(err) == u'multi not coded yet':
+ message = u'update() method requires MongoDB 1.1.3+'
+ raise OperationError(message)
+ raise OperationError(u'Update failed (%s)' % unicode(err))
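+
+    # Editor's note: usage sketch (``BlogPost`` is hypothetical); keyword
+    # arguments use the Django-style ``<operator>__<field>`` form handled by
+    # ``transform.update``:
+    #
+    #     n = BlogPost.objects(published=False).update(set__published=True)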
+
+ def update_one(self, upsert=False, write_concern=None, **update):
+ """Perform an atomic update on first field matched by the query.
+
+ :param upsert: Any existing document with that "_id" is overwritten.
+ :param write_concern: Extra keyword arguments are passed down which
+ will be used as options for the resultant
+ ``getLastError`` command. For example,
+ ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
+ wait until at least two servers have recorded the write and
+ will force an fsync on the primary server.
+ :param update: Django-style update keyword arguments
+
+ .. versionadded:: 0.2
+ """
+ return self.update(
+ upsert=upsert, multi=False, write_concern=write_concern, **update)
+
+ def modify(self, upsert=False, full_response=False, remove=False, new=False, **update):
+ """Update and return the updated document.
+
+ Returns either the document before or after modification based on `new`
+ parameter. If no documents match the query and `upsert` is false,
+ returns ``None``. If upserting and `new` is false, returns ``None``.
+
+ If the full_response parameter is ``True``, the return value will be
+ the entire response object from the server, including the 'ok' and
+ 'lastErrorObject' fields, rather than just the modified document.
+ This is useful mainly because the 'lastErrorObject' document holds
+ information about the command's execution.
+
+ :param upsert: insert if document doesn't exist (default ``False``)
+ :param full_response: return the entire response object from the
+ server (default ``False``)
+ :param remove: remove rather than updating (default ``False``)
+ :param new: return updated rather than original document
+ (default ``False``)
+ :param update: Django-style update keyword arguments
+
+ .. versionadded:: 0.9
+ """
+
+ if remove and new:
+ raise OperationError("Conflicting parameters: remove and new")
+
+ if not update and not upsert and not remove:
+ raise OperationError(
+ "No update parameters, must either update or remove")
+
+ queryset = self.clone()
+ query = queryset._query
+ update = transform.update(queryset._document, **update)
+ sort = queryset._ordering
+
+ try:
+ result = queryset._collection.find_and_modify(
+ query, update, upsert=upsert, sort=sort, remove=remove, new=new,
+ full_response=full_response, **self._cursor_args)
+ except pymongo.errors.DuplicateKeyError, err:
+ raise NotUniqueError(u"Update failed (%s)" % err)
+ except pymongo.errors.OperationFailure, err:
+ raise OperationError(u"Update failed (%s)" % err)
+
+ if full_response:
+ if result["value"] is not None:
+ result["value"] = self._document._from_son(result["value"], only_fields=self.only_fields)
+ else:
+ if result is not None:
+ result = self._document._from_son(result, only_fields=self.only_fields)
+
+ return result
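+
+    # Editor's note: illustrative only (``Counter`` is hypothetical);
+    # atomically increment and fetch the post-update document:
+    #
+    #     counter = Counter.objects(name='hits').modify(
+    #         inc__value=1, new=True, upsert=True)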
+
+ def with_id(self, object_id):
+ """Retrieve the object matching the id provided. Uses `object_id` only
+ and raises InvalidQueryError if a filter has been applied. Returns
+ `None` if no document exists with that id.
+
+ :param object_id: the value for the id of the document to look up
+
+ .. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set
+ """
+ queryset = self.clone()
+ if not queryset._query_obj.empty:
+ msg = "Cannot use a filter whilst using `with_id`"
+ raise InvalidQueryError(msg)
+ return queryset.filter(pk=object_id).first()
+
+ def in_bulk(self, object_ids):
+ """Retrieve a set of documents by their ids.
+
+ :param object_ids: a list or tuple of ``ObjectId``\ s
+ :rtype: dict of ObjectIds as keys and collection-specific
+ Document subclasses as values.
+
+ .. versionadded:: 0.3
+ """
+ doc_map = {}
+
+ docs = self._collection.find({'_id': {'$in': object_ids}},
+ **self._cursor_args)
+ if self._scalar:
+ for doc in docs:
+ doc_map[doc['_id']] = self._get_scalar(
+ self._document._from_son(doc, only_fields=self.only_fields))
+ elif self._as_pymongo:
+ for doc in docs:
+ doc_map[doc['_id']] = self._get_as_pymongo(doc)
+ else:
+ for doc in docs:
+ doc_map[doc['_id']] = self._document._from_son(doc,
+ only_fields=self.only_fields,
+ _auto_dereference=self._auto_dereference)
+
+ return doc_map
+
+ def none(self):
+ """Helper that just returns a list"""
+ queryset = self.clone()
+ queryset._none = True
+ return queryset
+
+ def no_sub_classes(self):
+ """
+ Only return instances of this document and not any inherited documents
+ """
+ if self._document._meta.get('allow_inheritance') is True:
+ self._initial_query = {"_cls": self._document._class_name}
+
+ return self
+
+ def using(self, alias):
+ """This method is for controlling which database the QuerySet will be evaluated against if you are using more than one database.
+
+ :param alias: The database alias
+
+ .. versionadded:: 0.9
+ """
+
+ with switch_db(self._document, alias) as cls:
+ collection = cls._get_collection()
+
+ return self.clone_into(self.__class__(self._document, collection))
+
+ def clone(self):
+ """Creates a copy of the current
+ :class:`~mongoengine.queryset.QuerySet`
+
+ .. versionadded:: 0.5
+ """
+ return self.clone_into(self.__class__(self._document, self._collection_obj))
+
+ def clone_into(self, cls):
+ """Creates a copy of the current
+ :class:`~mongoengine.queryset.base.BaseQuerySet` into another child class
+ """
+ if not isinstance(cls, BaseQuerySet):
+ raise OperationError(
+ '%s is not a subclass of BaseQuerySet' % cls.__name__)
+
+ copy_props = ('_mongo_query', '_initial_query', '_none', '_query_obj',
+ '_where_clause', '_loaded_fields', '_ordering', '_snapshot',
+ '_timeout', '_class_check', '_slave_okay', '_read_preference',
+ '_iter', '_scalar', '_as_pymongo', '_as_pymongo_coerce',
+ '_limit', '_skip', '_hint', '_auto_dereference',
+ '_search_text', 'only_fields', '_max_time_ms')
+
+ for prop in copy_props:
+ val = getattr(self, prop)
+ setattr(cls, prop, copy.copy(val))
+
+ if self._cursor_obj:
+ cls._cursor_obj = self._cursor_obj.clone()
+
+ return cls
+
+ def select_related(self, max_depth=1):
+ """Handles dereferencing of :class:`~bson.dbref.DBRef` objects or
+ :class:`~bson.object_id.ObjectId` a maximum depth in order to cut down
+ the number queries to mongodb.
+
+ .. versionadded:: 0.5
+ """
+ # Make select related work the same for querysets
+ max_depth += 1
+ queryset = self.clone()
+ return queryset._dereference(queryset, max_depth=max_depth)
+
+ def limit(self, n):
+ """Limit the number of returned documents to `n`. This may also be
+ achieved using array-slicing syntax (e.g. ``User.objects[:5]``).
+
+ :param n: the maximum number of objects to return
+ """
+ queryset = self.clone()
+ if n == 0:
+ queryset._cursor.limit(1)
+ else:
+ queryset._cursor.limit(n)
+ queryset._limit = n
+        # Return the cloned queryset to allow chaining
+ return queryset
+
+ def skip(self, n):
+ """Skip `n` documents before returning the results. This may also be
+ achieved using array-slicing syntax (e.g. ``User.objects[5:]``).
+
+ :param n: the number of objects to skip before returning results
+ """
+ queryset = self.clone()
+ queryset._cursor.skip(n)
+ queryset._skip = n
+ return queryset
+
+ def hint(self, index=None):
+ """Added 'hint' support, telling Mongo the proper index to use for the
+ query.
+
+ Judicious use of hints can greatly improve query performance. When
+ doing a query on multiple fields (at least one of which is indexed)
+ pass the indexed field as a hint to the query.
+
+ Hinting will not do anything if the corresponding index does not exist.
+ The last hint applied to this cursor takes precedence over all others.
+
+ .. versionadded:: 0.5
+ """
+ queryset = self.clone()
+ queryset._cursor.hint(index)
+ queryset._hint = index
+ return queryset
+
+ def distinct(self, field):
+ """Return a list of distinct values for a given field.
+
+ :param field: the field to select distinct values from
+
+ .. note:: This is a command and won't take ordering or limit into
+ account.
+
+ .. versionadded:: 0.4
+ .. versionchanged:: 0.5 - Fixed handling references
+        .. versionchanged:: 0.6 - Improved db_field reference handling
+ """
+ queryset = self.clone()
+ try:
+ field = self._fields_to_dbfields([field]).pop()
+ finally:
+ distinct = self._dereference(queryset._cursor.distinct(field), 1,
+ name=field, instance=self._document)
+
+ doc_field = self._document._fields.get(field.split('.', 1)[0])
+ instance = False
+ # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
+ EmbeddedDocumentField = _import_class('EmbeddedDocumentField')
+ ListField = _import_class('ListField')
+ GenericEmbeddedDocumentField = _import_class('GenericEmbeddedDocumentField')
+ if isinstance(doc_field, ListField):
+ doc_field = getattr(doc_field, "field", doc_field)
+ if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
+ instance = getattr(doc_field, "document_type", False)
+ # handle distinct on subdocuments
+ if '.' in field:
+ for field_part in field.split('.')[1:]:
+ # if looping on embedded document, get the document type instance
+ if instance and isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
+ doc_field = instance
+ # now get the subdocument
+ doc_field = getattr(doc_field, field_part, doc_field)
+ # We may need to cast to the correct type eg. ListField(EmbeddedDocumentField)
+ if isinstance(doc_field, ListField):
+ doc_field = getattr(doc_field, "field", doc_field)
+ if isinstance(doc_field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)):
+ instance = getattr(doc_field, "document_type", False)
+ if instance and isinstance(doc_field, (EmbeddedDocumentField,
+ GenericEmbeddedDocumentField)):
+ distinct = [instance(**doc) for doc in distinct]
+ return distinct
+
+ def only(self, *fields):
+ """Load only a subset of this document's fields. ::
+
+ post = BlogPost.objects(...).only("title", "author.name")
+
+        .. note:: `only()` is chainable and will perform a union, so the
+            following will fetch both `title` and `author.name`::
+
+ post = BlogPost.objects.only("title").only("author.name")
+
+ :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any
+ field filters.
+
+ :param fields: fields to include
+
+ .. versionadded:: 0.3
+ .. versionchanged:: 0.5 - Added subfield support
+ """
+ fields = dict([(f, QueryFieldList.ONLY) for f in fields])
+ self.only_fields = fields.keys()
+ return self.fields(True, **fields)
+
+ def exclude(self, *fields):
+ """Opposite to .only(), exclude some document's fields. ::
+
+ post = BlogPost.objects(...).exclude("comments")
+
+        .. note:: `exclude()` is chainable and will perform a union, so the
+            following will exclude both `title` and `author.name`::
+
+ post = BlogPost.objects.exclude("title").exclude("author.name")
+
+ :func:`~mongoengine.queryset.QuerySet.all_fields` will reset any
+ field filters.
+
+ :param fields: fields to exclude
+
+ .. versionadded:: 0.5
+ """
+ fields = dict([(f, QueryFieldList.EXCLUDE) for f in fields])
+ return self.fields(**fields)
+
+ def fields(self, _only_called=False, **kwargs):
+ """Manipulate how you load this document's fields. Used by `.only()`
+ and `.exclude()` to manipulate which fields to retrieve. Fields also
+ allows for a greater level of control for example:
+
+ Retrieving a Subrange of Array Elements:
+
+ You can use the $slice operator to retrieve a subrange of elements in
+ an array. For example to get the first 5 comments::
+
+ post = BlogPost.objects(...).fields(slice__comments=5)
+
+ :param kwargs: A dictionary identifying what to include
+
+ .. versionadded:: 0.5
+ """
+
+ # Check for an operator and transform to mongo-style if there is
+ operators = ["slice"]
+ cleaned_fields = []
+ for key, value in kwargs.items():
+ parts = key.split('__')
+ op = None
+ if parts[0] in operators:
+ op = parts.pop(0)
+ value = {'$' + op: value}
+ key = '.'.join(parts)
+ cleaned_fields.append((key, value))
+
+ fields = sorted(cleaned_fields, key=operator.itemgetter(1))
+ queryset = self.clone()
+ for value, group in itertools.groupby(fields, lambda x: x[1]):
+ fields = [field for field, value in group]
+ fields = queryset._fields_to_dbfields(fields)
+ queryset._loaded_fields += QueryFieldList(
+ fields, value=value, _only_called=_only_called)
+
+ return queryset
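+
+    # Editor's note: further sketches (``BlogPost`` is hypothetical):
+    #
+    #     BlogPost.objects.fields(slice__comments=[10, 5])  # skip 10, take 5
+    #     BlogPost.objects.fields(title=1)  # include only the title field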
+
+ def all_fields(self):
+ """Include all fields. Reset all previously calls of .only() or
+ .exclude(). ::
+
+ post = BlogPost.objects.exclude("comments").all_fields()
+
+ .. versionadded:: 0.5
+ """
+ queryset = self.clone()
+ queryset._loaded_fields = QueryFieldList(
+ always_include=queryset._loaded_fields.always_include)
+ return queryset
+
+ def order_by(self, *keys):
+ """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The
+ order may be specified by prepending each of the keys by a + or a -.
+ Ascending order is assumed.
+
+ :param keys: fields to order the query results by; keys may be
+ prefixed with **+** or **-** to determine the ordering direction
+ """
+ queryset = self.clone()
+ queryset._ordering = queryset._get_order_by(keys)
+ return queryset
+
+ def explain(self, format=False):
+ """Return an explain plan record for the
+ :class:`~mongoengine.queryset.QuerySet`\ 's cursor.
+
+ :param format: format the plan before returning it
+ """
+ plan = self._cursor.explain()
+ if format:
+ plan = pprint.pformat(plan)
+ return plan
+
+ def snapshot(self, enabled):
+ """Enable or disable snapshot mode when querying.
+
+ :param enabled: whether or not snapshot mode is enabled
+
+        .. versionchanged:: 0.5 - made chainable
+ """
+ queryset = self.clone()
+ queryset._snapshot = enabled
+ return queryset
+
+ def timeout(self, enabled):
+ """Enable or disable the default mongod timeout when querying.
+
+ :param enabled: whether or not the timeout is used
+
+        .. versionchanged:: 0.5 - made chainable
+ """
+ queryset = self.clone()
+ queryset._timeout = enabled
+ return queryset
+
+ def slave_okay(self, enabled):
+ """Enable or disable the slave_okay when querying.
+
+ :param enabled: whether or not the slave_okay is enabled
+ """
+ queryset = self.clone()
+ queryset._slave_okay = enabled
+ return queryset
+
+ def read_preference(self, read_preference):
+ """Change the read_preference when querying.
+
+ :param read_preference: override ReplicaSetConnection-level
+ preference.
+ """
+ validate_read_preference('read_preference', read_preference)
+ queryset = self.clone()
+ queryset._read_preference = read_preference
+ return queryset
+
+ def scalar(self, *fields):
+ """Instead of returning Document instances, return either a specific
+ value or a tuple of values in order.
+
+ Can be used along with
+ :func:`~mongoengine.queryset.QuerySet.no_dereference` to turn off
+ dereferencing.
+
+        .. note:: This affects all results and can be unset by calling
+ ``scalar`` without arguments. Calls ``only`` automatically.
+
+ :param fields: One or more fields to return instead of a Document.
+ """
+ queryset = self.clone()
+ queryset._scalar = list(fields)
+
+ if fields:
+ queryset = queryset.only(*fields)
+ else:
+ queryset = queryset.all_fields()
+
+ return queryset
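+
+    # Editor's note: usage sketch (``User`` is hypothetical):
+    #
+    #     names = list(User.objects.scalar('name'))                # bare values
+    #     pairs = list(User.objects.values_list('name', 'email'))  # tuples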
+
+ def values_list(self, *fields):
+ """An alias for scalar"""
+ return self.scalar(*fields)
+
+ def as_pymongo(self, coerce_types=False):
+ """Instead of returning Document instances, return raw values from
+ pymongo.
+
+        :param coerce_types: if True, field types (where applicable) will be
+            used to coerce the returned values.
+ """
+ queryset = self.clone()
+ queryset._as_pymongo = True
+ queryset._as_pymongo_coerce = coerce_types
+ return queryset
+
+ def max_time_ms(self, ms):
+ """Wait `ms` milliseconds before killing the query on the server
+
+ :param ms: the number of milliseconds before killing the query on the server
+ """
+ return self._chainable_method("max_time_ms", ms)
+
+ # JSON Helpers
+
+ def to_json(self, *args, **kwargs):
+ """Converts a queryset to JSON"""
+ return json_util.dumps(self.as_pymongo(), *args, **kwargs)
+
+ def from_json(self, json_data):
+ """Converts json data to unsaved objects"""
+ son_data = json_util.loads(json_data)
+ return [self._document._from_son(data, only_fields=self.only_fields) for data in son_data]
+
+ def aggregate(self, *pipeline, **kwargs):
+ """
+        Perform an aggregation based on your queryset parameters.
+ :param pipeline: list of aggregation commands,\
+ see: http://docs.mongodb.org/manual/core/aggregation-pipeline/
+
+ .. versionadded:: 0.9
+ """
+ initial_pipeline = []
+
+ if self._query:
+ initial_pipeline.append({'$match': self._query})
+
+ if self._ordering:
+ initial_pipeline.append({'$sort': dict(self._ordering)})
+
+ if self._limit is not None:
+ initial_pipeline.append({'$limit': self._limit})
+
+ if self._skip is not None:
+ initial_pipeline.append({'$skip': self._skip})
+
+ pipeline = initial_pipeline + list(pipeline)
+
+ return self._collection.aggregate(pipeline, cursor={}, **kwargs)
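+
+    # Editor's note: usage sketch (``Order`` is hypothetical); the queryset's
+    # filter, ordering, limit and skip are prepended to the pipeline above:
+    #
+    #     totals = Order.objects(status='paid').aggregate(
+    #         {'$group': {'_id': '$customer', 'total': {'$sum': '$amount'}}})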
+
+ # JS functionality
+ def map_reduce(self, map_f, reduce_f, output, finalize_f=None, limit=None,
+ scope=None):
+ """Perform a map/reduce query using the current query spec
+ and ordering. While ``map_reduce`` respects ``QuerySet`` chaining,
+        it must be the last call made, as it does not return a malleable
+ ``QuerySet``.
+
+ See the :meth:`~mongoengine.tests.QuerySetTest.test_map_reduce`
+ and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced`
+ tests in ``tests.queryset.QuerySetTest`` for usage examples.
+
+ :param map_f: map function, as :class:`~bson.code.Code` or string
+ :param reduce_f: reduce function, as
+ :class:`~bson.code.Code` or string
+        :param output: output collection name, if set to 'inline' will try to
+            use :class:`~pymongo.collection.Collection.inline_map_reduce`.
+            This can also be a dictionary containing output options
+ see: http://docs.mongodb.org/manual/reference/command/mapReduce/#dbcmd.mapReduce
+ :param finalize_f: finalize function, an optional function that
+ performs any post-reduction processing.
+ :param scope: values to insert into map/reduce global scope. Optional.
+ :param limit: number of objects from current query to provide
+ to map/reduce method
+
+ Returns an iterator yielding
+ :class:`~mongoengine.document.MapReduceDocument`.
+
+ .. note::
+
+ Map/Reduce changed in server version **>= 1.7.4**. The PyMongo
+ :meth:`~pymongo.collection.Collection.map_reduce` helper requires
+ PyMongo version **>= 1.11**.
+
+ .. versionchanged:: 0.5
+ - removed ``keep_temp`` keyword argument, which was only relevant
+ for MongoDB server versions older than 1.7.4
+
+ .. versionadded:: 0.3
+ """
+ queryset = self.clone()
+
+ MapReduceDocument = _import_class('MapReduceDocument')
+
+ if not hasattr(self._collection, "map_reduce"):
+ raise NotImplementedError("Requires MongoDB >= 1.7.1")
+
+ map_f_scope = {}
+ if isinstance(map_f, Code):
+ map_f_scope = map_f.scope
+ map_f = unicode(map_f)
+ map_f = Code(queryset._sub_js_fields(map_f), map_f_scope)
+
+ reduce_f_scope = {}
+ if isinstance(reduce_f, Code):
+ reduce_f_scope = reduce_f.scope
+ reduce_f = unicode(reduce_f)
+ reduce_f_code = queryset._sub_js_fields(reduce_f)
+ reduce_f = Code(reduce_f_code, reduce_f_scope)
+
+ mr_args = {'query': queryset._query}
+
+ if finalize_f:
+ finalize_f_scope = {}
+ if isinstance(finalize_f, Code):
+ finalize_f_scope = finalize_f.scope
+ finalize_f = unicode(finalize_f)
+ finalize_f_code = queryset._sub_js_fields(finalize_f)
+ finalize_f = Code(finalize_f_code, finalize_f_scope)
+ mr_args['finalize'] = finalize_f
+
+ if scope:
+ mr_args['scope'] = scope
+
+ if limit:
+ mr_args['limit'] = limit
+
+ if output == 'inline' and not queryset._ordering:
+ map_reduce_function = 'inline_map_reduce'
+ else:
+ map_reduce_function = 'map_reduce'
+
+ if isinstance(output, basestring):
+ mr_args['out'] = output
+
+ elif isinstance(output, dict):
+ ordered_output = []
+
+ for part in ('replace', 'merge', 'reduce'):
+ value = output.get(part)
+ if value:
+ ordered_output.append((part, value))
+ break
+
+ else:
+ raise OperationError("actionData not specified for output")
+
+ db_alias = output.get('db_alias')
+            remaining_args = ['db', 'sharded', 'nonAtomic']
+
+            if db_alias:
+                ordered_output.append(('db', get_db(db_alias).name))
+                del remaining_args[0]
+
+            for part in remaining_args:
+ value = output.get(part)
+ if value:
+ ordered_output.append((part, value))
+
+ mr_args['out'] = SON(ordered_output)
+
+ results = getattr(queryset._collection, map_reduce_function)(
+ map_f, reduce_f, **mr_args)
+
+ if map_reduce_function == 'map_reduce':
+ results = results.find()
+
+ if queryset._ordering:
+ results = results.sort(queryset._ordering)
+
+ for doc in results:
+ yield MapReduceDocument(queryset._document, queryset._collection,
+ doc['_id'], doc['value'])
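+
+    # Editor's note: usage sketch (``Page`` is hypothetical):
+    #
+    #     map_f = 'function () { emit(this.category, 1); }'
+    #     reduce_f = 'function (key, values) { return Array.sum(values); }'
+    #     for mr in Page.objects.map_reduce(map_f, reduce_f, output='inline'):
+    #         print mr.key, mr.value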
+
+ def exec_js(self, code, *fields, **options):
+ """Execute a Javascript function on the server. A list of fields may be
+ provided, which will be translated to their correct names and supplied
+ as the arguments to the function. A few extra variables are added to
+ the function's scope: ``collection``, which is the name of the
+ collection in use; ``query``, which is an object representing the
+ current query; and ``options``, which is an object containing any
+ options specified as keyword arguments.
+
+ As fields in MongoEngine may use different names in the database (set
+ using the :attr:`db_field` keyword argument to a :class:`Field`
+ constructor), a mechanism exists for replacing MongoEngine field names
+ with the database field names in Javascript code. When accessing a
+ field, use square-bracket notation, and prefix the MongoEngine field
+ name with a tilde (~).
+
+ :param code: a string of Javascript code to execute
+ :param fields: fields that you will be using in your function, which
+ will be passed in to your function as arguments
+ :param options: options that you want available to the function
+ (accessed in Javascript through the ``options`` object)
+ """
+ queryset = self.clone()
+
+ code = queryset._sub_js_fields(code)
+
+ fields = [queryset._document._translate_field_name(f) for f in fields]
+ collection = queryset._document._get_collection_name()
+
+ scope = {
+ 'collection': collection,
+ 'options': options or {},
+ }
+
+ query = queryset._query
+ if queryset._where_clause:
+ query['$where'] = queryset._where_clause
+
+ scope['query'] = query
+ code = Code(code, scope=scope)
+
+ db = queryset._document._get_db()
+ return db.eval(code, *fields)
+
+ def where(self, where_clause):
+ """Filter ``QuerySet`` results with a ``$where`` clause (a Javascript
+ expression). Performs automatic field name substitution like
+        :meth:`mongoengine.queryset.QuerySet.exec_js`.
+
+ .. note:: When using this mode of query, the database will call your
+ function, or evaluate your predicate clause, for each object
+ in the collection.
+
+ .. versionadded:: 0.5
+ """
+ queryset = self.clone()
+ where_clause = queryset._sub_js_fields(where_clause)
+ queryset._where_clause = where_clause
+ return queryset
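+
+    # Editor's note: usage sketch (``Post`` is hypothetical); ``[~field]``
+    # is substituted with the database name of the field:
+    #
+    #     posts = Post.objects.where('this[~comment_count] > 5')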
+
+ def sum(self, field):
+ """Sum over the values of the specified field.
+
+ :param field: the field to sum over; use dot-notation to refer to
+ embedded document fields
+
+        .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesn't work
+ with sharding.
+ """
+ map_func = """
+ function() {
+ var path = '{{~%(field)s}}'.split('.'),
+ field = this;
+
+ for (p in path) {
+ if (typeof field != 'undefined')
+ field = field[path[p]];
+ else
+ break;
+ }
+
+ if (field && field.constructor == Array) {
+ field.forEach(function(item) {
+ emit(1, item||0);
+ });
+ } else if (typeof field != 'undefined') {
+ emit(1, field||0);
+ }
+ }
+ """ % dict(field=field)
+
+ reduce_func = Code("""
+ function(key, values) {
+ var sum = 0;
+ for (var i in values) {
+ sum += values[i];
+ }
+ return sum;
+ }
+ """)
+
+ for result in self.map_reduce(map_func, reduce_func, output='inline'):
+ return result.value
+ else:
+ return 0
+
+ def average(self, field):
+ """Average over the values of the specified field.
+
+ :param field: the field to average over; use dot-notation to refer to
+ embedded document fields
+
+        .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesn't work
+ with sharding.
+ """
+ map_func = """
+ function() {
+ var path = '{{~%(field)s}}'.split('.'),
+ field = this;
+
+ for (p in path) {
+ if (typeof field != 'undefined')
+ field = field[path[p]];
+ else
+ break;
+ }
+
+ if (field && field.constructor == Array) {
+ field.forEach(function(item) {
+ emit(1, {t: item||0, c: 1});
+ });
+ } else if (typeof field != 'undefined') {
+ emit(1, {t: field||0, c: 1});
+ }
+ }
+ """ % dict(field=field)
+
+ reduce_func = Code("""
+ function(key, values) {
+ var out = {t: 0, c: 0};
+ for (var i in values) {
+ var value = values[i];
+ out.t += value.t;
+ out.c += value.c;
+ }
+ return out;
+ }
+ """)
+
+ finalize_func = Code("""
+ function(key, value) {
+ return value.t / value.c;
+ }
+ """)
+
+ for result in self.map_reduce(map_func, reduce_func,
+ finalize_f=finalize_func, output='inline'):
+ return result.value
+ else:
+ return 0
+
+ def item_frequencies(self, field, normalize=False, map_reduce=True):
+ """Returns a dictionary of all items present in a field across
+ the whole queried set of documents, and their corresponding frequency.
+ This is useful for generating tag clouds, or searching documents.
+
+ .. note::
+
+            Can only do direct simple mappings and cannot map across
+            :class:`~mongoengine.fields.ReferenceField` or
+            :class:`~mongoengine.fields.GenericReferenceField`; for more
+            complex counting, a manual map-reduce call is required.
+
+ If the field is a :class:`~mongoengine.fields.ListField`, the items within
+ each list will be counted individually.
+
+ :param field: the field to use
+ :param normalize: normalize the results so they add to 1.0
+        :param map_reduce: use map_reduce instead of exec_js
+
+ .. versionchanged:: 0.5 defaults to map_reduce and can handle embedded
+ document lookups
+ """
+ if map_reduce:
+ return self._item_frequencies_map_reduce(field,
+ normalize=normalize)
+ return self._item_frequencies_exec_js(field, normalize=normalize)
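+
+    # Editor's note: usage sketch (a ``Post`` document with a ``tags``
+    # ListField is hypothetical); normalized frequencies sum to 1.0:
+    #
+    #     freqs = Post.objects.item_frequencies('tags', normalize=True)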
+
+ # Iterator helpers
+
+ def next(self):
+ """Wrap the result in a :class:`~mongoengine.Document` object.
+ """
+ if self._limit == 0 or self._none:
+ raise StopIteration
+
+ raw_doc = self._cursor.next()
+ if self._as_pymongo:
+ return self._get_as_pymongo(raw_doc)
+ doc = self._document._from_son(raw_doc,
+ _auto_dereference=self._auto_dereference, only_fields=self.only_fields)
+
+ if self._scalar:
+ return self._get_scalar(doc)
+
+ return doc
+
+ def rewind(self):
+ """Rewind the cursor to its unevaluated state.
+
+
+ .. versionadded:: 0.3
+ """
+ self._iter = False
+ self._cursor.rewind()
+
+ # Properties
+
+ @property
+ def _collection(self):
+ """Property that returns the collection object. This allows us to
+ perform operations only if the collection is accessed.
+ """
+ return self._collection_obj
+
+ @property
+ def _cursor_args(self):
+ cursor_args = {
+ 'snapshot': self._snapshot,
+ 'timeout': self._timeout
+ }
+ if self._read_preference is not None:
+ cursor_args['read_preference'] = self._read_preference
+ else:
+ cursor_args['slave_okay'] = self._slave_okay
+ if self._loaded_fields:
+ cursor_args['fields'] = self._loaded_fields.as_dict()
+
+ if self._search_text:
+ if 'fields' not in cursor_args:
+ cursor_args['fields'] = {}
+
+ cursor_args['fields']['_text_score'] = {'$meta': "textScore"}
+
+ return cursor_args
+
+ @property
+ def _cursor(self):
+ if self._cursor_obj is None:
+
+ self._cursor_obj = self._collection.find(self._query,
+ **self._cursor_args)
+ # Apply where clauses to cursor
+ if self._where_clause:
+ where_clause = self._sub_js_fields(self._where_clause)
+ self._cursor_obj.where(where_clause)
+
+ if self._ordering:
+ # Apply query ordering
+ self._cursor_obj.sort(self._ordering)
+ elif self._ordering is None and self._document._meta['ordering']:
+ # Otherwise, apply the ordering from the document model, unless
+ # it's been explicitly cleared via order_by with no arguments
+ order = self._get_order_by(self._document._meta['ordering'])
+ self._cursor_obj.sort(order)
+
+ if self._limit is not None:
+ self._cursor_obj.limit(self._limit)
+
+ if self._skip is not None:
+ self._cursor_obj.skip(self._skip)
+
+ if self._hint != -1:
+ self._cursor_obj.hint(self._hint)
+
+ return self._cursor_obj
+
+ def __deepcopy__(self, memo):
+ """Essential for chained queries with ReferenceFields involved"""
+ return self.clone()
+
+ @property
+ def _query(self):
+ if self._mongo_query is None:
+ self._mongo_query = self._query_obj.to_query(self._document)
+ if self._class_check and self._initial_query:
+ if "_cls" in self._mongo_query:
+ self._mongo_query = {"$and": [self._initial_query, self._mongo_query]}
+ else:
+ self._mongo_query.update(self._initial_query)
+ return self._mongo_query
+
+ @property
+ def _dereference(self):
+ if not self.__dereference:
+ self.__dereference = _import_class('DeReference')()
+ return self.__dereference
+
+ def no_dereference(self):
+ """Turn off any dereferencing for the results of this queryset.
+ """
+ queryset = self.clone()
+ queryset._auto_dereference = False
+ return queryset
+
+ # Helper Functions
+
+ def _item_frequencies_map_reduce(self, field, normalize=False):
+ map_func = """
+ function() {
+ var path = '{{~%(field)s}}'.split('.');
+ var field = this;
+
+ for (p in path) {
+ if (typeof field != 'undefined')
+ field = field[path[p]];
+ else
+ break;
+ }
+ if (field && field.constructor == Array) {
+ field.forEach(function(item) {
+ emit(item, 1);
+ });
+ } else if (typeof field != 'undefined') {
+ emit(field, 1);
+ } else {
+ emit(null, 1);
+ }
+ }
+ """ % dict(field=field)
+ reduce_func = """
+ function(key, values) {
+ var total = 0;
+ var valuesSize = values.length;
+ for (var i=0; i < valuesSize; i++) {
+ total += parseInt(values[i], 10);
+ }
+ return total;
+ }
+ """
+ values = self.map_reduce(map_func, reduce_func, 'inline')
+ frequencies = {}
+ for f in values:
+ key = f.key
+ if isinstance(key, float):
+ if int(key) == key:
+ key = int(key)
+ frequencies[key] = int(f.value)
+
+ if normalize:
+ count = sum(frequencies.values())
+ frequencies = dict([(k, float(v) / count)
+ for k, v in frequencies.items()])
+
+ return frequencies
+
+ def _item_frequencies_exec_js(self, field, normalize=False):
+ """Uses exec_js to execute"""
+ freq_func = """
+ function(path) {
+ var path = path.split('.');
+
+ var total = 0.0;
+ db[collection].find(query).forEach(function(doc) {
+ var field = doc;
+ for (p in path) {
+ if (field)
+ field = field[path[p]];
+ else
+ break;
+ }
+ if (field && field.constructor == Array) {
+ total += field.length;
+ } else {
+ total++;
+ }
+ });
+
+ var frequencies = {};
+ var types = {};
+ var inc = 1.0;
+
+ db[collection].find(query).forEach(function(doc) {
+ field = doc;
+ for (p in path) {
+ if (field)
+ field = field[path[p]];
+ else
+ break;
+ }
+ if (field && field.constructor == Array) {
+ field.forEach(function(item) {
+ frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]);
+ });
+ } else {
+ var item = field;
+ types[item] = item;
+ frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]);
+ }
+ });
+ return [total, frequencies, types];
+ }
+ """
+ total, data, types = self.exec_js(freq_func, field)
+ values = dict([(types.get(k), int(v)) for k, v in data.iteritems()])
+
+ if normalize:
+ values = dict([(k, float(v) / total) for k, v in values.items()])
+
+ frequencies = {}
+ for k, v in values.iteritems():
+ if isinstance(k, float):
+ if int(k) == k:
+ k = int(k)
+
+ frequencies[k] = v
+
+ return frequencies
+
+ def _fields_to_dbfields(self, fields, subdoc=False):
+ """Translate fields paths to its db equivalents"""
+ ret = []
+ subclasses = []
+ document = self._document
+ if document._meta['allow_inheritance']:
+ subclasses = [get_document(x)
+ for x in document._subclasses][1:]
+ for field in fields:
+ try:
+ field = ".".join(f.db_field for f in
+ document._lookup_field(field.split('.')))
+ ret.append(field)
+ except LookUpError, err:
+ found = False
+ for subdoc in subclasses:
+ try:
+ subfield = ".".join(f.db_field for f in
+ subdoc._lookup_field(field.split('.')))
+ ret.append(subfield)
+ found = True
+ break
+ except LookUpError, e:
+ pass
+
+ if not found:
+ raise err
+ return ret
+
+ def _get_order_by(self, keys):
+ """Creates a list of order by fields
+ """
+ key_list = []
+ for key in keys:
+ if not key:
+ continue
+
+ if key == '$text_score':
+ key_list.append(('_text_score', {'$meta': "textScore"}))
+ continue
+
+ direction = pymongo.ASCENDING
+ if key[0] == '-':
+ direction = pymongo.DESCENDING
+ if key[0] in ('-', '+'):
+ key = key[1:]
+ key = key.replace('__', '.')
+ try:
+ key = self._document._translate_field_name(key)
+ except:
+ pass
+ key_list.append((key, direction))
+
+ if self._cursor_obj and key_list:
+ self._cursor_obj.sort(key_list)
+ return key_list
+
+ def _get_scalar(self, doc):
+
+ def lookup(obj, name):
+ chunks = name.split('__')
+ for chunk in chunks:
+ obj = getattr(obj, chunk)
+ return obj
+
+ data = [lookup(doc, n) for n in self._scalar]
+ if len(data) == 1:
+ return data[0]
+
+ return tuple(data)
+
+ def _get_as_pymongo(self, row):
+ # Extract which fields paths we should follow if .fields(...) was
+ # used. If not, handle all fields.
+ if not getattr(self, '__as_pymongo_fields', None):
+ self.__as_pymongo_fields = []
+
+ for field in self._loaded_fields.fields - set(['_cls']):
+ self.__as_pymongo_fields.append(field)
+ while '.' in field:
+ field, _ = field.rsplit('.', 1)
+ self.__as_pymongo_fields.append(field)
+
+ all_fields = not self.__as_pymongo_fields
+
+ def clean(data, path=None):
+ path = path or ''
+
+ if isinstance(data, dict):
+ new_data = {}
+ for key, value in data.iteritems():
+ new_path = '%s.%s' % (path, key) if path else key
+
+ if all_fields:
+ include_field = True
+ elif self._loaded_fields.value == QueryFieldList.ONLY:
+ include_field = new_path in self.__as_pymongo_fields
+ else:
+ include_field = new_path not in self.__as_pymongo_fields
+
+ if include_field:
+ new_data[key] = clean(value, path=new_path)
+ data = new_data
+ elif isinstance(data, list):
+ data = [clean(d, path=path) for d in data]
+ else:
+ if self._as_pymongo_coerce:
+ # If we need to coerce types, we need to determine the
+ # type of this field and use the corresponding
+ # .to_python(...)
+ from mongoengine.fields import EmbeddedDocumentField
+
+ obj = self._document
+ for chunk in path.split('.'):
+ obj = getattr(obj, chunk, None)
+ if obj is None:
+ break
+ elif isinstance(obj, EmbeddedDocumentField):
+ obj = obj.document_type
+ if obj and data is not None:
+ data = obj.to_python(data)
+ return data
+
+ return clean(row)
+
+ def _sub_js_fields(self, code):
+ """When fields are specified with [~fieldname] syntax, where
+ *fieldname* is the Python name of a field, *fieldname* will be
+ substituted for the MongoDB name of the field (specified using the
+ :attr:`name` keyword argument in a field's constructor).
+ """
+
+ def field_sub(match):
+ # Extract just the field name, and look up the field objects
+ field_name = match.group(1).split('.')
+ fields = self._document._lookup_field(field_name)
+ # Substitute the correct name for the field into the javascript
+ return u'["%s"]' % fields[-1].db_field
+
+ def field_path_sub(match):
+ # Extract just the field name, and look up the field objects
+ field_name = match.group(1).split('.')
+ fields = self._document._lookup_field(field_name)
+ # Substitute the correct name for the field into the javascript
+ return ".".join([f.db_field for f in fields])
+
+ code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code)
+ code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub,
+ code)
+ return code
+
+ def _chainable_method(self, method_name, val):
+ queryset = self.clone()
+ method = getattr(queryset._cursor, method_name)
+ method(val)
+ setattr(queryset, "_" + method_name, val)
+ return queryset
+
+ # Deprecated
+ def ensure_index(self, **kwargs):
+ """Deprecated use :func:`Document.ensure_index`"""
+ msg = ("Doc.objects()._ensure_index() is deprecated. "
+ "Use Doc.ensure_index() instead.")
+ warnings.warn(msg, DeprecationWarning)
+ self._document.__class__.ensure_index(**kwargs)
+ return self
+
+ def _ensure_indexes(self):
+ """Deprecated use :func:`~Document.ensure_indexes`"""
+ msg = ("Doc.objects()._ensure_indexes() is deprecated. "
+ "Use Doc.ensure_indexes() instead.")
+ warnings.warn(msg, DeprecationWarning)
+ self._document.__class__.ensure_indexes()
diff --git a/awx/lib/site-packages/mongoengine/queryset/field_list.py b/awx/lib/site-packages/mongoengine/queryset/field_list.py
new file mode 100644
index 0000000000..140a71e051
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/queryset/field_list.py
@@ -0,0 +1,86 @@
+
+__all__ = ('QueryFieldList',)
+
+
+class QueryFieldList(object):
+ """Object that handles combinations of .only() and .exclude() calls"""
+ ONLY = 1
+ EXCLUDE = 0
+
+ def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False):
+ """The QueryFieldList builder
+
+ :param fields: A list of fields used in `.only()` or `.exclude()`
+ :param value: How to handle the fields; either `ONLY` or `EXCLUDE`
+ :param always_include: Any fields to always_include eg `_cls`
+        :param _only_called: Has `.only()` been called? If so, successive
+            field sets are unioned; otherwise the new fields replace the old.
+ """
+ self.value = value
+ self.fields = set(fields or [])
+ self.always_include = set(always_include or [])
+ self._id = None
+ self._only_called = _only_called
+ self.slice = {}
+
+ def __add__(self, f):
+ if isinstance(f.value, dict):
+ for field in f.fields:
+ self.slice[field] = f.value
+ if not self.fields:
+ self.fields = f.fields
+ elif not self.fields:
+ self.fields = f.fields
+ self.value = f.value
+ self.slice = {}
+ elif self.value is self.ONLY and f.value is self.ONLY:
+ self._clean_slice()
+ if self._only_called:
+ self.fields = self.fields.union(f.fields)
+ else:
+ self.fields = f.fields
+ elif self.value is self.EXCLUDE and f.value is self.EXCLUDE:
+ self.fields = self.fields.union(f.fields)
+ self._clean_slice()
+ elif self.value is self.ONLY and f.value is self.EXCLUDE:
+ self.fields -= f.fields
+ self._clean_slice()
+ elif self.value is self.EXCLUDE and f.value is self.ONLY:
+ self.value = self.ONLY
+ self.fields = f.fields - self.fields
+ self._clean_slice()
+
+ if '_id' in f.fields:
+ self._id = f.value
+
+ if self.always_include:
+ if self.value is self.ONLY and self.fields:
+ if sorted(self.slice.keys()) != sorted(self.fields):
+ self.fields = self.fields.union(self.always_include)
+ else:
+ self.fields -= self.always_include
+
+ if getattr(f, '_only_called', False):
+ self._only_called = True
+ return self
+
+ def __nonzero__(self):
+ return bool(self.fields)
+
+ def as_dict(self):
+ field_list = dict((field, self.value) for field in self.fields)
+ if self.slice:
+ field_list.update(self.slice)
+ if self._id is not None:
+ field_list['_id'] = self._id
+ return field_list
+
+ def reset(self):
+ self.fields = set([])
+ self.slice = {}
+ self.value = self.ONLY
+
+ def _clean_slice(self):
+ if self.slice:
+ for field in set(self.slice.keys()) - self.fields:
+ del self.slice[field]
diff --git a/awx/lib/site-packages/mongoengine/queryset/manager.py b/awx/lib/site-packages/mongoengine/queryset/manager.py
new file mode 100644
index 0000000000..47c2143dac
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/queryset/manager.py
@@ -0,0 +1,57 @@
+from functools import partial
+from mongoengine.queryset.queryset import QuerySet
+
+__all__ = ('queryset_manager', 'QuerySetManager')
+
+
+class QuerySetManager(object):
+ """
+ The default QuerySet Manager.
+
+ Custom QuerySet Manager functions can extend this class and users can
+ add extra queryset functionality. Any custom manager methods must accept a
+ :class:`~mongoengine.Document` class as its first argument, and a
+ :class:`~mongoengine.queryset.QuerySet` as its second argument.
+
+    The method function should return a
+    :class:`~mongoengine.queryset.QuerySet`, probably the same one that was
+    passed in, but modified in some way.
+ """
+
+ get_queryset = None
+ default = QuerySet
+
+ def __init__(self, queryset_func=None):
+ if queryset_func:
+ self.get_queryset = queryset_func
+
+ def __get__(self, instance, owner):
+ """Descriptor for instantiating a new QuerySet object when
+ Document.objects is accessed.
+ """
+        if instance is not None:
+            # Accessed via a document instance rather than the class;
+            # return the manager itself
+            return self
+
+ # owner is the document that contains the QuerySetManager
+ queryset_class = owner._meta.get('queryset_class', self.default)
+ queryset = queryset_class(owner, owner._get_collection())
+ if self.get_queryset:
+ arg_count = self.get_queryset.func_code.co_argcount
+ if arg_count == 1:
+ queryset = self.get_queryset(queryset)
+ elif arg_count == 2:
+ queryset = self.get_queryset(owner, queryset)
+ else:
+ queryset = partial(self.get_queryset, owner, queryset)
+ return queryset
+
+
+def queryset_manager(func):
+ """Decorator that allows you to define custom QuerySet managers on
+ :class:`~mongoengine.Document` classes. The manager must be a function that
+ accepts a :class:`~mongoengine.Document` class as its first argument, and a
+ :class:`~mongoengine.queryset.QuerySet` as its second argument. The method
+ function should return a :class:`~mongoengine.queryset.QuerySet`, probably
+ the same one that was passed in, but modified in some way.
+ """
+ return QuerySetManager(func)
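+
+# Editor's note: usage sketch (the ``BlogPost`` document is hypothetical):
+#
+#     class BlogPost(Document):
+#         published = BooleanField(default=False)
+#
+#         @queryset_manager
+#         def live_posts(doc_cls, queryset):
+#             return queryset.filter(published=True)
+#
+#     BlogPost.live_posts().count()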
diff --git a/awx/lib/site-packages/mongoengine/queryset/queryset.py b/awx/lib/site-packages/mongoengine/queryset/queryset.py
new file mode 100644
index 0000000000..e8bd66ae0a
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/queryset/queryset.py
@@ -0,0 +1,164 @@
+from mongoengine.errors import OperationError
+from mongoengine.queryset.base import (BaseQuerySet, DO_NOTHING, NULLIFY,
+ CASCADE, DENY, PULL)
+
+__all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE',
+ 'DENY', 'PULL')
+
+# The maximum number of items to display in a QuerySet.__repr__
+REPR_OUTPUT_SIZE = 20
+ITER_CHUNK_SIZE = 100
+
+
+class QuerySet(BaseQuerySet):
+ """The default queryset, that builds queries and handles a set of results
+ returned from a query.
+
+ Wraps a MongoDB cursor, providing :class:`~mongoengine.Document` objects as
+ the results.
+ """
+
+ _has_more = True
+ _len = None
+ _result_cache = None
+
+ def __iter__(self):
+ """Iteration utilises a results cache which iterates the cursor
+ in batches of ``ITER_CHUNK_SIZE``.
+
+        If ``self._has_more``, the cursor hasn't been exhausted, so results
+        are cached in batches as they are fetched. Otherwise iterate over
+        the result cache.
+ """
+ self._iter = True
+ if self._has_more:
+ return self._iter_results()
+
+ # iterating over the cache.
+ return iter(self._result_cache)
+
+ def __len__(self):
+ """Since __len__ is called quite frequently (for example, as part of
+ list(qs) we populate the result cache and cache the length.
+ """
+ if self._len is not None:
+ return self._len
+ if self._has_more:
+ # populate the cache
+ list(self._iter_results())
+
+ self._len = len(self._result_cache)
+ return self._len
+
+ def __repr__(self):
+ """Provides the string representation of the QuerySet
+ """
+ if self._iter:
+ return '.. queryset mid-iteration ..'
+
+ self._populate_cache()
+ data = self._result_cache[:REPR_OUTPUT_SIZE + 1]
+ if len(data) > REPR_OUTPUT_SIZE:
+ data[-1] = "...(remaining elements truncated)..."
+ return repr(data)
+
+ def _iter_results(self):
+ """A generator for iterating over the result cache.
+
+ Also populates the cache if there are more possible results to yield.
+ Raises StopIteration when there are no more results"""
+ if self._result_cache is None:
+ self._result_cache = []
+ pos = 0
+ while True:
+ upper = len(self._result_cache)
+ while pos < upper:
+ yield self._result_cache[pos]
+ pos = pos + 1
+ if not self._has_more:
+ raise StopIteration
+ if len(self._result_cache) <= pos:
+ self._populate_cache()
+
+ def _populate_cache(self):
+ """
+ Populates the result cache with ``ITER_CHUNK_SIZE`` more entries
+ (until the cursor is exhausted).
+ """
+ if self._result_cache is None:
+ self._result_cache = []
+ if self._has_more:
+ try:
+ for i in xrange(ITER_CHUNK_SIZE):
+ self._result_cache.append(self.next())
+ except StopIteration:
+ self._has_more = False
+
+ def count(self, with_limit_and_skip=False):
+ """Count the selected elements in the query.
+
+ :param with_limit_and_skip (optional): take any :meth:`limit` or
+ :meth:`skip` that has been applied to this cursor into account when
+ getting the count
+ """
+ if with_limit_and_skip is False:
+ return super(QuerySet, self).count(with_limit_and_skip)
+
+ if self._len is None:
+ self._len = super(QuerySet, self).count(with_limit_and_skip)
+
+ return self._len
+
+ def no_cache(self):
+ """Convert to a non_caching queryset
+
+ .. versionadded:: 0.8.3 Convert to non caching queryset
+ """
+ if self._result_cache is not None:
+ raise OperationError("QuerySet already cached")
+ return self.clone_into(QuerySetNoCache(self._document, self._collection))
+
+
+class QuerySetNoCache(BaseQuerySet):
+ """A non caching QuerySet"""
+
+ def cache(self):
+ """Convert to a caching queryset
+
+ .. versionadded:: 0.8.3 Convert to caching queryset
+ """
+ return self.clone_into(QuerySet(self._document, self._collection))
+
+ def __repr__(self):
+ """Provides the string representation of the QuerySet
+
+        .. versionchanged:: 0.6.13 Now doesn't modify the cursor
+ """
+ if self._iter:
+ return '.. queryset mid-iteration ..'
+
+ data = []
+ for i in xrange(REPR_OUTPUT_SIZE + 1):
+ try:
+ data.append(self.next())
+ except StopIteration:
+ break
+ if len(data) > REPR_OUTPUT_SIZE:
+ data[-1] = "...(remaining elements truncated)..."
+
+ self.rewind()
+ return repr(data)
+
+ def __iter__(self):
+ queryset = self
+ if queryset._iter:
+ queryset = self.clone()
+ queryset.rewind()
+ return queryset
+
+
+class QuerySetNoDeRef(QuerySet):
+ """Special no_dereference QuerySet"""
+
+ def __dereference(items, max_depth=1, instance=None, name=None):
+ return items
\ No newline at end of file
diff --git a/awx/lib/site-packages/mongoengine/queryset/transform.py b/awx/lib/site-packages/mongoengine/queryset/transform.py
new file mode 100644
index 0000000000..03a09dc516
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/queryset/transform.py
@@ -0,0 +1,372 @@
+from collections import defaultdict
+
+import pymongo
+from bson import SON
+
+from mongoengine.connection import get_connection
+from mongoengine.common import _import_class
+from mongoengine.errors import InvalidQueryError, LookUpError
+
+__all__ = ('query', 'update')
+
+
+COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod',
+ 'all', 'size', 'exists', 'not', 'elemMatch', 'type')
+GEO_OPERATORS = ('within_distance', 'within_spherical_distance',
+ 'within_box', 'within_polygon', 'near', 'near_sphere',
+ 'max_distance', 'geo_within', 'geo_within_box',
+ 'geo_within_polygon', 'geo_within_center',
+ 'geo_within_sphere', 'geo_intersects')
+STRING_OPERATORS = ('contains', 'icontains', 'startswith',
+ 'istartswith', 'endswith', 'iendswith',
+ 'exact', 'iexact')
+CUSTOM_OPERATORS = ('match',)
+MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS +
+ STRING_OPERATORS + CUSTOM_OPERATORS)
+
+UPDATE_OPERATORS = ('set', 'unset', 'inc', 'dec', 'pop', 'push',
+ 'push_all', 'pull', 'pull_all', 'add_to_set',
+ 'set_on_insert', 'min', 'max')
+
+
+def query(_doc_cls=None, _field_operation=False, **query):
+ """Transform a query from Django-style format to Mongo format.
+ """
+ mongo_query = {}
+ merge_query = defaultdict(list)
+ for key, value in sorted(query.items()):
+ if key == "__raw__":
+ mongo_query.update(value)
+ continue
+
+ parts = key.rsplit('__')
+ indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
+ parts = [part for part in parts if not part.isdigit()]
+ # Check for an operator and transform to mongo-style if there is
+ op = None
+ if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
+ op = parts.pop()
+
+ negate = False
+ if len(parts) > 1 and parts[-1] == 'not':
+ parts.pop()
+ negate = True
+
+ if _doc_cls:
+ # Switch field names to proper names [set in Field(name='foo')]
+ try:
+ fields = _doc_cls._lookup_field(parts)
+ except Exception, e:
+ raise InvalidQueryError(e)
+ parts = []
+
+ CachedReferenceField = _import_class('CachedReferenceField')
+
+ cleaned_fields = []
+ for field in fields:
+ append_field = True
+ if isinstance(field, basestring):
+ parts.append(field)
+ append_field = False
+ # is last and CachedReferenceField
+ elif isinstance(field, CachedReferenceField) and fields[-1] == field:
+ parts.append('%s._id' % field.db_field)
+ else:
+ parts.append(field.db_field)
+
+ if append_field:
+ cleaned_fields.append(field)
+
+ # Convert value to proper value
+ field = cleaned_fields[-1]
+
+ singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not']
+ singular_ops += STRING_OPERATORS
+ if op in singular_ops:
+ if isinstance(field, basestring):
+ if (op in STRING_OPERATORS and
+ isinstance(value, basestring)):
+ StringField = _import_class('StringField')
+ value = StringField.prepare_query_value(op, value)
+ else:
+ value = field
+ else:
+ value = field.prepare_query_value(op, value)
+
+ if isinstance(field, CachedReferenceField) and value:
+ value = value['_id']
+
+ elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict):
+ # 'in', 'nin', 'all' and 'near' require a list of values
+ value = [field.prepare_query_value(op, v) for v in value]
+
+ # if op and op not in COMPARISON_OPERATORS:
+ if op:
+ if op in GEO_OPERATORS:
+ value = _geo_operator(field, op, value)
+ elif op in CUSTOM_OPERATORS:
+ if op in ('elem_match', 'match'):
+ value = field.prepare_query_value(op, value)
+ value = {"$elemMatch": value}
+ else:
+ NotImplementedError("Custom method '%s' has not "
+ "been implemented" % op)
+ elif op not in STRING_OPERATORS:
+ value = {'$' + op: value}
+
+ if negate:
+ value = {'$not': value}
+
+ for i, part in indices:
+ parts.insert(i, part)
+ key = '.'.join(parts)
+ if op is None or key not in mongo_query:
+ mongo_query[key] = value
+ else:
+ if isinstance(mongo_query[key], dict):
+ mongo_query[key].update(value)
+ # $maxDistance needs to come last - convert to SON
+ value_dict = mongo_query[key]
+ if ('$maxDistance' in value_dict and '$near' in value_dict):
+ value_son = SON()
+ if isinstance(value_dict['$near'], dict):
+ for k, v in value_dict.iteritems():
+ if k == '$maxDistance':
+ continue
+ value_son[k] = v
+ if (get_connection().max_wire_version <= 1):
+ value_son['$maxDistance'] = value_dict[
+ '$maxDistance']
+ else:
+ value_son['$near'] = SON(value_son['$near'])
+ value_son['$near'][
+ '$maxDistance'] = value_dict['$maxDistance']
+ else:
+ for k, v in value_dict.iteritems():
+ if k == '$maxDistance':
+ continue
+ value_son[k] = v
+ value_son['$maxDistance'] = value_dict['$maxDistance']
+
+ mongo_query[key] = value_son
+ else:
+ # Store for manually merging later
+ merge_query[key].append(value)
+
+ # The queryset has been filtered in such a way that we must manually merge
+ for k, v in merge_query.items():
+ merge_query[k].append(mongo_query[k])
+ del mongo_query[k]
+ if isinstance(v, list):
+ value = [{k: val} for val in v]
+ if '$and' in mongo_query:
+ mongo_query['$and'].extend(value)
+ else:
+ mongo_query['$and'] = value
+
+ return mongo_query
+
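
As a rough illustration of the transform above: called without a document
class, keys pass through with `__` converted to `.` and operators converted
to their `$` forms (the field names here are hypothetical):

    from mongoengine.queryset import transform

    transform.query(age__gte=18, name__ne='Bob')
    # -> {'age': {'$gte': 18}, 'name': {'$ne': 'Bob'}}

    transform.query(address__city__ne='Oslo')
    # -> {'address.city': {'$ne': 'Oslo'}}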
+
+def update(_doc_cls=None, **update):
+ """Transform an update spec from Django-style format to Mongo format.
+ """
+ mongo_update = {}
+ for key, value in update.items():
+ if key == "__raw__":
+ mongo_update.update(value)
+ continue
+ parts = key.split('__')
+ # if there is no operator, default to "set"
+ if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
+ parts.insert(0, 'set')
+ # Check for an operator and transform to mongo-style if there is
+ op = None
+ if parts[0] in UPDATE_OPERATORS:
+ op = parts.pop(0)
+ # Convert Pythonic names to Mongo equivalents
+ if op in ('push_all', 'pull_all'):
+ op = op.replace('_all', 'All')
+ elif op == 'dec':
+ # Support decrement by flipping a positive value's sign
+ # and using 'inc'
+ op = 'inc'
+ if value > 0:
+ value = -value
+ elif op == 'add_to_set':
+ op = 'addToSet'
+ elif op == 'set_on_insert':
+ op = "setOnInsert"
+
+ match = None
+ if parts[-1] in COMPARISON_OPERATORS:
+ match = parts.pop()
+
+ if _doc_cls:
+ # Switch field names to proper names [set in Field(name='foo')]
+ try:
+ fields = _doc_cls._lookup_field(parts)
+ except Exception, e:
+ raise InvalidQueryError(e)
+ parts = []
+
+ cleaned_fields = []
+ appended_sub_field = False
+ for field in fields:
+ append_field = True
+ if isinstance(field, basestring):
+ # Convert the S operator to $
+ if field == 'S':
+ field = '$'
+ parts.append(field)
+ append_field = False
+ else:
+ parts.append(field.db_field)
+ if append_field:
+ appended_sub_field = False
+ cleaned_fields.append(field)
+ if hasattr(field, 'field'):
+ cleaned_fields.append(field.field)
+ appended_sub_field = True
+
+ # Convert value to proper value
+ if appended_sub_field:
+ field = cleaned_fields[-2]
+ else:
+ field = cleaned_fields[-1]
+
+ GeoJsonBaseField = _import_class("GeoJsonBaseField")
+ if isinstance(field, GeoJsonBaseField):
+ value = field.to_mongo(value)
+
+ if op in (None, 'set', 'push', 'pull'):
+ if field.required or value is not None:
+ value = field.prepare_query_value(op, value)
+ elif op in ('pushAll', 'pullAll'):
+ value = [field.prepare_query_value(op, v) for v in value]
+ elif op in ('addToSet', 'setOnInsert'):
+ if isinstance(value, (list, tuple, set)):
+ value = [field.prepare_query_value(op, v) for v in value]
+ elif field.required or value is not None:
+ value = field.prepare_query_value(op, value)
+ elif op == "unset":
+ value = 1
+
+ if match:
+ match = '$' + match
+ value = {match: value}
+
+ key = '.'.join(parts)
+
+ if not op:
+ raise InvalidQueryError("Updates must supply an operation "
+ "eg: set__FIELD=value")
+
+ if 'pull' in op and '.' in key:
+ # Dot operators don't work on pull operations
+ # unless they point to a list field
+ # Otherwise it uses nested dict syntax
+ if op == 'pullAll':
+ raise InvalidQueryError("pullAll operations only support "
+ "a single field depth")
+
+ # Look for the last ListField and use dot notation up to that point
+ field_classes = [c.__class__ for c in cleaned_fields]
+ field_classes.reverse()
+ ListField = _import_class('ListField')
+ if ListField in field_classes:
+ # Join all fields via dot notation to the last ListField
+ # Then process as normal
+ last_listField = len(
+ cleaned_fields) - field_classes.index(ListField)
+ key = ".".join(parts[:last_listField])
+ parts = parts[last_listField:]
+ parts.insert(0, key)
+
+ parts.reverse()
+ for key in parts:
+ value = {key: value}
+ elif op == 'addToSet' and isinstance(value, list):
+ value = {key: {"$each": value}}
+ else:
+ value = {key: value}
+ key = '$' + op
+
+ if key not in mongo_update:
+ mongo_update[key] = value
+ elif key in mongo_update and isinstance(mongo_update[key], dict):
+ mongo_update[key].update(value)
+
+ return mongo_update
+
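
A corresponding sketch for the update transform (field names hypothetical);
note how `dec` is rewritten to `$inc` with a flipped sign, as described in
the comments above:

    from mongoengine.queryset import transform

    transform.update(set__name='Ada', inc__visits=1)
    # -> {'$set': {'name': 'Ada'}, '$inc': {'visits': 1}}

    transform.update(dec__visits=2)
    # -> {'$inc': {'visits': -2}}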
+
+def _geo_operator(field, op, value):
+ """Helper to return the query for a given geo query"""
+ if field._geo_index == pymongo.GEO2D:
+ if op == "within_distance":
+ value = {'$within': {'$center': value}}
+ elif op == "within_spherical_distance":
+ value = {'$within': {'$centerSphere': value}}
+ elif op == "within_polygon":
+ value = {'$within': {'$polygon': value}}
+ elif op == "near":
+ value = {'$near': value}
+ elif op == "near_sphere":
+ value = {'$nearSphere': value}
+ elif op == 'within_box':
+ value = {'$within': {'$box': value}}
+ elif op == "max_distance":
+ value = {'$maxDistance': value}
+ else:
+ raise NotImplementedError("Geo method '%s' has not "
+ "been implemented for a GeoPointField" % op)
+ else:
+ if op == "geo_within":
+ value = {"$geoWithin": _infer_geometry(value)}
+ elif op == "geo_within_box":
+ value = {"$geoWithin": {"$box": value}}
+ elif op == "geo_within_polygon":
+ value = {"$geoWithin": {"$polygon": value}}
+ elif op == "geo_within_center":
+ value = {"$geoWithin": {"$center": value}}
+ elif op == "geo_within_sphere":
+ value = {"$geoWithin": {"$centerSphere": value}}
+ elif op == "geo_intersects":
+ value = {"$geoIntersects": _infer_geometry(value)}
+ elif op == "near":
+ value = {'$near': _infer_geometry(value)}
+ elif op == "max_distance":
+ value = {'$maxDistance': value}
+ else:
+ raise NotImplementedError("Geo method '%s' has not "
+ "been implemented for a %s " % (op, field._name))
+ return value
+
+
+def _infer_geometry(value):
+ """Helper method that tries to infer the $geometry shape for a given value"""
+ if isinstance(value, dict):
+ if "$geometry" in value:
+ return value
+ elif 'coordinates' in value and 'type' in value:
+ return {"$geometry": value}
+ raise InvalidQueryError("Invalid $geometry dictionary should have "
+ "type and coordinates keys")
+ elif isinstance(value, (list, set)):
+ try:
+ value[0][0][0]
+ return {"$geometry": {"type": "Polygon", "coordinates": value}}
+ except:
+ pass
+ try:
+ value[0][0]
+ return {"$geometry": {"type": "LineString", "coordinates": value}}
+ except:
+ pass
+ try:
+ value[0]
+ return {"$geometry": {"type": "Point", "coordinates": value}}
+ except:
+ pass
+
+ raise InvalidQueryError("Invalid $geometry data. Can be either a dictionary "
+ "or (nested) lists of coordinate(s)")
diff --git a/awx/lib/site-packages/mongoengine/queryset/visitor.py b/awx/lib/site-packages/mongoengine/queryset/visitor.py
new file mode 100644
index 0000000000..e5d2e6152b
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/queryset/visitor.py
@@ -0,0 +1,162 @@
+import copy
+
+from itertools import product
+from functools import reduce
+
+from mongoengine.errors import InvalidQueryError
+from mongoengine.queryset import transform
+
+__all__ = ('Q',)
+
+
+class QNodeVisitor(object):
+ """Base visitor class for visiting Q-object nodes in a query tree.
+ """
+
+ def visit_combination(self, combination):
+ """Called by QCombination objects.
+ """
+ return combination
+
+ def visit_query(self, query):
+ """Called by (New)Q objects.
+ """
+ return query
+
+
+class DuplicateQueryConditionsError(InvalidQueryError):
+ pass
+
+
+class SimplificationVisitor(QNodeVisitor):
+ """Simplifies query trees by combining unnecessary 'and' connection nodes
+ into a single Q-object.
+ """
+
+ def visit_combination(self, combination):
+ if combination.operation == combination.AND:
+ # The simplification only applies to 'simple' queries
+ if all(isinstance(node, Q) for node in combination.children):
+ queries = [n.query for n in combination.children]
+ try:
+ return Q(**self._query_conjunction(queries))
+ except DuplicateQueryConditionsError:
+ # Cannot be simplified
+ pass
+ return combination
+
+ def _query_conjunction(self, queries):
+ """Merges query dicts - effectively &ing them together.
+ """
+ query_ops = set()
+ combined_query = {}
+ for query in queries:
+ ops = set(query.keys())
+ # Make sure that the same operation isn't applied more than once
+ # to a single field
+ intersection = ops.intersection(query_ops)
+ if intersection:
+ raise DuplicateQueryConditionsError()
+
+ query_ops.update(ops)
+ combined_query.update(copy.deepcopy(query))
+ return combined_query
+
+
+class QueryCompilerVisitor(QNodeVisitor):
+ """Compiles the nodes in a query tree to a PyMongo-compatible query
+ dictionary.
+ """
+
+ def __init__(self, document):
+ self.document = document
+
+ def visit_combination(self, combination):
+ operator = "$and"
+ if combination.operation == combination.OR:
+ operator = "$or"
+ return {operator: combination.children}
+
+ def visit_query(self, query):
+ return transform.query(self.document, **query.query)
+
+
+class QNode(object):
+ """Base class for nodes in query trees.
+ """
+
+ AND = 0
+ OR = 1
+
+ def to_query(self, document):
+ query = self.accept(SimplificationVisitor())
+ query = query.accept(QueryCompilerVisitor(document))
+ return query
+
+ def accept(self, visitor):
+ raise NotImplementedError
+
+ def _combine(self, other, operation):
+ """Combine this node with another node into a QCombination object.
+ """
+ if getattr(other, 'empty', True):
+ return self
+
+ if self.empty:
+ return other
+
+ return QCombination(operation, [self, other])
+
+ @property
+ def empty(self):
+ return False
+
+ def __or__(self, other):
+ return self._combine(other, self.OR)
+
+ def __and__(self, other):
+ return self._combine(other, self.AND)
+
+
+class QCombination(QNode):
+ """Represents the combination of several conditions by a given logical
+ operator.
+ """
+
+ def __init__(self, operation, children):
+ self.operation = operation
+ self.children = []
+ for node in children:
+ # If the child is a combination of the same type, we can merge its
+ # children directly into this combination's children
+ if isinstance(node, QCombination) and node.operation == operation:
+ self.children += node.children
+ else:
+ self.children.append(node)
+
+ def accept(self, visitor):
+ for i in range(len(self.children)):
+ if isinstance(self.children[i], QNode):
+ self.children[i] = self.children[i].accept(visitor)
+
+ return visitor.visit_combination(self)
+
+ @property
+ def empty(self):
+ return not bool(self.children)
+
+
+class Q(QNode):
+ """A simple query object, used in a query tree to build up more complex
+ query structures.
+ """
+
+ def __init__(self, **query):
+ self.query = query
+
+ def accept(self, visitor):
+ return visitor.visit_query(self)
+
+ @property
+ def empty(self):
+ return not bool(self.query)
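
A short sketch of how the Q tree above behaves (`Page` is a hypothetical
Document subclass):

    from mongoengine.queryset.visitor import Q

    # Empty nodes are dropped when combining:
    Q() & Q(name='Ada')              # -> Q(name='Ada')

    # Non-empty nodes build a QCombination tree:
    tree = Q(age__gte=18) | Q(name='Ada')

    # to_query() simplifies the tree and compiles it via transform.query:
    # tree.to_query(Page)
    # -> {'$or': [{'age': {'$gte': 18}}, {'name': 'Ada'}]}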
diff --git a/awx/lib/site-packages/mongoengine/signals.py b/awx/lib/site-packages/mongoengine/signals.py
new file mode 100644
index 0000000000..06fb8b4f11
--- /dev/null
+++ b/awx/lib/site-packages/mongoengine/signals.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+
+__all__ = ['pre_init', 'post_init', 'pre_save', 'pre_save_post_validation',
+ 'post_save', 'pre_delete', 'post_delete',
+ 'pre_bulk_insert', 'post_bulk_insert']
+
+signals_available = False
+try:
+ from blinker import Namespace
+ signals_available = True
+except ImportError:
+ class Namespace(object):
+ def signal(self, name, doc=None):
+ return _FakeSignal(name, doc)
+
+ class _FakeSignal(object):
+ """If blinker is unavailable, create a fake class with the same
+ interface that allows sending of signals but will fail with an
+ error on anything else. Instead of doing anything on send, it
+ will just ignore the arguments and do nothing instead.
+ """
+
+ def __init__(self, name, doc=None):
+ self.name = name
+ self.__doc__ = doc
+
+ def _fail(self, *args, **kwargs):
+ raise RuntimeError('signalling support is unavailable '
+ 'because the blinker library is '
+ 'not installed.')
+ send = lambda *a, **kw: None
+ connect = disconnect = has_receivers_for = receivers_for = \
+ temporarily_connected_to = _fail
+ del _fail
+
+# The namespace for code signals. If you are not mongoengine code, do
+# not put signals in here. Create your own namespace instead.
+_signals = Namespace()
+
+pre_init = _signals.signal('pre_init')
+post_init = _signals.signal('post_init')
+pre_save = _signals.signal('pre_save')
+pre_save_post_validation = _signals.signal('pre_save_post_validation')
+post_save = _signals.signal('post_save')
+pre_delete = _signals.signal('pre_delete')
+post_delete = _signals.signal('post_delete')
+pre_bulk_insert = _signals.signal('pre_bulk_insert')
+post_bulk_insert = _signals.signal('post_bulk_insert')
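
A minimal receiver sketch (the `Page` document is hypothetical). With
blinker installed the handler fires before each save; without blinker,
connect() raises the RuntimeError from _FakeSignal above:

    from mongoengine import Document, StringField, signals

    class Page(Document):
        title = StringField()

    def on_pre_save(sender, document, **kwargs):
        print('about to save %s' % document.title)

    signals.pre_save.connect(on_pre_save, sender=Page)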
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 5f92477cab..bd08f016cf 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -1,10 +1,11 @@
# PIP requirements for AWX development/build environment (downloaded from
-# PyPI). Install using "pip -r dev.txt".
+# PyPI). Install using "pip install -r dev.txt".
-U distribute
# Packages used for both development and production:
Django>=1.6.7,<1.7
+pymongo>=2.8
# The following packages and their dependencies are bundled with AWX
# (in awx/lib/site-packages):
@@ -25,6 +26,7 @@ Django>=1.6.7,<1.7
#redis
#requests
#South>=0.8,<2.0
+ #mongoengine>=0.9.0
# Development-only packages:
django-debug-toolbar
diff --git a/requirements/dev_local.txt b/requirements/dev_local.txt
index 82968bc54d..7579a37993 100644
--- a/requirements/dev_local.txt
+++ b/requirements/dev_local.txt
@@ -1,10 +1,11 @@
# PIP requirements for AWX development/build environment (using only local
-# packages). Install using "pip --no-index -r dev_local.txt".
+# packages). Install using "pip install --no-index -r dev_local.txt".
# May need to install these packages separately the first time:
setuptools-12.0.5.tar.gz
distribute-0.7.3.zip
Django-1.6.7.tar.gz
+mongo-python-driver-2.8.tar.gz
# The following packages are bundled with AWX (in awx/lib/site-packages):
# For Python2.6 support:
@@ -74,6 +75,7 @@ Django-1.6.7.tar.gz
#pyrax-1.7.2.tar.gz
#redis-2.10.3.tar.gz
#South-0.8.4.tar.gz
+ #mongoengine-0.9.0.tar.gz
# Dev-only packages:
# Needed by django-debug-toolbar:
@@ -102,3 +104,7 @@ ipython-1.2.1.tar.gz
# - readline-6.2.4.1.tar.gz (for the ipython shell)
# - python-zmq (for using the job callback receiver)
# - python-memcached (for host details caching)
+# - pycrypto (via pip install pycrypto)
+# - pyaml (via pip install pyaml)
+# - python-ldap (via pip install python-ldap)
+# - pyzmq (via pip install pyzmq)
diff --git a/requirements/mongo-python-driver-2.8.tar.gz b/requirements/mongo-python-driver-2.8.tar.gz
new file mode 100644
index 0000000000..fb21a3dcf1
Binary files /dev/null and b/requirements/mongo-python-driver-2.8.tar.gz differ
diff --git a/requirements/mongoengine-0.9.0.tar.gz b/requirements/mongoengine-0.9.0.tar.gz
new file mode 100644
index 0000000000..dc71b34258
Binary files /dev/null and b/requirements/mongoengine-0.9.0.tar.gz differ
diff --git a/requirements/prod.txt b/requirements/prod.txt
index 660f1abf53..89c9ff38e3 100644
--- a/requirements/prod.txt
+++ b/requirements/prod.txt
@@ -1,7 +1,8 @@
# PIP requirements for AWX production environment (downloaded from PyPI).
-# Install using "pip -r prod.txt".
+# Install using "pip install -r prod.txt".
Django>=1.4
+pymongo>=2.8
# The following packages and their dependencies are bundled with AWX
# (in awx/lib/site-packages):
@@ -23,6 +24,7 @@ Django>=1.4
#redis
#requests
#South>=0.8,<2.0
+ #mongoengine>=0.9.0
# You may also need to install the following extra packages using the OS
# package manager, or pip if you're running inside a virtualenv.
diff --git a/requirements/prod_local.txt b/requirements/prod_local.txt
index c387f53c0f..1f52392b5e 100644
--- a/requirements/prod_local.txt
+++ b/requirements/prod_local.txt
@@ -1,8 +1,9 @@
# PIP requirements for AWX production environment (using only local packages).
-# Install using "pip --no-index -r prod_local.txt".
+# Install using "pip install --no-index -r prod_local.txt".
# May need to install this package separately the first time:
Django-1.5.5.tar.gz
+mongo-python-driver-2.8.tar.gz
# The following packages are bundled with AWX (in awx/lib/site-packages):
# For Python2.6 support:
@@ -72,6 +73,7 @@ Django-1.5.5.tar.gz
#pyrax-1.7.2.tar.gz
#redis-2.10.3.tar.gz
#South-0.8.4.tar.gz
+ #mongoengine-0.9.0.tar.gz
# You may also need to install the following extra packages using the OS
# package manager, or pip if you're running inside a virtualenv.