Diffstat (limited to 'src/leap/bitmask/mail/adaptors')
-rw-r--r--  src/leap/bitmask/mail/adaptors/__init__.py                        0
-rw-r--r--  src/leap/bitmask/mail/adaptors/models.py                        123
-rw-r--r--  src/leap/bitmask/mail/adaptors/soledad.py                      1268
-rw-r--r--  src/leap/bitmask/mail/adaptors/soledad_indexes.py               106
l---------  src/leap/bitmask/mail/adaptors/tests/rfc822.message               1
-rw-r--r--  src/leap/bitmask/mail/adaptors/tests/test_models.py             106
-rw-r--r--  src/leap/bitmask/mail/adaptors/tests/test_soledad_adaptor.py    529
7 files changed, 2133 insertions, 0 deletions
diff --git a/src/leap/bitmask/mail/adaptors/__init__.py b/src/leap/bitmask/mail/adaptors/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/leap/bitmask/mail/adaptors/__init__.py
diff --git a/src/leap/bitmask/mail/adaptors/models.py b/src/leap/bitmask/mail/adaptors/models.py
new file mode 100644
index 0000000..49460f7
--- /dev/null
+++ b/src/leap/bitmask/mail/adaptors/models.py
@@ -0,0 +1,123 @@
+# -*- coding: utf-8 -*-
+# models.py
+# Copyright (C) 2014 LEAP
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+Generic Models to be used by the Document Adaptors.
+"""
+import copy
+
+
+class SerializableModel(object):
+ """
+ A generic document model that can be serialized into a dictionary.
+
+ Subclasses of this `SerializableModel` are meant to be added as class
+ attributes of classes inheriting from DocumentWrapper.
+
+ An inner `__meta__` class may be defined in subclasses of this
+ SerializableModel, containing info about particularities of the model.
+
+ For instance, the use of `__meta__.index` marks the existence of a primary
+ index in the model, which will be used to do unique queries (in which case
+ all the other indexed fields in the underlying document will be filled with
+ the default info contained in the model definition).
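+
+ A minimal, illustrative definition (the names are hypothetical) could
+ look like this:
+
+ class MessageModel(SerializableModel):
+ type_ = "msg"
+ chash = ""
+
+ class __meta__(object):
+ index = "chash"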
+ """
+
+ @classmethod
+ def serialize(klass):
+ """
+ Get a dictionary representation of the public attributes in the model
+ class. To avoid collisions with builtins, any attribute ending in '_'
+ (like 'type_') will be normalized by removing the trailing
+ underscore.
+
+ This classmethod is used from within the serialize method of a
+ DocumentWrapper instance: it provides defaults for the
+ empty document.
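+
+ For instance (an illustrative model attribute), `type_ = "flags"` would
+ show up in the serialized dictionary as {'type': 'flags'}.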
+ """
+ assert isinstance(klass, type)
+ return _normalize_dict(klass.__dict__)
+
+
+class DocumentWrapper(object):
+ """
+ A Wrapper object that can be manipulated, passed around, and serialized in
+ a format that the store understands.
+ It is related to a SerializableModel, which must be specified as the
+ ``model`` class attribute. An instance of this DocumentWrapper will not
+ allow any *public* attributes other than those defined in the
+ corresponding model.
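+
+ A minimal sketch of a wrapper definition and its behaviour (the names
+ are hypothetical):
+
+ class PersonWrapper(DocumentWrapper):
+ class model(SerializableModel):
+ name = ""
+ age = 0
+
+ person = PersonWrapper(name="alice")
+ person.serialize() # -> {'name': 'alice', 'age': 0}
+ person.nickname = "al" # raises RuntimeError: not in the model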
+ """
+ # TODO we could do some very basic type checking here
+ # TODO set a dirty flag (on __setattr__, whenever the value is != from
+ # before)
+ # TODO we could enforce the existence of a correct "model" attribute
+ # in some other way (other than in the initializer)
+
+ def __init__(self, **kwargs):
+ if not getattr(self, 'model', None):
+ raise RuntimeError(
+ 'DocumentWrapper class needs a model attribute')
+
+ defaults = self.model.serialize()
+
+ if kwargs:
+ values = copy.deepcopy(defaults)
+ values.update(_normalize_dict(kwargs))
+ else:
+ values = defaults
+
+ for k, v in values.items():
+ k = k.replace('-', '_')
+ setattr(self, k, v)
+
+ def __setattr__(self, attr, value):
+ normalized = _normalize_dict(self.model.__dict__)
+ if not attr.startswith('_') and attr not in normalized:
+ raise RuntimeError(
+ "Cannot set attribute because it's not defined "
+ "in the model %s: %s" % (self.__class__, attr))
+ object.__setattr__(self, attr, value)
+
+ def serialize(self):
+ return _normalize_dict(self.__dict__)
+
+ def create(self):
+ raise NotImplementedError()
+
+ def update(self):
+ raise NotImplementedError()
+
+ def delete(self):
+ raise NotImplementedError()
+
+ @classmethod
+ def get_or_create(cls):
+ raise NotImplementedError()
+
+ @classmethod
+ def get_all(cls):
+ raise NotImplementedError()
+
+
+def _normalize_dict(_dict):
+ items = _dict.items()
+ items = filter(lambda (k, v): not callable(v), items)
+ items = filter(lambda (k, v): not k.startswith('_'), items)
+ items = [(k, v) if not k.endswith('_') else (k[:-1], v)
+ for (k, v) in items]
+ items = [(k.replace('-', '_'), v) for (k, v) in items]
+ return dict(items)
diff --git a/src/leap/bitmask/mail/adaptors/soledad.py b/src/leap/bitmask/mail/adaptors/soledad.py
new file mode 100644
index 0000000..ca8f741
--- /dev/null
+++ b/src/leap/bitmask/mail/adaptors/soledad.py
@@ -0,0 +1,1268 @@
+# soledad.py
+# Copyright (C) 2014 LEAP
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+Soledad MailAdaptor module.
+"""
+import logging
+import re
+
+from collections import defaultdict
+from email import message_from_string
+
+from twisted.internet import defer
+from twisted.python import log
+from zope.interface import implements
+from leap.soledad.common import l2db
+
+from leap.common.check import leap_assert, leap_assert_type
+
+from leap.mail import constants
+from leap.mail import walk
+from leap.mail.adaptors import soledad_indexes as indexes
+from leap.mail.constants import INBOX_NAME
+from leap.mail.adaptors import models
+from leap.mail.imap.mailbox import normalize_mailbox
+from leap.mail.utils import lowerdict, first
+from leap.mail.utils import stringify_parts_map
+from leap.mail.interfaces import IMailAdaptor, IMessageWrapper
+
+from leap.soledad.common.document import SoledadDocument
+
+
+logger = logging.getLogger(__name__)
+
+# TODO
+# [ ] Convenience function to create mail specifying subject, date, etc?
+
+
+_MSGID_PATTERN = r"""<([\w@.]+)>"""
+_MSGID_RE = re.compile(_MSGID_PATTERN)
+
+
+class DuplicatedDocumentError(Exception):
+ """
+ Raised when a duplicated document is detected.
+ """
+ pass
+
+
+def cleanup_deferred_locks():
+ """
+ Need to use this from within trial to clean up the reactor before
+ each run.
+ """
+ SoledadDocumentWrapper._k_locks = defaultdict(defer.DeferredLock)
+
+
+class SoledadDocumentWrapper(models.DocumentWrapper):
+ """
+ A Wrapper object that can be manipulated, passed around, and serialized in
+ a format that the Soledad Store understands.
+
+ It ensures atomicity of the document operations on creation, update and
+ deletion.
+ """
+ # TODO we could also use a _dirty flag (in models)
+ # TODO add a get_count() method ??? -- that is extended over l2db.
+
+ # We keep a dictionary with DeferredLocks, that will be
+ # unique to every subclass of SoledadDocumentWrapper.
+ _k_locks = defaultdict(defer.DeferredLock)
+
+ @classmethod
+ def _get_klass_lock(cls):
+ """
+ Get a DeferredLock that is unique for this subclass name.
+ Used to lock the access to indexes in the `get_or_create` call
+ for a particular DocumentWrapper.
+ """
+ return cls._k_locks[cls.__name__]
+
+ def __init__(self, doc_id=None, future_doc_id=None, **kwargs):
+ self._doc_id = doc_id
+ self._future_doc_id = future_doc_id
+ self._lock = defer.DeferredLock()
+ super(SoledadDocumentWrapper, self).__init__(**kwargs)
+
+ @property
+ def doc_id(self):
+ return self._doc_id
+
+ @property
+ def future_doc_id(self):
+ return self._future_doc_id
+
+ def set_future_doc_id(self, doc_id):
+ self._future_doc_id = doc_id
+
+ def create(self, store, is_copy=False):
+ """
+ Create the documents for this wrapper.
+ Since this method will not check for duplication, the
+ responsibility of avoiding duplicates is left to the caller.
+
+ You might be interested in using the `get_or_create` classmethod
+ instead (that's the preferred way of creating documents from
+ the wrapper object).
+
+ :return: a deferred that will fire when the underlying
+ Soledad document has been created.
+ :rtype: Deferred
+ """
+ leap_assert(self._doc_id is None,
+ "This document already has a doc_id!")
+
+ def update_doc_id(doc):
+ self._doc_id = doc.doc_id
+ self.set_future_doc_id(None)
+ return doc
+
+ def update_wrapper(failure):
+ # In the case of some copies (for instance, from one folder to
+ # another and back to the original folder), the document that we
+ # want to insert already exists. In this case, putting it
+ # and overwriting the document with that doc_id is the right thing
+ # to do.
+ failure.trap(l2db.errors.RevisionConflict)
+ self._doc_id = self.future_doc_id
+ self._future_doc_id = None
+ return self.update(store)
+
+ if self.future_doc_id is None:
+ d = store.create_doc(self.serialize())
+ else:
+ d = store.create_doc(self.serialize(),
+ doc_id=self.future_doc_id)
+ d.addCallback(update_doc_id)
+
+ if is_copy:
+ d.addErrback(update_wrapper)
+ else:
+ d.addErrback(self._catch_revision_conflict, self.future_doc_id)
+ return d
+
+ def update(self, store):
+ """
+ Update the documents for this wrapper.
+
+ :return: a deferred that will fire when the underlying
+ Soledad document has been updated.
+ :rtype: Deferred
+ """
+ # the deferred lock guards against revision conflicts
+ return self._lock.run(self._update, store)
+
+ def _update(self, store):
+ leap_assert(self._doc_id is not None,
+ "Need to create doc before updating")
+
+ def update_and_put_doc(doc):
+ doc.content.update(self.serialize())
+ d = store.put_doc(doc)
+ d.addErrback(self._catch_revision_conflict, doc.doc_id)
+ return d
+
+ d = store.get_doc(self._doc_id)
+ d.addCallback(update_and_put_doc)
+ return d
+
+ def _catch_revision_conflict(self, failure, doc_id):
+ # XXX We can have some RevisionConflicts if we try
+ # to put the docs that are already there.
+ # This can happen right now when creating/saving the cdocs
+ # during a copy. Instead of catching and ignoring this
+ # error, we should mark them in the copy so there is no attempt to
+ # create/update them.
+ failure.trap(l2db.errors.RevisionConflict)
+ logger.debug("Got conflict while putting %s" % doc_id)
+
+ def delete(self, store):
+ """
+ Delete the documents for this wrapper.
+
+ :return: a deferred that will fire when the underlying
+ Soledad document has been deleted.
+ :rtype: Deferred
+ """
+ # the deferred lock guards against conflicts while updating
+ return self._lock.run(self._delete, store)
+
+ def _delete(self, store):
+ leap_assert(self._doc_id is not None,
+ "Need to create doc before deleting")
+ # XXX might want to flag this DocumentWrapper to avoid
+ # updating it by mistake. This could go in models.DocumentWrapper
+
+ def delete_doc(doc):
+ return store.delete_doc(doc)
+
+ d = store.get_doc(self._doc_id)
+ d.addCallback(delete_doc)
+ return d
+
+ @classmethod
+ def get_or_create(cls, store, index, value):
+ """
+ Get a unique DocumentWrapper by index, or create a new one if no
+ document matches the query.
+
+ :param index: the primary index for the model.
+ :type index: str
+ :param value: the value to query the primary index.
+ :type value: str
+
+ :return: a deferred that will be fired with the SoledadDocumentWrapper
+ matching the index query, either existing or just created.
+ :rtype: Deferred
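+
+ A usage sketch (cf. `get_or_create_mbox` below; the mailbox name is
+ illustrative):
+
+ d = MailboxWrapper.get_or_create(
+ store, indexes.TYPE_MBOX_IDX, 'inbox')
+ d.addCallback(lambda wrapper: wrapper.mbox)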
+ """
+ return cls._get_klass_lock().run(
+ cls._get_or_create, store, index, value)
+
+ @classmethod
+ def _get_or_create(cls, store, index, value):
+ # TODO shorten this method.
+ assert store is not None
+ assert index is not None
+ assert value is not None
+
+ def get_main_index():
+ try:
+ return cls.model.__meta__.index
+ except AttributeError:
+ raise RuntimeError("The model is badly defined")
+
+ # TODO separate into another method?
+ def try_to_get_doc_from_index(indexes):
+ values = []
+ idx_def = dict(indexes)[index]
+ if len(idx_def) == 1:
+ values = [value]
+ else:
+ main_index = get_main_index()
+ fields = cls.model.serialize()
+ for field in idx_def:
+ if field == main_index:
+ values.append(value)
+ else:
+ values.append(fields[field])
+ d = store.get_from_index(index, *values)
+ return d
+
+ def get_first_doc_if_any(docs):
+ if not docs:
+ return None
+ if len(docs) > 1:
+ raise DuplicatedDocumentError
+ return docs[0]
+
+ def wrap_existing_or_create_new(doc):
+ if doc:
+ return cls(doc_id=doc.doc_id, **doc.content)
+ else:
+ return create_and_wrap_new_doc()
+
+ def create_and_wrap_new_doc():
+ # XXX use closure to store indexes instead of
+ # querying for them again.
+ d = store.list_indexes()
+ d.addCallback(get_wrapper_instance_from_index)
+ d.addCallback(return_wrapper_when_created)
+ return d
+
+ def get_wrapper_instance_from_index(indexes):
+ init_values = {}
+ idx_def = dict(indexes)[index]
+ if len(idx_def) == 1:
+ init_value = {idx_def[0]: value}
+ return cls(**init_value)
+ main_index = get_main_index()
+ fields = cls.model.serialize()
+ for field in idx_def:
+ if field == main_index:
+ init_values[field] = value
+ else:
+ init_values[field] = fields[field]
+ return cls(**init_values)
+
+ def return_wrapper_when_created(wrapper):
+ d = wrapper.create(store)
+ d.addCallback(lambda doc: wrapper)
+ return d
+
+ d = store.list_indexes()
+ d.addCallback(try_to_get_doc_from_index)
+ d.addCallback(get_first_doc_if_any)
+ d.addCallback(wrap_existing_or_create_new)
+ return d
+
+ @classmethod
+ def get_all(cls, store):
+ """
+ Get a collection of wrappers around all the documents belonging
+ to this kind.
+
+ For this to work, the model.__meta__ needs to include a `list_index`
+ tuple with the name of the index to be used for listing purposes and
+ the field to be used to query that index.
+
+ Note that this method only supports indexes of a single field at the
+ moment. It also might be too expensive to return all the documents
+ matching the query, so handle with care.
+
+ class __meta__(object):
+ index = "name"
+ list_index = ("by-type", "type_")
+
+ :return: a deferred that will be fired with an iterable containing
+ one SoledadDocumentWrapper for each document matching the index
+ defined in the model as the `list_index`.
+ :rtype: Deferred
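+
+ A usage sketch (MailboxWrapper defines such a `list_index`):
+
+ d = MailboxWrapper.get_all(store)
+ d.addCallback(lambda wrappers: [w.mbox for w in wrappers])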
+ """
+ # TODO LIST (get_all)
+ # [ ] extend support to indexes with n-ples
+ # [ ] benchmark the cost of querying and returning indexes in a big
+ # database. This might badly need pagination before being put to
+ # serious use.
+ return cls._get_klass_lock().run(cls._get_all, store)
+
+ @classmethod
+ def _get_all(cls, store):
+ try:
+ list_index, list_attr = cls.model.__meta__.list_index
+ except AttributeError:
+ raise RuntimeError("The model is badly defined: no list_index")
+ try:
+ index_value = getattr(cls.model, list_attr)
+ except AttributeError:
+ raise RuntimeError("The model is badly defined: "
+ "no attribute matching list_index")
+
+ def wrap_docs(docs):
+ return (cls(doc_id=doc.doc_id, **doc.content) for doc in docs)
+
+ d = store.get_from_index(list_index, index_value)
+ d.addCallback(wrap_docs)
+ return d
+
+ def __repr__(self):
+ try:
+ idx = getattr(self, self.model.__meta__.index)
+ except AttributeError:
+ idx = ""
+ return "<%s: %s (%s)>" % (self.__class__.__name__,
+ idx, self._doc_id)
+
+
+#
+# Message documents
+#
+
+class FlagsDocWrapper(SoledadDocumentWrapper):
+
+ class model(models.SerializableModel):
+ type_ = "flags"
+ chash = ""
+
+ mbox_uuid = ""
+ seen = False
+ deleted = False
+ recent = False
+ flags = []
+ tags = []
+ size = 0
+ multi = False
+
+ class __meta__(object):
+ index = "mbox"
+
+ def set_mbox_uuid(self, mbox_uuid):
+ # XXX raise error if already created, should use copy instead
+ mbox_uuid = mbox_uuid.replace('-', '_')
+ new_id = constants.FDOCID.format(mbox_uuid=mbox_uuid, chash=self.chash)
+ self._future_doc_id = new_id
+ self.mbox_uuid = mbox_uuid
+
+ def get_flags(self):
+ """
+ Get the flags for this message (as a list of str, not unicode).
+ """
+ return map(str, self.flags)
+
+
+class HeaderDocWrapper(SoledadDocumentWrapper):
+
+ class model(models.SerializableModel):
+ type_ = "head"
+ chash = ""
+
+ date = ""
+ subject = ""
+ headers = {}
+ part_map = {}
+ body = "" # link to phash of body
+ msgid = ""
+ multi = False
+
+ class __meta__(object):
+ index = "chash"
+
+
+class ContentDocWrapper(SoledadDocumentWrapper):
+
+ class model(models.SerializableModel):
+ type_ = "cnt"
+ phash = ""
+
+ ctype = "" # XXX index by ctype too?
+ lkf = [] # XXX not implemented yet!
+ raw = ""
+
+ content_disposition = ""
+ content_transfer_encoding = ""
+ content_type = ""
+
+ class __meta__(object):
+ index = "phash"
+
+
+class MetaMsgDocWrapper(SoledadDocumentWrapper):
+
+ class model(models.SerializableModel):
+ type_ = "meta"
+ fdoc = ""
+ hdoc = ""
+ cdocs = []
+
+ def set_mbox_uuid(self, mbox_uuid):
+ # XXX raise error if already created, should use copy instead
+ mbox_uuid = mbox_uuid.replace('-', '_')
+ chash = re.findall(constants.FDOCID_CHASH_RE, self.fdoc)[0]
+ new_id = constants.METAMSGID.format(mbox_uuid=mbox_uuid, chash=chash)
+ new_fdoc_id = constants.FDOCID.format(mbox_uuid=mbox_uuid, chash=chash)
+ self._future_doc_id = new_id
+ self.fdoc = new_fdoc_id
+
+
+class MessageWrapper(object):
+
+ # This could benefit from a DeferredLock to create/update all the
+ # documents at the same time, and maybe defend against concurrent updates?
+
+ implements(IMessageWrapper)
+
+ def __init__(self, mdoc, fdoc, hdoc, cdocs=None, is_copy=False):
+ """
+ Need at least a metamsg-document, a flag-document and a header-document
+ to instantiate a MessageWrapper. Content-documents can be retrieved
+ lazily.
+
+ cdocs, if any, should be a dictionary in which the keys are ascending
+ integers, beginning at one, and the values are dictionaries with the
+ content of the content-docs.
+
+ is_copy, if set to True, will cause only the mdoc and fdoc to be
+ created (because hdoc and cdocs are supposed to exist already).
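+
+ For instance (as in the adaptor tests), a single-part message can
+ pass:
+
+ cdocs = {1: dict(raw='This is a test message')}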
+ """
+ self._is_copy = is_copy
+
+ def get_doc_wrapper(doc, cls):
+ if isinstance(doc, SoledadDocument):
+ doc_id = doc.doc_id
+ doc = doc.content
+ else:
+ doc_id = None
+ if not doc:
+ doc = {}
+ return cls(doc_id=doc_id, **doc)
+
+ self.mdoc = get_doc_wrapper(mdoc, MetaMsgDocWrapper)
+
+ self.fdoc = get_doc_wrapper(fdoc, FlagsDocWrapper)
+ self.fdoc.set_future_doc_id(self.mdoc.fdoc)
+
+ self.hdoc = get_doc_wrapper(hdoc, HeaderDocWrapper)
+ self.hdoc.set_future_doc_id(self.mdoc.hdoc)
+
+ if cdocs is None:
+ cdocs = {}
+ cdocs_keys = cdocs.keys()
+ assert sorted(cdocs_keys) == range(1, len(cdocs_keys) + 1)
+ self.cdocs = dict([
+ (key, get_doc_wrapper(doc, ContentDocWrapper))
+ for (key, doc) in cdocs.items()])
+ for doc_id, cdoc in zip(self.mdoc.cdocs, self.cdocs.values()):
+ if cdoc.raw == "":
+ log.msg("Empty raw field in cdoc %s" % doc_id)
+ cdoc.set_future_doc_id(doc_id)
+
+ def create(self, store, notify_just_mdoc=False, pending_inserts_dict=None):
+ """
+ Create all the parts for this message in the store.
+
+ :param store: an instance of Soledad
+
+ :param notify_just_mdoc:
+ if set to True, this method will return *only* the deferred
+ corresponding to the creation of the meta-message document.
+ Be warned that in that case there will be no record of failures
+ when creating the other part-documents.
+
+ Otherwise, this method will return a deferred that will wait for
+ the creation of all the part documents.
+
+ Setting this flag to True is mostly a convenient workaround for the
+ fact that massive serial appends will take too much time; in most
+ cases the MUA will only switch to the mailbox where the appends have
+ happened after a certain time, which will usually be enough for all
+ the queued insert operations to have finished.
+ :type notify_just_mdoc: bool
+ :param pending_inserts_dict:
+ a dictionary with the pending inserts ids.
+ :type pending_inserts_dict: dict
+
+ :return: a deferred whose callback will be called when either all the
+ part documents have been written, or just the metamsg-doc,
+ depending on the value of the notify_just_mdoc flag
+ :rtype: defer.Deferred
+ """
+ if pending_inserts_dict is None:
+ pending_inserts_dict = {}
+
+ leap_assert(self.cdocs,
+ "Need non empty cdocs to create the "
+ "MessageWrapper documents")
+ leap_assert(self.mdoc.doc_id is None,
+ "Cannot create: mdoc has a doc_id")
+ leap_assert(self.fdoc.doc_id is None,
+ "Cannot create: fdoc has a doc_id")
+
+ def unblock_pending_insert(result):
+ if pending_inserts_dict:
+ ci_headers = lowerdict(self.hdoc.headers)
+ msgid = ci_headers.get('message-id', None)
+ try:
+ d = pending_inserts_dict[msgid]
+ d.callback(msgid)
+ except KeyError:
+ pass
+ return result
+
+ # TODO check that the doc_ids in the mdoc are coherent
+ self.d = []
+
+ mdoc_created = self.mdoc.create(store, is_copy=self._is_copy)
+ fdoc_created = self.fdoc.create(store, is_copy=self._is_copy)
+
+ mdoc_created.addErrback(lambda f: log.err(f))
+ fdoc_created.addErrback(lambda f: log.err(f))
+
+ self.d.append(mdoc_created)
+ self.d.append(fdoc_created)
+
+ if not self._is_copy:
+ if self.hdoc.doc_id is None:
+ self.d.append(self.hdoc.create(store))
+ for cdoc in self.cdocs.values():
+ if cdoc.doc_id is not None:
+ # we could be just linking to an existing
+ # content-doc.
+ continue
+ self.d.append(cdoc.create(store))
+
+ def log_all_inserted(result):
+ log.msg("All parts inserted for msg!")
+ return result
+
+ self.all_inserted_d = defer.gatherResults(self.d, consumeErrors=True)
+ self.all_inserted_d.addCallback(log_all_inserted)
+ self.all_inserted_d.addCallback(unblock_pending_insert)
+ self.all_inserted_d.addErrback(lambda failure: log.err(failure))
+
+ if notify_just_mdoc:
+ return mdoc_created
+ else:
+ return self.all_inserted_d
+
+ def update(self, store):
+ """
+ Update the only mutable parts, which are within the flags document.
+ """
+ return self.fdoc.update(store)
+
+ def delete(self, store):
+ # TODO
+ # Eventually this would have to do the duplicate search or send to the
+ # garbage collector. At least the mdoc and fdoc can be
+ # unlinked.
+ d = []
+ if self.mdoc.doc_id:
+ d.append(self.mdoc.delete(store))
+ d.append(self.fdoc.delete(store))
+ return defer.gatherResults(d)
+
+ def copy(self, store, new_mbox_uuid):
+ """
+ Return a copy of this MessageWrapper in a new mailbox.
+
+ :param store: an instance of Soledad, or anything that behaves alike.
+ :param new_mbox_uuid: the uuid of the mailbox where we are copying this
+ message to.
+ :type new_mbox_uuid: str
+ :rtype: MessageWrapper
+ """
+ new_mdoc = self.mdoc.serialize()
+ new_fdoc = self.fdoc.serialize()
+
+ # the future doc_ids are properly set because we modified
+ # the pointers in mdoc, which has precedence.
+ new_wrapper = MessageWrapper(new_mdoc, new_fdoc, None, None,
+ is_copy=True)
+ new_wrapper.hdoc = self.hdoc
+ new_wrapper.cdocs = self.cdocs
+ new_wrapper.set_mbox_uuid(new_mbox_uuid)
+
+ # XXX could flag so that it only creates mdoc/fdoc...
+
+ d = new_wrapper.create(store)
+ d.addCallback(lambda result: new_wrapper)
+ d.addErrback(lambda failure: log.err(failure))
+ return d
+
+ def set_mbox_uuid(self, mbox_uuid):
+ """
+ Set the mailbox for this wrapper.
+ This method should only be used before the Documents for the
+ MessageWrapper have been created; it will raise otherwise.
+ """
+ mbox_uuid = mbox_uuid.replace('-', '_')
+ self.mdoc.set_mbox_uuid(mbox_uuid)
+ self.fdoc.set_mbox_uuid(mbox_uuid)
+
+ def set_flags(self, flags):
+ # TODO serialize the get + update
+ if flags is None:
+ flags = tuple()
+ leap_assert_type(flags, tuple)
+ self.fdoc.flags = list(flags)
+ self.fdoc.deleted = "\\Deleted" in flags
+ self.fdoc.seen = "\\Seen" in flags
+ self.fdoc.recent = "\\Recent" in flags
+
+ def set_tags(self, tags):
+ # TODO serialize the get + update
+ if tags is None:
+ tags = tuple()
+ leap_assert_type(tags, tuple)
+ self.fdoc.tags = list(tags)
+
+ def set_date(self, date):
+ # XXX assert valid date format
+ self.hdoc.date = date
+
+ def get_subpart_dict(self, index):
+ """
+ :param index: the part to lookup, 1-indexed
+ :type index: int
+ :rtype: dict
+ """
+ return self.hdoc.part_map[str(index)]
+
+ def get_subpart_indexes(self):
+ return self.hdoc.part_map.keys()
+
+ def get_body(self, store):
+ """
+ :return: a deferred firing with a ContentDocWrapper when the body
+ phash is known; otherwise the first cdoc wrapper, or an empty
+ string if there are no content docs.
+ :rtype: Deferred, ContentDocWrapper or str
+ """
+ body_phash = self.hdoc.body
+ if body_phash:
+ d = store.get_doc('C-' + body_phash)
+ d.addCallback(lambda doc: ContentDocWrapper(**doc.content))
+ return d
+ elif self.cdocs:
+ return self.cdocs[1]
+ else:
+ return ''
+
+#
+# Mailboxes
+#
+
+
+class MailboxWrapper(SoledadDocumentWrapper):
+
+ class model(models.SerializableModel):
+ type_ = "mbox"
+ mbox = INBOX_NAME
+ uuid = None
+ flags = []
+ recent = []
+ created = 1
+ closed = False
+ subscribed = False
+
+ class __meta__(object):
+ index = "mbox"
+ list_index = (indexes.TYPE_IDX, 'type_')
+
+
+#
+# Soledad Adaptor
+#
+
+class SoledadIndexMixin(object):
+ """
+ This will need a class attribute `indexes`, that is a dictionary
+ containing the index definitions for the l2db store underlying
+ Soledad.
+
+ It needs to be in the following format:
+ {'index-name': ['field1', 'field2']}
+
+ You can also add a class attribute `wait_for_indexes` to any class
+ inheriting from this Mixin, that should be a list of strings representing
+ the methods that need to wait until the indexes have been initialized
+ before being able to work properly.
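+
+ A sketch of a concrete subclass (index names are illustrative):
+
+ class MyAdaptor(SoledadIndexMixin):
+ indexes = {'by-type': ['type'],
+ 'by-type-and-name': ['type', 'name']}
+ wait_for_indexes = ['get_all_foo'] # methods defined on MyAdaptor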
+ """
+ # TODO move this mixin to soledad itself
+ # so that each application can pass a set of indexes for their data model.
+
+ # TODO could have a wrapper class for indexes, supporting introspection
+ # and __getattr__
+
+ # TODO make this an interface?
+
+ indexes = {}
+ wait_for_indexes = []
+ store_ready = False
+
+ def initialize_store(self, store):
+ """
+ Initialize the indexes in the database.
+
+ :param store: store
+ :returns: a Deferred that will fire when the store is correctly
+ initialized.
+ :rtype: deferred
+ """
+ # TODO I think we *should* get another deferredLock in here, but
+ # global to the soledad namespace, to protect from several points
+ # initializing soledad indexes at the same time.
+ self._wait_for_indexes()
+
+ d = self._init_indexes(store)
+ d.addCallback(self._restore_waiting_methods)
+ return d
+
+ def _init_indexes(self, store):
+ """
+ Initialize the database indexes.
+ """
+ leap_assert(store, "Cannot init indexes with null soledad")
+ leap_assert_type(self.indexes, dict)
+
+ def _create_index(name, expression):
+ return store.create_index(name, *expression)
+
+ def init_indexes(indexes):
+ deferreds = []
+ db_indexes = dict(indexes)
+ # Loop through the indexes we expect to find.
+ for name, expression in self.indexes.items():
+ if name not in db_indexes:
+ # The index does not yet exist.
+ d = _create_index(name, expression)
+ deferreds.append(d)
+ elif expression != db_indexes[name]:
+ # The index exists but the definition is not what was
+ # expected, so we delete it and add the proper index
+ # expression (binding the loop variables so the callback
+ # uses the right values when it fires).
+ d = store.delete_index(name)
+ d.addCallback(
+ lambda _, name=name, expression=expression:
+ _create_index(name, expression))
+ deferreds.append(d)
+ return defer.gatherResults(deferreds, consumeErrors=True)
+
+ def store_ready(whatever):
+ self.store_ready = True
+ return whatever
+
+ self.deferred_indexes = store.list_indexes()
+ self.deferred_indexes.addCallback(init_indexes)
+ self.deferred_indexes.addCallback(store_ready)
+ return self.deferred_indexes
+
+ def _wait_for_indexes(self):
+ """
+ Make the marked methods wait for the indexes to be ready.
+ Heavily based on
+ http://blogs.fluidinfo.com/terry/2009/05/11/a-mixin-class-allowing-python-__init__-methods-to-work-with-twisted-deferreds/
+
+ The methods that need to wait are taken from the `wait_for_indexes`
+ class attribute (a list of method names).
+ """
+ leap_assert_type(self.wait_for_indexes, list)
+ methods = self.wait_for_indexes
+
+ self.waiting = []
+ self.stored = {}
+
+ def makeWrapper(method):
+ def wrapper(*args, **kw):
+ d = defer.Deferred()
+ d.addCallback(lambda _: self.stored[method](*args, **kw))
+ self.waiting.append(d)
+ return d
+ return wrapper
+
+ for method in methods:
+ self.stored[method] = getattr(self, method)
+ setattr(self, method, makeWrapper(method))
+
+ def _restore_waiting_methods(self, _):
+ for method in self.stored:
+ setattr(self, method, self.stored[method])
+ for d in self.waiting:
+ d.callback(None)
+
+
+class SoledadMailAdaptor(SoledadIndexMixin):
+
+ implements(IMailAdaptor)
+ store = None
+
+ indexes = indexes.MAIL_INDEXES
+ wait_for_indexes = ['get_or_create_mbox', 'update_mbox', 'get_all_mboxes']
+
+ mboxwrapper_klass = MailboxWrapper
+
+ def __init__(self):
+ SoledadIndexMixin.__init__(self)
+
+ # Message handling
+
+ def get_msg_from_string(self, MessageClass, raw_msg):
+ """
+ Get an instance of a MessageClass initialized with a MessageWrapper
+ that contains all the parts obtained from parsing the raw string for
+ the message.
+
+ :param MessageClass: any Message class that can be initialized passing
+ an instance of an IMessageWrapper implementor.
+ :type MessageClass: type
+ :param raw_msg: a string containing the raw email message.
+ :type raw_msg: str
+ :rtype: MessageClass instance.
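+
+ A usage sketch (as exercised in the adaptor tests):
+
+ msg = adaptor.get_msg_from_string(MessageClass, raw)
+ d = adaptor.create_msg(store, msg)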
+ """
+ assert(MessageClass is not None)
+ mdoc, fdoc, hdoc, cdocs = _split_into_parts(raw_msg)
+ return self.get_msg_from_docs(
+ MessageClass, mdoc, fdoc, hdoc, cdocs)
+
+ def get_msg_from_docs(self, MessageClass, mdoc, fdoc, hdoc, cdocs=None,
+ uid=None):
+ """
+ Get an instance of a MessageClass initialized with a MessageWrapper
+ that contains the passed part documents.
+
+ This is not the recommended way of obtaining a message, unless you know
+ how to take care of ensuring the internal consistency between the part
+ documents, or unless you are gluing together the part documents that
+ have been previously generated by `get_msg_from_string`.
+
+ :param MessageClass: any Message class that can be initialized passing
+ an instance of an IMessageWrapper implementor.
+ :type MessageClass: type
+ :param mdoc: a dictionary containing values from which a
+ MetaMsgDocWrapper can be initialized
+ :type mdoc: dict
+ :param fdoc: a dictionary containing values from which a
+ FlagsDocWrapper can be initialized
+ :type fdoc: dict
+ :param hdoc: a dictionary containing values from which a
+ HeaderDocWrapper can be initialized
+ :type hdoc: dict
+ :param cdocs: None, or a dictionary mapping integers (1-indexed) to
+ dicts from where a ContentDocWrapper can be initialized.
+ :type cdocs: dict, or None
+
+ :rtype: MessageClass instance.
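+
+ A usage sketch (values are illustrative; see the adaptor tests):
+
+ msg = adaptor.get_msg_from_docs(
+ MessageClass, mdoc, fdoc, hdoc,
+ cdocs={1: dict(raw='This is a test message')})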
+ """
+ assert(MessageClass is not None)
+ return MessageClass(MessageWrapper(mdoc, fdoc, hdoc, cdocs), uid=uid)
+
+ def get_msg_from_mdoc_id(self, MessageClass, store, mdoc_id,
+ uid=None, get_cdocs=False):
+
+ def wrap_meta_doc(doc):
+ cls = MetaMsgDocWrapper
+ return cls(doc_id=doc.doc_id, **doc.content)
+
+ def get_part_docs_from_mdoc_wrapper(wrapper):
+ d_docs = []
+ d_docs.append(store.get_doc(wrapper.fdoc))
+ d_docs.append(store.get_doc(wrapper.hdoc))
+ for cdoc in wrapper.cdocs:
+ d_docs.append(store.get_doc(cdoc))
+
+ def add_mdoc(doc_list):
+ return [wrapper.serialize()] + doc_list
+
+ d = defer.gatherResults(d_docs)
+ d.addCallback(add_mdoc)
+ return d
+
+ def get_parts_doc_from_mdoc_id():
+ mbox = re.findall(constants.METAMSGID_MBOX_RE, mdoc_id)[0]
+ chash = re.findall(constants.METAMSGID_CHASH_RE, mdoc_id)[0]
+
+ def _get_fdoc_id_from_mdoc_id():
+ return constants.FDOCID.format(mbox_uuid=mbox, chash=chash)
+
+ def _get_hdoc_id_from_mdoc_id():
+ return constants.HDOCID.format(mbox_uuid=mbox, chash=chash)
+
+ d_docs = []
+ fdoc_id = _get_fdoc_id_from_mdoc_id()
+ hdoc_id = _get_hdoc_id_from_mdoc_id()
+
+ d_docs.append(store.get_doc(mdoc_id))
+ d_docs.append(store.get_doc(fdoc_id))
+ d_docs.append(store.get_doc(hdoc_id))
+
+ d = defer.gatherResults(d_docs)
+ return d
+
+ def _err_log_failure_part_docs(failure):
+ # See https://leap.se/code/issues/7495.
+ # This avoids blocks, but the real cause still needs to be
+ # isolated (0.9.0rc3) -- kali
+ log.msg("BUG ---------------------------------------------------")
+ log.msg("BUG: Error while retrieving part docs for mdoc id %s" %
+ mdoc_id)
+ log.err(failure)
+ log.msg("BUG (please report above info) ------------------------")
+ return []
+
+ def _err_log_cannot_find_msg(failure):
+ log.msg("BUG: Error while getting msg (uid=%s)" % uid)
+ return None
+
+ if get_cdocs:
+ d = store.get_doc(mdoc_id)
+ d.addCallback(wrap_meta_doc)
+ d.addCallback(get_part_docs_from_mdoc_wrapper)
+ d.addErrback(_err_log_failure_part_docs)
+
+ else:
+ d = get_parts_doc_from_mdoc_id()
+
+ d.addCallback(self._get_msg_from_variable_doc_list,
+ msg_class=MessageClass, uid=uid)
+ d.addErrback(_err_log_cannot_find_msg)
+ return d
+
+ def _get_msg_from_variable_doc_list(self, doc_list, msg_class, uid=None):
+ if len(doc_list) == 3:
+ mdoc, fdoc, hdoc = doc_list
+ cdocs = None
+ elif len(doc_list) > 3:
+ # XXX is this case used?
+ mdoc, fdoc, hdoc = doc_list[:3]
+ cdocs = dict(enumerate(doc_list[3:], 1))
+ return self.get_msg_from_docs(
+ msg_class, mdoc, fdoc, hdoc, cdocs, uid=uid)
+
+ def get_flags_from_mdoc_id(self, store, mdoc_id):
+ """
+ Get the flags for a message, given the doc_id of its meta-document.
+
+ :return: a deferred that will fire with the list of flags.
+ :rtype: Deferred
+ """
+ mbox = re.findall(constants.METAMSGID_MBOX_RE, mdoc_id)[0]
+ chash = re.findall(constants.METAMSGID_CHASH_RE, mdoc_id)[0]
+
+ def _get_fdoc_id_from_mdoc_id():
+ return constants.FDOCID.format(mbox_uuid=mbox, chash=chash)
+
+ fdoc_id = _get_fdoc_id_from_mdoc_id()
+
+ def wrap_fdoc(doc):
+ if not doc:
+ return
+ cls = FlagsDocWrapper
+ return cls(doc_id=doc.doc_id, **doc.content)
+
+ def get_flags(fdoc_wrapper):
+ if not fdoc_wrapper:
+ return []
+ return fdoc_wrapper.get_flags()
+
+ d = store.get_doc(fdoc_id)
+ d.addCallback(wrap_fdoc)
+ d.addCallback(get_flags)
+ return d
+
+ def create_msg(self, store, msg):
+ """
+ :param store: an instance of soledad, or anything that behaves alike
+ :param msg: a Message object.
+
+ :return: a Deferred that is fired when all the underlying documents
+ have been created.
+ :rtype: defer.Deferred
+ """
+ wrapper = msg.get_wrapper()
+ return wrapper.create(store)
+
+ def update_msg(self, store, msg):
+ """
+ :param msg: a Message object.
+ :param store: an instance of soledad, or anything that behaves alike
+ :return: a Deferred that is fired when all the underlying documents
+ have been updated (actually, it's only the fdoc that's allowed
+ to update).
+ :rtype: defer.Deferred
+ """
+ wrapper = msg.get_wrapper()
+ return wrapper.update(store)
+
+ # batch deletion
+
+ def del_all_flagged_messages(self, store, mbox_uuid):
+ """
+ Delete all messages flagged as deleted.
+ """
+ def err(failure):
+ log.err(failure)
+
+ def delete_fdoc_and_mdoc_flagged(fdocs):
+ # low level here, not using the wrappers...
+ # get meta doc ids from the flag doc ids
+ fdoc_ids = [doc.doc_id for doc in fdocs]
+ mdoc_ids = map(lambda s: "M" + s[1:], fdoc_ids)
+
+ def delete_all_docs(mdocs, fdocs):
+ mdocs = list(mdocs)
+ doc_ids = [m.doc_id for m in mdocs]
+ _d = []
+ docs = mdocs + fdocs
+ for doc in docs:
+ _d.append(store.delete_doc(doc))
+ d = defer.gatherResults(_d)
+ # return the mdocs ids only
+ d.addCallback(lambda _: doc_ids)
+ return d
+
+ d = store.get_docs(mdoc_ids)
+ d.addCallback(delete_all_docs, fdocs)
+ d.addErrback(err)
+ return d
+
+ type_ = FlagsDocWrapper.model.type_
+ uuid = mbox_uuid.replace('-', '_')
+ deleted_index = indexes.TYPE_MBOX_DEL_IDX
+
+ d = store.get_from_index(deleted_index, type_, uuid, "1")
+ d.addCallbacks(delete_fdoc_and_mdoc_flagged, err)
+ return d
+
+ # count messages
+
+ def get_count_unseen(self, store, mbox_uuid):
+ """
+ Get the number of unseen messages for a given mailbox.
+
+ :param store: instance of Soledad.
+ :param mbox_uuid: the uuid for this mailbox.
+ :rtype: int
+ """
+ type_ = FlagsDocWrapper.model.type_
+ uuid = mbox_uuid.replace('-', '_')
+
+ unseen_index = indexes.TYPE_MBOX_SEEN_IDX
+
+ d = store.get_count_from_index(unseen_index, type_, uuid, "0")
+ d.addErrback(self._errback)
+ return d
+
+ def get_count_recent(self, store, mbox_uuid):
+ """
+ Get the number of recent messages for a given mailbox.
+
+ :param store: instance of Soledad.
+ :param mbox_uuid: the uuid for this mailbox.
+ :rtype: int
+ """
+ type_ = FlagsDocWrapper.model.type_
+ uuid = mbox_uuid.replace('-', '_')
+
+ recent_index = indexes.TYPE_MBOX_RECENT_IDX
+
+ d = store.get_count_from_index(recent_index, type_, uuid, "1")
+ d.addErrback(self._errback)
+ return d
+
+ # search api
+
+ def get_mdoc_id_from_msgid(self, store, mbox_uuid, msgid):
+ """
+ Get the mdoc_id for a message with the passed msgid (the one in the
+ message-id header).
+ This is used by the MUA to retrieve the recently saved draft.
+ """
+ type_ = HeaderDocWrapper.model.type_
+ uuid = mbox_uuid.replace('-', '_')
+
+ msgid_index = indexes.TYPE_MSGID_IDX
+
+ def get_mdoc_id(hdoc):
+ if not hdoc:
+ log.msg("Could not find a HDOC with MSGID %s" % msgid)
+ return None
+ hdoc = hdoc[0]
+ mdoc_id = hdoc.doc_id.replace("H-", "M-%s-" % uuid)
+ return mdoc_id
+
+ d = store.get_from_index(msgid_index, type_, msgid)
+ d.addCallback(get_mdoc_id)
+ return d
+
+ # Mailbox handling
+
+ def get_or_create_mbox(self, store, name):
+ """
+ Get the mailbox with the given name, or create one if it does not
+ exist.
+
+ :param store: instance of Soledad
+ :param name: the name of the mailbox
+ :type name: str
+ """
+ index = indexes.TYPE_MBOX_IDX
+ mbox = normalize_mailbox(name)
+ return MailboxWrapper.get_or_create(store, index, mbox)
+
+ def update_mbox(self, store, mbox_wrapper):
+ """
+ Update the documents for a given mailbox.
+ :param mbox_wrapper: MailboxWrapper instance
+ :type mbox_wrapper: MailboxWrapper
+ :return: a Deferred that will be fired when the mailbox documents
+ have been updated.
+ :rtype: defer.Deferred
+ """
+ leap_assert_type(mbox_wrapper, SoledadDocumentWrapper)
+ return mbox_wrapper.update(store)
+
+ def delete_mbox(self, store, mbox_wrapper):
+ leap_assert_type(mbox_wrapper, SoledadDocumentWrapper)
+ return mbox_wrapper.delete(store)
+
+ def get_all_mboxes(self, store):
+ """
+ Retrieve a list with wrappers for all the mailboxes.
+
+ :return: a deferred that will be fired with a list of all the
+ MailboxWrappers found.
+ :rtype: defer.Deferred
+ """
+ return MailboxWrapper.get_all(store)
+
+ def _errback(self, failure):
+ log.err(failure)
+
+
+def _split_into_parts(raw):
+ # TODO signal that we can delete the original message!-----
+ # when all the processing is done.
+ # TODO add the linked-from info !
+ # TODO add reference to the original message?
+ # TODO populate Default FLAGS/TAGS (unseen?)
+ # TODO seed properly the content_docs with defaults??
+
+ msg, chash, multi = _parse_msg(raw)
+ size = len(msg.as_string())
+
+ parts_map = walk.get_tree(msg)
+ cdocs_list = list(walk.get_raw_docs(msg))
+ cdocs_phashes = [c['phash'] for c in cdocs_list]
+ body_phash = walk.get_body_phash(msg)
+
+ mdoc = _build_meta_doc(chash, cdocs_phashes)
+ fdoc = _build_flags_doc(chash, size, multi)
+ hdoc = _build_headers_doc(msg, chash, body_phash, parts_map)
+
+ # The MessageWrapper expects a dict, one-indexed
+ cdocs = dict(enumerate(cdocs_list, 1))
+
+ return mdoc, fdoc, hdoc, cdocs
+
+
+def _parse_msg(raw):
+ msg = message_from_string(raw)
+ chash = walk.get_hash(raw)
+ multi = msg.is_multipart()
+ return msg, chash, multi
+
+
+def _build_meta_doc(chash, cdocs_phashes):
+ _mdoc = MetaMsgDocWrapper()
+ # FIXME passing the inbox name because we don't have the uuid at this
+ # point.
+
+ _mdoc.fdoc = constants.FDOCID.format(mbox_uuid=INBOX_NAME, chash=chash)
+ _mdoc.hdoc = constants.HDOCID.format(chash=chash)
+ _mdoc.cdocs = [constants.CDOCID.format(phash=p) for p in cdocs_phashes]
+
+ return _mdoc.serialize()
+
+
+def _build_flags_doc(chash, size, multi):
+ _fdoc = FlagsDocWrapper(chash=chash, size=size, multi=multi)
+ return _fdoc.serialize()
+
+
+def _build_headers_doc(msg, chash, body_phash, parts_map):
+ """
+ Assemble a headers document from the original parsed message, the
+ content-hash, and the parts map.
+
+ It takes into account possibly repeated headers.
+ """
+ headers = defaultdict(list)
+ for k, v in msg.items():
+ headers[k].append(v)
+ # "fix" for repeated headers (as in "Received:"
+ for k, v in headers.items():
+ newline = "\n%s: " % (k.lower(),)
+ headers[k] = newline.join(v)
+
+ lower_headers = lowerdict(dict(headers))
+ msgid = first(_MSGID_RE.findall(
+ lower_headers.get('message-id', '')))
+
+ _hdoc = HeaderDocWrapper(
+ chash=chash, headers=headers, body=body_phash,
+ msgid=msgid)
+
+ def copy_attr(headers, key, doc):
+ if key in headers:
+ setattr(doc, key, headers[key])
+
+ copy_attr(lower_headers, "subject", _hdoc)
+ copy_attr(lower_headers, "date", _hdoc)
+
+ hdoc = _hdoc.serialize()
+ # add some of the attr from the parts map to header doc
+ for key in parts_map:
+ if key in ('body', 'multi', 'part_map'):
+ hdoc[key] = parts_map[key]
+ return stringify_parts_map(hdoc)
diff --git a/src/leap/bitmask/mail/adaptors/soledad_indexes.py b/src/leap/bitmask/mail/adaptors/soledad_indexes.py
new file mode 100644
index 0000000..eec7d28
--- /dev/null
+++ b/src/leap/bitmask/mail/adaptors/soledad_indexes.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+# soledad_indexes.py
+# Copyright (C) 2013, 2014 LEAP
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+Soledad Indexes for Mail Documents.
+"""
+
+# TODO
+# [ ] hide most of the constants here
+
+# Document Type, for indexing
+
+TYPE = "type"
+MBOX = "mbox"
+MBOX_UUID = "mbox_uuid"
+FLAGS = "flags"
+HEADERS = "head"
+CONTENT = "cnt"
+RECENT = "rct"
+HDOCS_SET = "hdocset"
+
+INCOMING_KEY = "incoming"
+ERROR_DECRYPTING_KEY = "errdecr"
+
+# indexing keys
+CONTENT_HASH = "chash"
+PAYLOAD_HASH = "phash"
+MSGID = "msgid"
+UID = "uid"
+
+
+# Index types
+# --------------
+
+TYPE_IDX = 'by-type'
+TYPE_MBOX_IDX = 'by-type-and-mbox'
+TYPE_MBOX_UUID_IDX = 'by-type-and-mbox-uuid'
+TYPE_SUBS_IDX = 'by-type-and-subscribed'
+TYPE_MSGID_IDX = 'by-type-and-message-id'
+TYPE_MBOX_SEEN_IDX = 'by-type-and-mbox-and-seen'
+TYPE_MBOX_RECENT_IDX = 'by-type-and-mbox-and-recent'
+TYPE_MBOX_DEL_IDX = 'by-type-and-mbox-and-deleted'
+TYPE_MBOX_C_HASH_IDX = 'by-type-and-mbox-and-contenthash'
+TYPE_C_HASH_IDX = 'by-type-and-contenthash'
+TYPE_C_HASH_PART_IDX = 'by-type-and-contenthash-and-partnumber'
+TYPE_P_HASH_IDX = 'by-type-and-payloadhash'
+
+# Soledad index for incoming mail, without decrypting errors.
+# and the backward-compatible index, will be deprecated at 0.7
+JUST_MAIL_IDX = "just-mail"
+JUST_MAIL_COMPAT_IDX = "just-mail-compat"
+
+
+# TODO
+# it would be nice to measure the cost of indexing
+# by many fields.
+
+# TODO
+# make the indexes dict more readable!
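+
+# Usage sketch: each entry below is registered via
+# store.create_index(name, *fields) and then queried positionally, e.g.
+# store.get_from_index(TYPE_MBOX_DEL_IDX, "flags", mbox_uuid, "1")
+# (see the soledad adaptor module for the actual calls).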
+
+MAIL_INDEXES = {
+ # generic
+ TYPE_IDX: [TYPE],
+ TYPE_MBOX_IDX: [TYPE, MBOX],
+ TYPE_MBOX_UUID_IDX: [TYPE, MBOX_UUID],
+
+ # XXX deprecate 0.4.0
+ # TYPE_MBOX_UID_IDX: [TYPE, MBOX, UID],
+
+ # mailboxes
+ TYPE_SUBS_IDX: [TYPE, 'bool(subscribed)'],
+
+ # fdocs uniqueness
+ TYPE_MBOX_C_HASH_IDX: [TYPE, MBOX, CONTENT_HASH],
+
+ # headers doc - search by msgid.
+ TYPE_MSGID_IDX: [TYPE, MSGID],
+
+ # content, headers doc
+ TYPE_C_HASH_IDX: [TYPE, CONTENT_HASH],
+
+ # attachment payload dedup
+ TYPE_P_HASH_IDX: [TYPE, PAYLOAD_HASH],
+
+ # messages
+ TYPE_MBOX_SEEN_IDX: [TYPE, MBOX_UUID, 'bool(seen)'],
+ TYPE_MBOX_RECENT_IDX: [TYPE, MBOX_UUID, 'bool(recent)'],
+ TYPE_MBOX_DEL_IDX: [TYPE, MBOX_UUID, 'bool(deleted)'],
+
+ # incoming queue
+ JUST_MAIL_IDX: ["bool(%s)" % (INCOMING_KEY,),
+ "bool(%s)" % (ERROR_DECRYPTING_KEY,)],
+}
diff --git a/src/leap/bitmask/mail/adaptors/tests/rfc822.message b/src/leap/bitmask/mail/adaptors/tests/rfc822.message
new file mode 120000
index 0000000..b19cc28
--- /dev/null
+++ b/src/leap/bitmask/mail/adaptors/tests/rfc822.message
@@ -0,0 +1 @@
+../../tests/rfc822.message \ No newline at end of file
diff --git a/src/leap/bitmask/mail/adaptors/tests/test_models.py b/src/leap/bitmask/mail/adaptors/tests/test_models.py
new file mode 100644
index 0000000..b82cfad
--- /dev/null
+++ b/src/leap/bitmask/mail/adaptors/tests/test_models.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+# test_models.py
+# Copyright (C) 2014 LEAP
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+Tests for the leap.mail.adaptors.models module.
+"""
+from twisted.trial import unittest
+
+from leap.mail.adaptors import models
+
+
+class SerializableModelsTestCase(unittest.TestCase):
+
+ def test_good_serialized_model(self):
+
+ class M(models.SerializableModel):
+ foo = 42
+ bar = 33
+ baaz_ = None
+ _nope = 0
+ __nope = 0
+
+ def not_today(self):
+ pass
+
+ class IgnoreMe(object):
+ pass
+
+ def killmeplease(x):
+ return x
+
+ serialized = M.serialize()
+ expected = {'foo': 42, 'bar': 33, 'baaz': None}
+ self.assertEqual(serialized, expected)
+
+
+class DocumentWrapperTestCase(unittest.TestCase):
+
+ def test_wrapper_defaults(self):
+
+ class Wrapper(models.DocumentWrapper):
+ class model(models.SerializableModel):
+ foo = 42
+ bar = 11
+
+ wrapper = Wrapper()
+ wrapper._ignored = True
+ serialized = wrapper.serialize()
+ expected = {'foo': 42, 'bar': 11}
+ self.assertEqual(serialized, expected)
+
+ def test_initialized_wrapper(self):
+
+ class Wrapper(models.DocumentWrapper):
+ class model(models.SerializableModel):
+ foo = 42
+ bar_ = 11
+
+ wrapper = Wrapper(foo=0, bar=-1)
+ serialized = wrapper.serialize()
+ expected = {'foo': 0, 'bar': -1}
+ self.assertEqual(serialized, expected)
+
+ wrapper.foo = 23
+ serialized = wrapper.serialize()
+ expected = {'foo': 23, 'bar': -1}
+ self.assertEqual(serialized, expected)
+
+ wrapper = Wrapper(foo=0)
+ serialized = wrapper.serialize()
+ expected = {'foo': 0, 'bar': 11}
+ self.assertEqual(serialized, expected)
+
+ def test_invalid_initialized_wrapper(self):
+
+ class Wrapper(models.DocumentWrapper):
+ class model(models.SerializableModel):
+ foo = 42
+
+ def getwrapper():
+ return Wrapper(bar=1)
+ self.assertRaises(RuntimeError, getwrapper)
+
+ def test_no_model_wrapper(self):
+
+ class Wrapper(models.DocumentWrapper):
+ pass
+
+ def getwrapper():
+ w = Wrapper()
+ w.foo = None
+
+ self.assertRaises(RuntimeError, getwrapper)
diff --git a/src/leap/bitmask/mail/adaptors/tests/test_soledad_adaptor.py b/src/leap/bitmask/mail/adaptors/tests/test_soledad_adaptor.py
new file mode 100644
index 0000000..73eaf16
--- /dev/null
+++ b/src/leap/bitmask/mail/adaptors/tests/test_soledad_adaptor.py
@@ -0,0 +1,529 @@
+# -*- coding: utf-8 -*-
+# test_soledad_adaptor.py
+# Copyright (C) 2014 LEAP
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+Tests for the Soledad Adaptor module - leap.mail.adaptors.soledad
+"""
+import os
+from functools import partial
+
+from twisted.internet import defer
+
+from leap.mail.adaptors import models
+from leap.mail.adaptors.soledad import SoledadDocumentWrapper
+from leap.mail.adaptors.soledad import SoledadIndexMixin
+from leap.mail.adaptors.soledad import SoledadMailAdaptor
+from leap.mail.testing.common import SoledadTestMixin
+
+from email.MIMEMultipart import MIMEMultipart
+from email.mime.text import MIMEText
+
+# DEBUG
+# import logging
+# logging.basicConfig(level=logging.DEBUG)
+
+
+class CounterWrapper(SoledadDocumentWrapper):
+ class model(models.SerializableModel):
+ counter = 0
+ flag = None
+
+
+class CharacterWrapper(SoledadDocumentWrapper):
+ class model(models.SerializableModel):
+ name = ""
+ age = 20
+
+
+class ActorWrapper(SoledadDocumentWrapper):
+ class model(models.SerializableModel):
+ type_ = "actor"
+ name = None
+
+ class __meta__(object):
+ index = "name"
+ list_index = ("by-type", "type_")
+
+
+class TestAdaptor(SoledadIndexMixin):
+ indexes = {'by-name': ['name'],
+ 'by-type-and-name': ['type', 'name'],
+ 'by-type': ['type']}
+
+
+class SoledadDocWrapperTestCase(SoledadTestMixin):
+ """
+ Tests for the SoledadDocumentWrapper.
+ """
+ def assert_num_docs(self, num, docs):
+ self.assertEqual(len(docs[1]), num)
+
+ def test_create_single(self):
+
+ store = self._soledad
+ wrapper = CounterWrapper()
+
+ def assert_one_doc(docs):
+ self.assertEqual(docs[0], 1)
+
+ d = wrapper.create(store)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(assert_one_doc)
+ return d
+
+ def test_create_many(self):
+
+ store = self._soledad
+ w1 = CounterWrapper()
+ w2 = CounterWrapper(counter=1)
+ w3 = CounterWrapper(counter=2)
+ w4 = CounterWrapper(counter=3)
+ w5 = CounterWrapper(counter=4)
+
+ d1 = [w1.create(store),
+ w2.create(store),
+ w3.create(store),
+ w4.create(store),
+ w5.create(store)]
+
+ d = defer.gatherResults(d1)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 5))
+ return d
+
+ def test_multiple_updates(self):
+
+ store = self._soledad
+ wrapper = CounterWrapper(counter=1)
+ MAX = 100
+
+ def assert_doc_id(doc):
+ self.assertTrue(wrapper._doc_id is not None)
+ return doc
+
+ def assert_counter_initial_ok(doc):
+ self.assertEqual(wrapper.counter, 1)
+
+ def increment_counter(ignored):
+ d1 = []
+
+ def record_revision(revision):
+ rev = int(revision.split(':')[1])
+ self.results.append(rev)
+
+ for i in list(range(MAX)):
+ wrapper.counter += 1
+ wrapper.flag = i % 2 == 0
+ d = wrapper.update(store)
+ d.addCallback(record_revision)
+ d1.append(d)
+
+ return defer.gatherResults(d1)
+
+ def assert_counter_final_ok(doc):
+ self.assertEqual(doc.content['counter'], MAX + 1)
+ self.assertEqual(doc.content['flag'], False)
+
+ def assert_results_ordered_list(ignored):
+ self.assertEqual(self.results, sorted(range(2, MAX + 2)))
+
+ d = wrapper.create(store)
+ d.addCallback(assert_doc_id)
+ d.addCallback(assert_counter_initial_ok)
+ d.addCallback(increment_counter)
+ d.addCallback(lambda _: store.get_doc(wrapper._doc_id))
+ d.addCallback(assert_counter_final_ok)
+ d.addCallback(assert_results_ordered_list)
+ return d
+
+ def test_delete(self):
+ adaptor = TestAdaptor()
+ store = self._soledad
+
+ wrapper_list = []
+
+ def get_or_create_bob(ignored):
+ def add_to_list(wrapper):
+ wrapper_list.append(wrapper)
+ return wrapper
+ wrapper = CharacterWrapper.get_or_create(
+ store, 'by-name', 'bob')
+ wrapper.addCallback(add_to_list)
+ return wrapper
+
+ def delete_bob(ignored):
+ wrapper = wrapper_list[0]
+ return wrapper.delete(store)
+
+ d = adaptor.initialize_store(store)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 0))
+
+ # this should create bob document
+ d.addCallback(get_or_create_bob)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 1))
+
+ d.addCallback(delete_bob)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 0))
+ return d
+
+ def test_get_or_create(self):
+ adaptor = TestAdaptor()
+ store = self._soledad
+
+ def get_or_create_bob(ignored):
+ wrapper = CharacterWrapper.get_or_create(
+ store, 'by-name', 'bob')
+ return wrapper
+
+ d = adaptor.initialize_store(store)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 0))
+
+ # this should create bob document
+ d.addCallback(get_or_create_bob)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 1))
+
+ # this should get us bob document
+ d.addCallback(get_or_create_bob)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 1))
+ return d
+
+ def test_get_or_create_multi_index(self):
+ adaptor = TestAdaptor()
+ store = self._soledad
+
+ def get_or_create_actor_harry(ignored):
+ wrapper = ActorWrapper.get_or_create(
+ store, 'by-type-and-name', 'harrison')
+ return wrapper
+
+ def create_director_harry(ignored):
+ wrapper = ActorWrapper(name="harrison", type="director")
+ return wrapper.create(store)
+
+ d = adaptor.initialize_store(store)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 0))
+
+ # this should create harrison document
+ d.addCallback(get_or_create_actor_harry)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 1))
+
+ # this should get us harrison document
+ d.addCallback(get_or_create_actor_harry)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 1))
+
+ # create director harry, should create new doc
+ d.addCallback(create_director_harry)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 2))
+
+ # this should get us harrison document, still 2 docs
+ d.addCallback(get_or_create_actor_harry)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 2))
+ return d
+
+ def test_get_all(self):
+ adaptor = TestAdaptor()
+ store = self._soledad
+ actor_names = ["harry", "carrie", "mark", "david"]
+
+ def create_some_actors(ignored):
+ deferreds = []
+ for name in actor_names:
+ dw = ActorWrapper.get_or_create(
+ store, 'by-type-and-name', name)
+ deferreds.append(dw)
+ return defer.gatherResults(deferreds)
+
+ d = adaptor.initialize_store(store)
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 0))
+
+ d.addCallback(create_some_actors)
+
+ d.addCallback(lambda _: store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 4))
+
+ def assert_actor_list_is_expected(res):
+ got = set([actor.name for actor in res])
+ expected = set(actor_names)
+ self.assertEqual(got, expected)
+
+ d.addCallback(lambda _: ActorWrapper.get_all(store))
+ d.addCallback(assert_actor_list_is_expected)
+ return d
+
+HERE = os.path.split(os.path.abspath(__file__))[0]
+
+
+class MessageClass(object):
+ def __init__(self, wrapper, uid):
+ self.wrapper = wrapper
+ self.uid = uid
+
+ def get_wrapper(self):
+ return self.wrapper
+
+
+class SoledadMailAdaptorTestCase(SoledadTestMixin):
+ """
+ Tests for the SoledadMailAdaptor.
+ """
+
+ def get_adaptor(self):
+ adaptor = SoledadMailAdaptor()
+ adaptor.store = self._soledad
+ return adaptor
+
+ def assert_num_docs(self, num, docs):
+ self.assertEqual(len(docs[1]), num)
+
+ def test_mail_adaptor_init(self):
+ adaptor = self.get_adaptor()
+ self.assertTrue(isinstance(adaptor.indexes, dict))
+ self.assertTrue(len(adaptor.indexes) != 0)
+
+ # Messages
+
+ def test_get_msg_from_string(self):
+ adaptor = self.get_adaptor()
+
+ with open(os.path.join(HERE, "rfc822.message")) as f:
+ raw = f.read()
+
+ msg = adaptor.get_msg_from_string(MessageClass, raw)
+
+ chash = ("D27B2771C0DCCDCB468EE65A4540438"
+ "09DBD11588E87E951545BE0CBC321C308")
+ phash = ("64934534C1C80E0D4FA04BE1CCBA104"
+ "F07BCA5F469C86E2C0ABE1D41310B7299")
+ subject = ("[Twisted-commits] rebuild now works on "
+ "python versions from 2.2.0 and up.")
+ self.assertTrue(msg.wrapper.fdoc is not None)
+ self.assertTrue(msg.wrapper.hdoc is not None)
+ self.assertTrue(msg.wrapper.cdocs is not None)
+ self.assertEquals(len(msg.wrapper.cdocs), 1)
+ self.assertEquals(msg.wrapper.fdoc.chash, chash)
+ self.assertEquals(msg.wrapper.fdoc.size, 3837)
+ self.assertEquals(msg.wrapper.hdoc.chash, chash)
+ self.assertEqual(dict(msg.wrapper.hdoc.headers)['Subject'],
+ subject)
+ self.assertEqual(msg.wrapper.hdoc.subject, subject)
+ self.assertEqual(msg.wrapper.cdocs[1].phash, phash)
+
+ def test_get_msg_from_string_multipart(self):
+ msg = MIMEMultipart()
+ msg['Subject'] = 'Test multipart mail'
+ msg.attach(MIMEText(u'a utf8 message', _charset='utf-8'))
+ adaptor = self.get_adaptor()
+
+ msg = adaptor.get_msg_from_string(MessageClass, msg.as_string())
+
+ self.assertEqual(
+ 'base64', msg.wrapper.cdocs[1].content_transfer_encoding)
+ self.assertEqual(
+ 'text/plain', msg.wrapper.cdocs[1].content_type)
+ self.assertEqual(
+ 'YSB1dGY4IG1lc3NhZ2U=\n', msg.wrapper.cdocs[1].raw)
+
+ def test_get_msg_from_docs(self):
+ adaptor = self.get_adaptor()
+ mdoc = dict(
+ fdoc="F-Foobox-deadbeef",
+ hdoc="H-deadbeef",
+ cdocs=["C-deadabad"])
+ fdoc = dict(
+ mbox_uuid="Foobox",
+ flags=('\Seen', '\Nice'),
+ tags=('Personal', 'TODO'),
+ seen=False, deleted=False,
+ recent=False, multi=False)
+ hdoc = dict(
+ chash="deadbeef",
+ subject="Test Msg")
+ cdocs = {
+ 1: dict(
+ raw='This is a test message')}
+
+ msg = adaptor.get_msg_from_docs(
+ MessageClass, mdoc, fdoc, hdoc, cdocs=cdocs)
+ self.assertEqual(msg.wrapper.fdoc.flags,
+ ('\Seen', '\Nice'))
+ self.assertEqual(msg.wrapper.fdoc.tags,
+ ('Personal', 'TODO'))
+ self.assertEqual(msg.wrapper.fdoc.mbox_uuid, "Foobox")
+ self.assertEqual(msg.wrapper.hdoc.multi, False)
+ self.assertEqual(msg.wrapper.hdoc.subject,
+ "Test Msg")
+ self.assertEqual(msg.wrapper.cdocs[1].raw,
+ "This is a test message")
+
+ def test_get_msg_from_metamsg_doc_id(self):
+ # TODO complete-me!
+ pass
+
+ test_get_msg_from_metamsg_doc_id.skip = "Not yet implemented"
+
+ def test_create_msg(self):
+ adaptor = self.get_adaptor()
+
+ with open(os.path.join(HERE, "rfc822.message")) as f:
+ raw = f.read()
+ msg = adaptor.get_msg_from_string(MessageClass, raw)
+
+ def check_create_result(created):
+ # that's one mdoc, one hdoc, one fdoc, one cdoc
+ self.assertEqual(len(created), 4)
+ for doc in created:
+ self.assertTrue(
+ doc.__class__.__name__,
+ "SoledadDocument")
+
+ d = adaptor.create_msg(adaptor.store, msg)
+ d.addCallback(check_create_result)
+ return d
+
+ def test_update_msg(self):
+ adaptor = self.get_adaptor()
+ with open(os.path.join(HERE, "rfc822.message")) as f:
+ raw = f.read()
+
+ def assert_msg_has_doc_id(ignored, msg):
+ wrapper = msg.get_wrapper()
+ self.assertTrue(wrapper.fdoc.doc_id is not None)
+
+ def assert_msg_has_no_flags(ignored, msg):
+ wrapper = msg.get_wrapper()
+ self.assertEqual(wrapper.fdoc.flags, [])
+
+ def update_msg_flags(ignored, msg):
+ wrapper = msg.get_wrapper()
+ wrapper.fdoc.flags = ["This", "That"]
+ return wrapper.update(adaptor.store)
+
+ def assert_msg_has_flags(ignored, msg):
+ wrapper = msg.get_wrapper()
+ self.assertEqual(wrapper.fdoc.flags, ["This", "That"])
+
+ def get_fdoc_and_check_flags(ignored):
+ def assert_doc_has_flags(doc):
+ self.assertEqual(doc.content['flags'],
+ ['This', 'That'])
+ wrapper = msg.get_wrapper()
+ d = adaptor.store.get_doc(wrapper.fdoc.doc_id)
+ d.addCallback(assert_doc_has_flags)
+ return d
+
+ msg = adaptor.get_msg_from_string(MessageClass, raw)
+ d = adaptor.create_msg(adaptor.store, msg)
+ d.addCallback(lambda _: adaptor.store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 4))
+ d.addCallback(assert_msg_has_doc_id, msg)
+ d.addCallback(assert_msg_has_no_flags, msg)
+
+ # update it!
+ d.addCallback(update_msg_flags, msg)
+ d.addCallback(assert_msg_has_flags, msg)
+ d.addCallback(get_fdoc_and_check_flags)
+ return d
+
+ # Mailboxes
+
+ def test_get_or_create_mbox(self):
+ adaptor = self.get_adaptor()
+
+ def get_or_create_mbox(ignored):
+ d = adaptor.get_or_create_mbox(adaptor.store, "Trash")
+ return d
+
+ def assert_good_doc(mbox_wrapper):
+ self.assertTrue(mbox_wrapper.doc_id is not None)
+ self.assertEqual(mbox_wrapper.mbox, "Trash")
+ self.assertEqual(mbox_wrapper.type, "mbox")
+ self.assertEqual(mbox_wrapper.closed, False)
+ self.assertEqual(mbox_wrapper.subscribed, False)
+
+ d = adaptor.initialize_store(adaptor.store)
+ d.addCallback(get_or_create_mbox)
+ d.addCallback(assert_good_doc)
+ d.addCallback(lambda _: adaptor.store.get_all_docs())
+ d.addCallback(partial(self.assert_num_docs, 1))
+ return d
+
+ def test_update_mbox(self):
+ adaptor = self.get_adaptor()
+
+ wrapper_ref = []
+
+ def get_or_create_mbox(ignored):
+ d = adaptor.get_or_create_mbox(adaptor.store, "Trash")
+ return d
+
+ def update_wrapper(wrapper, wrapper_ref):
+ wrapper_ref.append(wrapper)
+ wrapper.subscribed = True
+ wrapper.closed = True
+ d = adaptor.update_mbox(adaptor.store, wrapper)
+ return d
+
+ def get_mbox_doc_and_check_flags(res, wrapper_ref):
+ wrapper = wrapper_ref[0]
+
+ def assert_doc_has_flags(doc):
+ self.assertEqual(doc.content['subscribed'], True)
+ self.assertEqual(doc.content['closed'], True)
+ d = adaptor.store.get_doc(wrapper.doc_id)
+ d.addCallback(assert_doc_has_flags)
+ return d
+
+ d = adaptor.initialize_store(adaptor.store)
+ d.addCallback(get_or_create_mbox)
+ d.addCallback(update_wrapper, wrapper_ref)
+ d.addCallback(get_mbox_doc_and_check_flags, wrapper_ref)
+ return d
+
+ def test_get_all_mboxes(self):
+ adaptor = self.get_adaptor()
+ mboxes = ("Sent", "Trash", "Personal", "ListFoo")
+
+ def get_or_create_mboxes(ignored):
+ d = []
+ for mbox in mboxes:
+ d.append(adaptor.get_or_create_mbox(
+ adaptor.store, mbox))
+ return defer.gatherResults(d)
+
+ def get_all_mboxes(ignored):
+ return adaptor.get_all_mboxes(adaptor.store)
+
+ def assert_mboxes_match_expected(wrappers):
+ names = [m.mbox for m in wrappers]
+ self.assertEqual(set(names), set(mboxes))
+
+ d = adaptor.initialize_store(adaptor.store)
+ d.addCallback(get_or_create_mboxes)
+ d.addCallback(get_all_mboxes)
+ d.addCallback(assert_mboxes_match_expected)
+ return d