author      Kali Kaneko (leap communications) <kali@leap.se>    2017-05-24 01:55:13 +0200
committer   Kali Kaneko (leap communications) <kali@leap.se>    2017-05-24 14:35:43 +0200
commit      0bba2d84a584396e888d1f4cd0d0011f5137ab8b (patch)
tree        026e0b4dc16689d03899a6d31eeb5a0e49d7ebff /src
parent      ba2ae87fa16f1f8aa56faa25b8ab1fdd1fd3b7d3 (diff)
[bug] fix sending mail error from pixelated
- Create the 'Sent' folder ourselves to avoid Pixelated hitting a bug in
  mailbox creation.
- I believe there's still a problem with the Bitmask design for the adaptor
  (in get-or-create mailbox). This needs further testing.
- Case manipulation to avoid ending up with both a 'Sent' and a 'SENT' folder
  when Thunderbird and Pixelated write to them (a rough sketch of the idea
  follows this list).
- Further hacks to monkeypatch the leap-mail-adapter that Pixelated
  uses (make it reuse the account instance!). This is getting insane;
  I am really looking forward to the fork.
- Duly note our technical debt in the area of Pixelated integration.
  Keeping the Pixelated codebase untouched for a long time will
  backfire. As far as I've noticed, we have a basic violation of the
  assumptions about single-instance writes and notifications to all
  listeners. As commented in the commit, this should go either towards a
  guarantee that only one account object is created per user (creating
  it in the bootstrapping process in Bitmask), or in the opposite
  direction, where listeners are notified by some other mechanism
  (zmq events, for instance).
- In any case, it's strongly recommended to deduplicate the Pixelated
  libraries as soon as possible and make Pixelated use a better-defined
  set of Bitmask's public APIs.
- Modify the wrapper create methods so that they return the modified
wrapper itself.
- Resolves: #8903, #8904
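
The case-folding idea mentioned above ('Sent' vs 'SENT') boils down to looking
up existing mailbox names case-insensitively before creating a new one. A
minimal, self-contained sketch of that idea, using plain functions and made-up
names rather than the actual Account/LeapMailStore API (the real change is in
pixelizer.py below):

    # Illustrative only: mirrors the case-insensitive get-or-create idea
    # with plain data structures instead of the real mailbox objects.

    def find_existing_mailbox(requested, existing):
        """Return the existing name matching `requested` case-insensitively,
        or None when there is no match."""
        lowered = [name.lower() for name in existing]
        wanted = requested.lower()
        if wanted in lowered:
            return existing[lowered.index(wanted)]
        return None

    def get_or_create_mailbox(requested, existing):
        """Reuse 'Sent' instead of creating 'SENT'; only create a mailbox
        when no case variant of it exists yet."""
        match = find_existing_mailbox(requested, existing)
        if match is not None:
            return match, existing
        return requested, existing + [requested]

    boxes = ['INBOX', 'Sent', 'Trash']
    print(get_or_create_mailbox('SENT', boxes))    # reuses 'Sent'
    print(get_or_create_mailbox('Drafts', boxes))  # creates 'Drafts'
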
Diffstat (limited to 'src')
-rw-r--r--   src/leap/bitmask/mail/adaptors/soledad.py   146
-rw-r--r--   src/leap/bitmask/mail/imap/mailbox.py          1
-rw-r--r--   src/leap/bitmask/mail/incoming/service.py      4
-rw-r--r--   src/leap/bitmask/mail/mail.py                117
-rw-r--r--   src/leap/bitmask/mua/pixelizer.py             64
5 files changed, 185 insertions, 147 deletions
diff --git a/src/leap/bitmask/mail/adaptors/soledad.py b/src/leap/bitmask/mail/adaptors/soledad.py
index f220aea0..25e4ad31 100644
--- a/src/leap/bitmask/mail/adaptors/soledad.py
+++ b/src/leap/bitmask/mail/adaptors/soledad.py
@@ -109,6 +109,7 @@ class SoledadDocumentWrapper(models.DocumentWrapper):
     def set_future_doc_id(self, doc_id):
         self._future_doc_id = doc_id

+    @defer.inlineCallbacks
     def create(self, store, is_copy=False):
         """
         Create the documents for this wrapper.
@@ -123,37 +124,36 @@ class SoledadDocumentWrapper(models.DocumentWrapper):
             Soledad document has been created.
         :rtype: Deferred
         """
-        leap_assert(self._doc_id is None,
-                    "This document already has a doc_id!")
+        assert self.doc_id is None, "This document already has a doc_id!"

-        def update_doc_id(doc):
-            self._doc_id = doc.doc_id
+        print "FUTURE", self.future_doc_id
+        try:
+            if self.future_doc_id is None:
+                newdoc = yield store.create_doc(
+                    self.serialize())
+            else:
+                newdoc = yield store.create_doc(
+                    self.serialize(), doc_id=self.future_doc_id)
+            self._doc_id = newdoc.doc_id
             self.set_future_doc_id(None)
-            return doc
-
-        def update_wrapper(failure):
-            # In the case of some copies (for instance, from one folder to
-            # another and back to the original folder), the document that we
-            # want to insert already exists. In this case, putting it
-            # and overwriting the document with that doc_id is the right thing
-            # to do.
-            failure.trap(l2db.errors.RevisionConflict)
-            self._doc_id = self.future_doc_id
-            self._future_doc_id = None
-            return self.update(store)
-
-        if self.future_doc_id is None:
-            d = store.create_doc(self.serialize())
-        else:
-            d = store.create_doc(self.serialize(),
-                                 doc_id=self.future_doc_id)
-        d.addCallback(update_doc_id)
+        except l2db.errors.RevisionConflict:
+            if is_copy:
+                # In the case of some copies (for instance, from one folder to
+                # another and back to the original folder), the document that
+                # we want to insert already exists. In this case, putting it
+                # and overwriting the document with that doc_id is the right
+                # thing to do.
+                self._doc_id = self.future_doc_id
+                self._future_doc_id = None
+                yield self.update(store)
+            else:
+                self.log.warn(
+                    'Revision conflict, ignoring: %s' % self.future_doc_id)
+        except Exception as exc:
+            self.log.warn('Error while creating %s: %r' % (
+                self.future_doc_id, exc))

-        if is_copy:
-            d.addErrback(update_wrapper)
-        else:
-            d.addErrback(self._catch_revision_conflict, self.future_doc_id)
-        return d
+        defer.returnValue(self)

     def update(self, store):
         """
@@ -167,29 +167,21 @@ class SoledadDocumentWrapper(models.DocumentWrapper):
         return self._lock.run(self._update, store)

     def _update(self, store):
-        leap_assert(self._doc_id is not None,
-                    "Need to create doc before updating")
+        assert self._doc_id is not None, "Need to create doc before updating"
+
+        def log_error(failure, doc_id):
+            self.log.warn('Error while updating %s' % doc_id)

         def update_and_put_doc(doc):
             doc.content.update(self.serialize())
             d = store.put_doc(doc)
-            d.addErrback(self._catch_revision_conflict, doc.doc_id)
+            d.addErrback(log_error, doc.doc_id)
             return d

         d = store.get_doc(self._doc_id)
         d.addCallback(update_and_put_doc)
         return d

-    def _catch_revision_conflict(self, failure, doc_id):
-        # XXX We can have some RevisionConflicts if we try
-        # to put the docs that are already there.
-        # This can happen right now when creating/saving the cdocs
-        # during a copy. Instead of catching and ignoring this
-        # error, we should mark them in the copy so there is no attempt to
-        # create/update them.
-        failure.trap(l2db.errors.RevisionConflict)
-        self.log.debug('Got conflict while putting %s' % doc_id)
-
     def delete(self, store):
         """
         Delete the documents for this wrapper.
@@ -511,6 +503,7 @@ class MessageWrapper(object):
                 self.log.warn('Empty raw field in cdoc %s' % doc_id)
             cdoc.set_future_doc_id(doc_id)

+    @defer.inlineCallbacks
     def create(self, store, notify_just_mdoc=False, pending_inserts_dict=None):
         """
         Create all the parts for this message in the store.
@@ -545,15 +538,11 @@ class MessageWrapper(object):
         if pending_inserts_dict is None:
             pending_inserts_dict = {}

-        leap_assert(self.cdocs,
-                    "Need non empty cdocs to create the "
-                    "MessageWrapper documents")
-        leap_assert(self.mdoc.doc_id is None,
-                    "Cannot create: mdoc has a doc_id")
-        leap_assert(self.fdoc.doc_id is None,
-                    "Cannot create: fdoc has a doc_id")
+        assert self.cdocs, "Need cdocs to create the MessageWrapper docs"
+        assert self.mdoc.doc_id is None, "Cannot create: mdoc has a doc_id"
+        assert self.fdoc.doc_id is None, "Cannot create: fdoc has a doc_id"

-        def unblock_pending_insert(result):
+        def maybe_unblock_pending():
             if pending_inserts_dict:
                 ci_headers = lowerdict(self.hdoc.headers)
                 msgid = ci_headers.get('message-id', None)
@@ -562,45 +551,44 @@
                     d.callback(msgid)
                 except KeyError:
                     pass
-            return result

-        # TODO check that the doc_ids in the mdoc are coherent
-        self.d = []
+        copy = self._is_copy

         try:
-            mdoc_created = self.mdoc.create(store, is_copy=self._is_copy)
-        except Exception:
-            self.log.failure("Error creating mdoc")
-        try:
-            fdoc_created = self.fdoc.create(store, is_copy=self._is_copy)
+            mdoc = yield self.mdoc.create(store, is_copy=copy)
+            print "GOT MDOC >>>>>>>>>>>>>>>>>>", mdoc, "copy?", copy
+            assert mdoc
+            self.mdoc = mdoc
         except Exception:
-            self.log.failure("Error creating fdoc")
+            self.log.failure('Error creating mdoc')

-        self.d.append(mdoc_created)
-        self.d.append(fdoc_created)
+        if notify_just_mdoc:
+            # fire and forget, fast notifies
+            self.fdoc.create(store, is_copy=copy)
+            if not copy:
+                if self.hdoc.doc_id is None:
+                    self.hdoc.create(store)
+                for cdoc in self.cdocs.values():
+                    if cdoc.doc_id is not None:
+                        continue
+                    cdoc.create(store)

-        if not self._is_copy:
-            if self.hdoc.doc_id is None:
-                self.d.append(self.hdoc.create(store))
-            for cdoc in self.cdocs.values():
-                if cdoc.doc_id is not None:
-                    # we could be just linking to an existing
-                    # content-doc.
-                    continue
-                self.d.append(cdoc.create(store))
+        else:
+            yield self.fdoc.create(store, is_copy=copy)
+            if not copy:
+                if self.hdoc.doc_id is None:
+                    yield self.hdoc.create(store)
+                for cdoc in self.cdocs.values():
+                    if cdoc.doc_id is not None:
+                        # we could be just linking to an existing
+                        # content-doc.
+                        continue
+                    yield cdoc.create(store)

-        def log_all_inserted(result):
-            self.log.debug('All parts inserted for msg!')
-            return result
+        maybe_unblock_pending()
+        defer.returnValue(self)

-        self.all_inserted_d = defer.gatherResults(self.d, consumeErrors=True)
-        self.all_inserted_d.addCallback(log_all_inserted)
-        self.all_inserted_d.addCallback(unblock_pending_insert)
-
-        if notify_just_mdoc:
-            return mdoc_created
-        else:
-            return self.all_inserted_d

     def update(self, store):
         """
diff --git a/src/leap/bitmask/mail/imap/mailbox.py b/src/leap/bitmask/mail/imap/mailbox.py
index 9e74cfc9..0820a040 100644
--- a/src/leap/bitmask/mail/imap/mailbox.py
+++ b/src/leap/bitmask/mail/imap/mailbox.py
@@ -382,6 +382,7 @@ class IMAPMailbox(object):
         d = self.collection.add_msg(message, flags, date=date,
                                     notify_just_mdoc=notify_just_mdoc)
+        d.addCallback(lambda message: message.get_uid())
         d.addErrback(
             lambda failure: self.log.failure('Error while adding msg'))
         return d
diff --git a/src/leap/bitmask/mail/incoming/service.py b/src/leap/bitmask/mail/incoming/service.py
index 1b132cdc..f1f4ab22 100644
--- a/src/leap/bitmask/mail/incoming/service.py
+++ b/src/leap/bitmask/mail/incoming/service.py
@@ -832,8 +832,8 @@ class IncomingMail(Service):

         def msgSavedCallback(result):
-            if empty(result):
-                return
+            #if empty(result):
+                #return

             def signal_deleted(doc_id):
                 emit_async(catalog.MAIL_MSG_DELETED_INCOMING,
diff --git a/src/leap/bitmask/mail/mail.py b/src/leap/bitmask/mail/mail.py
index 92435fdb..3dbb0a1f 100644
--- a/src/leap/bitmask/mail/mail.py
+++ b/src/leap/bitmask/mail/mail.py
@@ -578,6 +578,7 @@ class MessageCollection(object):

     # Manipulate messages

+    @defer.inlineCallbacks
     def add_msg(self, raw_msg, flags=tuple(), tags=tuple(), date="",
                 notify_just_mdoc=False):
         """
@@ -603,8 +604,8 @@ class MessageCollection(object):
             message is met, which currently is a specific mozilla header.
         :type notify_just_mdoc: bool

-        :returns: a deferred that will fire with the UID of the inserted
-                  message.
+        :returns: a deferred that will fire with a Message when this is
+                  inserted.
         :rtype: deferred
         """
         # TODO watch out if the use of this method in IMAP COPY/APPEND is
@@ -630,43 +631,43 @@ class MessageCollection(object):

         if not self.is_mailbox_collection():
             raise NotImplementedError()
-        else:
-            mbox_id = self.mbox_uuid
-            wrapper.set_mbox_uuid(mbox_id)
-            wrapper.set_flags(flags)
-            wrapper.set_tags(tags)
-            wrapper.set_date(date)
-
-        def insert_mdoc_id(_, wrapper):
-            doc_id = wrapper.mdoc.doc_id
-            if not doc_id:
-                # --- BUG -----------------------------------------
-                # XXX watch out, sometimes mdoc doesn't have doc_id
-                # but it has future_id. Should be solved already.
-                self.log.error('BUG: (please report) Null doc_id for '
-                               'document %s' %
-                               (wrapper.mdoc.serialize(),))
-                return defer.succeed("mdoc_id not inserted")
-                # XXX BUG -----------------------------------------
-
-            # XXX BUG sometimes the table is not yet created,
-            # so workaround is to make sure we always check for it before
-            # inserting the doc. I should debug into the real cause.
-            d = self.mbox_indexer.create_table(self.mbox_uuid)
-            d.addBoth(lambda _: self.mbox_indexer.insert_doc(
-                self.mbox_uuid, doc_id))
-            return d
+        mbox_id = self.mbox_uuid
+        assert mbox_id is not None
+        wrapper.set_mbox_uuid(mbox_id)
+        wrapper.set_flags(flags)
+        wrapper.set_tags(tags)
+        wrapper.set_date(date)

-        d = wrapper.create(
-            self.store,
-            notify_just_mdoc=notify_just_mdoc,
-            pending_inserts_dict=self._pending_inserts)
-        d.addCallback(insert_mdoc_id, wrapper)
-        d.addCallback(self.cb_signal_unread_to_ui)
-        d.addCallback(self.notify_new_to_listeners)
-        d.addErrback(lambda f: self.log.error('Error adding msg!'))
+        try:
+            updated_wrapper = yield wrapper.create(
+                self.store,
+                notify_just_mdoc=notify_just_mdoc,
+                pending_inserts_dict=self._pending_inserts)

-        return d
+            doc_id = updated_wrapper.mdoc.doc_id
+            if not doc_id:
+                doc_id = updated_wrapper.mdoc.future_doc_id
+            assert doc_id
+
+        except Exception:
+            self.log.failure('Error creating message')
+            raise
+
+        # XXX BUG sometimes the table is not yet created,
+        # so workaround is to make sure we always check for it before
+        # inserting the doc. I should debug into the real cause.
+        # It might be related to _get_or_create_mbox creating the mbox
+        # wrapper but not going through the add_mailbox path that calls the
+        # indexer.
+        try:
+            yield self.mbox_indexer.create_table(self.mbox_uuid)
+            uid = yield self.mbox_indexer.insert_doc(self.mbox_uuid, doc_id)
+        except Exception:
+            self.log.failure('Error indexing message')
+        else:
+            self.cb_signal_unread_to_ui()
+            self.notify_new_to_listeners()
+            defer.returnValue(Message(wrapper, uid))

     # Listeners

@@ -676,28 +677,20 @@ class MessageCollection(object):
     def removeListener(self, listener):
         self._listeners.remove(listener)

-    def notify_new_to_listeners(self, result):
+    def notify_new_to_listeners(self):
         for listener in self._listeners:
             listener.notify_new()
-        return result

-    def cb_signal_unread_to_ui(self, result):
+    def cb_signal_unread_to_ui(self, *args):
         """
         Sends an unread event to ui, passing *only* the number of unread
         messages if *this* is the inbox. This event is catched, for instance,
         in the Bitmask client that displays a message with the number of
         unread mails in the INBOX.
-
-        Used as a callback in several commands.
-
-        :param result: ignored
         """
-        # TODO it might make sense to modify the event so that
-        # it receives both the mailbox name AND the number of unread messages.
         if self.mbox_name.lower() == "inbox":
             d = defer.maybeDeferred(self.count_unseen)
             d.addCallback(self.__cb_signal_unread_to_ui)
-        return result

     def __cb_signal_unread_to_ui(self, unseen):
         """
@@ -705,6 +698,8 @@ class MessageCollection(object):
         :param unseen: number of unseen messages.
         :type unseen: int
         """
+        # TODO it might make sense to modify the event so that
+        # it receives both the mailbox name AND the number of unread messages.
         emit_async(catalog.MAIL_UNREAD_MESSAGES,
                    self.store.uuid,
                    str(unseen))

     def copy_msg(self, msg, new_mbox_uuid):
@@ -762,7 +757,7 @@ class MessageCollection(object):

         d = wrapper.copy(self.store, new_mbox_uuid)
         d.addCallback(insert_copied_mdoc_id)
-        d.addCallback(self.notify_new_to_listeners)
+        d.addCallback(lambda _: self.notify_new_to_listeners())
         return d

     def delete_msg(self, msg):
@@ -917,30 +912,28 @@ class Account(object):
         self._init_d = self._initialize_storage()
         self._initialize_sync_hooks()

+    @defer.inlineCallbacks
     def _initialize_storage(self):
+        yield self.adaptor.initialize_store(self.store)
+        mboxes = yield self.list_all_mailbox_names()
+        if INBOX_NAME not in mboxes:
+            yield self.add_mailbox(INBOX_NAME)

-        def add_mailbox_if_none(mboxes):
-            if not mboxes:
-                return self.add_mailbox(INBOX_NAME)
+        # This is so that we create the mboxes before Pixelated tries
+        # to do it.

-        def finish_initialization(result):
-            self.deferred_initialization.callback(None)
-            if self._ready_cb is not None:
-                self._ready_cb()
+        if 'Sent' not in mboxes:
+            yield self.add_mailbox('Sent')

-        d = self.adaptor.initialize_store(self.store)
-        d.addCallback(lambda _: self.list_all_mailbox_names())
-        d.addCallback(add_mailbox_if_none)
-        d.addCallback(finish_initialization)
-        return d
+        self.deferred_initialization.callback(None)
+        if self._ready_cb is not None:
+            self._ready_cb()

     def callWhenReady(self, cb, *args, **kw):
         """
         Execute the callback when the initialization of the Account is ready.
         Note that the callback will receive a first meaningless parameter.
         """
-        # TODO this should ignore the first parameter explicitely
-        # lambda _: cb(*args, **kw)
         self.deferred_initialization.addCallback(cb, *args, **kw)
         return self.deferred_initialization

diff --git a/src/leap/bitmask/mua/pixelizer.py b/src/leap/bitmask/mua/pixelizer.py
index b9ceac9e..138774b0 100644
--- a/src/leap/bitmask/mua/pixelizer.py
+++ b/src/leap/bitmask/mua/pixelizer.py
@@ -32,6 +32,7 @@ However, some care has to be taken to avoid certain types of concurrency bugs.

 import json
 import os
+import string
 import sys

 from twisted.internet import defer, reactor
@@ -54,16 +55,71 @@ try:

     class _LeapMailStore(LeapMailStore):

+        # TODO We NEED TO rewrite the whole LeapMailStore in the coming
+        # pixelated fork so that we reuse the account instance.
+        # Otherwise, the current system for notifications will break.
+        # The other option is to have generic event listeners, using zmq, and
+        # allow the pixelated instance to have its own hierarchy of
+        # account-mailbox instances, only sharing soledad.
+        # However, this seems good enough since it's now better to wait until
+        # we depend on leap.pixelated fork to make changes on that codebase.
+        # When that refactor starts, we should try to internalize as much
+        # work/bugfixes was done in pixelated, and incorporate it into the
+        # public bitmask api. Let's learn from our mistakes.
+
         def __init__(self, soledad, account):
             self.account = account
             super(_LeapMailStore, self).__init__(soledad)

-        # We should rewrite the LeapMailStore in the coming pixelated fork so
-        # that we reuse the account instance.
         @defer.inlineCallbacks
         def add_mail(self, mailbox_name, raw_msg):
-            inbox = yield self.account.get_collection_by_mailbox(mailbox_name)
-            yield inbox.add_msg(raw_msg, ('\\Recent',), notify_just_mdoc=False)
+            name = yield self._get_case_insensitive_mbox(mailbox_name)
+            mailbox = yield self.account.get_collection_by_mailbox(name)
+            flags = ['\\Recent']
+            if mailbox_name.lower() == 'sent':
+                flags += '\\Seen'
+            message = yield mailbox.add_msg(
+                raw_msg, tuple(flags), notify_just_mdoc=False)
+
+            # this still needs the pixelated interface because it does stuff
+            # like indexing the mail in whoosh, etc.
+            mail = yield self._leap_message_to_leap_mail(
+                message.get_wrapper().mdoc.doc_id, message, include_body=True)
+            defer.returnValue(mail)
+
+        def get_mailbox_names(self):
+            """returns: deferred"""
+            return self.account.list_all_mailbox_names()
+
+        @defer.inlineCallbacks
+        def _get_or_create_mailbox(self, mailbox_name):
+            """
+            Avoid creating variations of the case.
+            If there's already a 'Sent' folder, do not create 'SENT', just
+            return that.
+            """
+            name = yield self._get_case_insensitive_mbox(mailbox_name)
+            if name is None:
+                name = mailbox_name
+                yield self.account.add_mailbox(name)
+            mailbox = yield self.account.get_collection_by_mailbox(
+                name)
+
+            # Pixelated expects the mailbox wrapper;
+            # it should limit itself to the Mail API instead.
+            # This is also a smell that the collection-mailbox-wrapper
+            # distinction is not clearly cut.
+            defer.returnValue(mailbox.mbox_wrapper)
+
+        @defer.inlineCallbacks
+        def _get_case_insensitive_mbox(self, mailbox_name):
+            name = None
+            mailboxes = yield self.get_mailbox_names()
+            lower = mailbox_name.lower()
+            lower_mboxes = map(string.lower, mailboxes)
+            if lower in lower_mboxes:
+                name = mailboxes[lower_mboxes.index(lower)]
+            defer.returnValue(name)

 except ImportError as exc:
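
As a footnote to the TODO above: the "generic event listeners, using zmq"
alternative (also mentioned in the commit message) would mean that the process
writing messages publishes an event per insert, and any other process, for
example a Pixelated instance with its own account/mailbox hierarchy, subscribes
to those events instead of sharing the account object. A rough sketch with
pyzmq follows; the endpoint and topic names are made up, and the existing code
emits events through emit_async(catalog....) as seen in mail.py above, so this
only illustrates the direction, not a drop-in design:

    import zmq

    # Hypothetical topic; not part of any existing LEAP/Bitmask event catalog.
    MAIL_ADDED = b"mail.added"
    ENDPOINT = "tcp://127.0.0.1:5563"

    def make_publisher():
        """The process that writes messages announces every insert."""
        ctx = zmq.Context.instance()
        pub = ctx.socket(zmq.PUB)
        pub.bind(ENDPOINT)
        return pub

    def notify_new(pub, mbox_uuid, doc_id):
        # Topic frame first so subscribers can filter by prefix.
        pub.send_multipart([MAIL_ADDED, mbox_uuid.encode(), doc_id.encode()])

    def make_listener():
        """Another process subscribes instead of sharing account objects."""
        ctx = zmq.Context.instance()
        sub = ctx.socket(zmq.SUB)
        sub.connect(ENDPOINT)
        sub.setsockopt(zmq.SUBSCRIBE, MAIL_ADDED)
        return sub

    def handle_one(sub):
        # Blocks until the next new-mail event arrives.
        topic, mbox_uuid, doc_id = sub.recv_multipart()
        print("new mail in %s: %s" % (mbox_uuid.decode(), doc_id.decode()))
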