author     Kali Kaneko <kali@leap.se>  2015-06-18 23:20:45 -0400
committer  Kali Kaneko <kali@leap.se>  2015-06-22 11:39:20 -0400
commit     c8dfed5b5f4ccb87003119f14e189566219365bb (patch)
tree       54ac88958970358dc02c808c8f831d3cd254baaa
parent     1348798e88c6026f57ff938c59a2dce979a9a891 (diff)
[bug] fixes for displaying attachments and moving between folders (tag: 0.4.0-alpha)
- Add errback handling to properly catch errors that were preventing the complete insertion of the parts of a given message. Fixes blank attachments and the moving of messages to different folders.
- Force overwriting of the mdoc when it is a copy. Previously, a message could not be copied back to a folder from which it had already been copied to another one (the mdoc already existed there with the same doc_id, which forbade the creation of the new one). This case also needs special care in the indexer, since the old hash entry has to be deleted first.

Closes: #7178, #7158
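A minimal standalone sketch of the overwrite-on-conflict rule described above, assuming the u1db and wrapper names that appear in the diff below (simplified, not the exact implementation):

import u1db


def create_or_overwrite(store, wrapper, is_copy=False):
    # Try to create the wrapper's document; when the message is a copy,
    # a RevisionConflict means the target already holds a doc with that
    # doc_id, so overwrite it instead of failing.
    def overwrite_existing(failure):
        # Only swallow revision conflicts; anything else keeps propagating.
        failure.trap(u1db.errors.RevisionConflict)
        wrapper._doc_id = wrapper.future_doc_id
        wrapper._future_doc_id = None
        return wrapper.update(store)

    d = store.create_doc(wrapper.serialize(), doc_id=wrapper.future_doc_id)
    if is_copy:
        d.addErrback(overwrite_existing)
    return d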
-rw-r--r--  src/leap/mail/adaptors/soledad.py  45
-rw-r--r--  src/leap/mail/imap/mailbox.py       5
-rw-r--r--  src/leap/mail/mail.py              49
-rw-r--r--  src/leap/mail/mailbox_indexer.py    4
-rw-r--r--  src/leap/mail/sync_hooks.py         2
5 files changed, 86 insertions, 19 deletions
diff --git a/src/leap/mail/adaptors/soledad.py b/src/leap/mail/adaptors/soledad.py
index b8e5fd4..dc0960f 100644
--- a/src/leap/mail/adaptors/soledad.py
+++ b/src/leap/mail/adaptors/soledad.py
@@ -24,6 +24,7 @@ from email import message_from_string
from pycryptopp.hash import sha256
from twisted.internet import defer
+from twisted.python import log
from zope.interface import implements
import u1db
@@ -108,7 +109,7 @@ class SoledadDocumentWrapper(models.DocumentWrapper):
def set_future_doc_id(self, doc_id):
self._future_doc_id = doc_id
- def create(self, store):
+ def create(self, store, is_copy=False):
"""
Create the documents for this wrapper.
Since this method will not check for duplication, the
@@ -130,13 +131,28 @@ class SoledadDocumentWrapper(models.DocumentWrapper):
self.set_future_doc_id(None)
return doc
+ def update_wrapper(failure):
+ # In the case of some copies (for instance, from one folder to
+ # another and back to the original folder), the document that we
+ # want to insert already exists. In this case, putting it
+ # and overwriting the document with that doc_id is the right thing
+ # to do.
+ failure.trap(u1db.errors.RevisionConflict)
+ self._doc_id = self.future_doc_id
+ self._future_doc_id = None
+ return self.update(store)
+
if self.future_doc_id is None:
d = store.create_doc(self.serialize())
else:
d = store.create_doc(self.serialize(),
doc_id=self.future_doc_id)
d.addCallback(update_doc_id)
- d.addErrback(self._catch_revision_conflict, self.future_doc_id)
+
+ if is_copy:
+ d.addErrback(update_wrapper)
+ else:
+ d.addErrback(self._catch_revision_conflict, self.future_doc_id)
return d
def update(self, store):
@@ -542,8 +558,8 @@ class MessageWrapper(object):
# TODO check that the doc_ids in the mdoc are coherent
self.d = []
- mdoc_created = self.mdoc.create(store)
- fdoc_created = self.fdoc.create(store)
+ mdoc_created = self.mdoc.create(store, is_copy=self._is_copy)
+ fdoc_created = self.fdoc.create(store, is_copy=self._is_copy)
self.d.append(mdoc_created)
self.d.append(fdoc_created)
@@ -558,7 +574,12 @@ class MessageWrapper(object):
continue
self.d.append(cdoc.create(store))
- self.all_inserted_d = defer.gatherResults(self.d)
+ def log_all_inserted(result):
+ log.msg("All parts inserted for msg!")
+ return result
+
+ self.all_inserted_d = defer.gatherResults(self.d, consumeErrors=True)
+ self.all_inserted_d.addCallback(log_all_inserted)
if notify_just_mdoc:
self.all_inserted_d.addCallback(unblock_pending_insert)
@@ -605,8 +626,10 @@ class MessageWrapper(object):
new_wrapper.set_mbox_uuid(new_mbox_uuid)
# XXX could flag so that it only creates mdoc/fdoc...
+
d = new_wrapper.create(store)
d.addCallback(lambda result: new_wrapper)
+ d.addErrback(lambda failure: log.err(failure))
return d
def set_mbox_uuid(self, mbox_uuid):
@@ -942,10 +965,14 @@ class SoledadMailAdaptor(SoledadIndexMixin):
fdoc_id = _get_fdoc_id_from_mdoc_id()
def wrap_fdoc(doc):
+ if not doc:
+ return
cls = FlagsDocWrapper
return cls(doc_id=doc.doc_id, **doc.content)
def get_flags(fdoc_wrapper):
+ if not fdoc_wrapper:
+ return []
return fdoc_wrapper.get_flags()
d = store.get_doc(fdoc_id)
@@ -983,8 +1010,8 @@ class SoledadMailAdaptor(SoledadIndexMixin):
"""
Delete all messages flagged as deleted.
"""
- def err(f):
- f.printTraceback()
+ def err(failure):
+ log.err(failure)
def delete_fdoc_and_mdoc_flagged(fdocs):
# low level here, not using the wrappers...
@@ -1118,8 +1145,8 @@ class SoledadMailAdaptor(SoledadIndexMixin):
"""
return MailboxWrapper.get_all(store)
- def _errback(self, f):
- f.printTraceback()
+ def _errback(self, failure):
+ log.err(failure)
def _split_into_parts(raw):
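The switch to consumeErrors=True above matters because gatherResults fires its errback on the first failing part, and without consumeErrors the failure would also stay unconsumed on the individual deferred and surface later as an unhandled error. A hypothetical standalone sketch of the gathering pattern, assuming only Twisted:

from twisted.internet import defer
from twisted.python import log


def insert_all_parts(part_deferreds):
    # Gather the per-part insertion deferreds; consume their individual
    # errors so a failing part is reported once, through this deferred.
    d = defer.gatherResults(part_deferreds, consumeErrors=True)

    def log_all_inserted(result):
        log.msg("All parts inserted for msg!")
        return result

    d.addCallback(log_all_inserted)
    # Fires with a FirstError wrapping the first failing part.
    d.addErrback(log.err)
    return d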
diff --git a/src/leap/mail/imap/mailbox.py b/src/leap/mail/imap/mailbox.py
index 1412344..c4821ff 100644
--- a/src/leap/mail/imap/mailbox.py
+++ b/src/leap/mail/imap/mailbox.py
@@ -846,8 +846,9 @@ class IMAPMailbox(object):
#deferLater(self.reactor, 0, self._do_copy, message, d)
#return d
- return self.collection.copy_msg(message.message,
- self.collection.mbox_uuid)
+ d = self.collection.copy_msg(message.message,
+ self.collection.mbox_uuid)
+ return d
# convenience fun
diff --git a/src/leap/mail/mail.py b/src/leap/mail/mail.py
index 8cb0b4a..bf5b34d 100644
--- a/src/leap/mail/mail.py
+++ b/src/leap/mail/mail.py
@@ -25,6 +25,7 @@ import time
import weakref
from twisted.internet import defer
+from twisted.python import log
from leap.common.check import leap_assert_type
from leap.common.events import emit, catalog
@@ -559,7 +560,7 @@ class MessageCollection(object):
"""
Add a message to this collection.
- :param raw_message: the raw message
+ :param raw_msg: the raw message
:param flags: tuple of flags for this message
:param tags: tuple of tags for this message
:param date:
@@ -619,7 +620,7 @@ class MessageCollection(object):
# so workaround is to make sure we always check for it before
# inserting the doc. I should debug into the real cause.
d = self.mbox_indexer.create_table(self.mbox_uuid)
- d.addCallback(lambda _: self.mbox_indexer.insert_doc(
+ d.addBoth(lambda _: self.mbox_indexer.insert_doc(
self.mbox_uuid, doc_id))
return d
@@ -664,12 +665,52 @@ class MessageCollection(object):
Copy the message to another collection. (it only makes sense for
mailbox collections)
"""
+ # TODO should CHECK first if the mdoc is present in the mailbox
+ # WITH a Deleted flag... and just simply remove the flag...
+ # Another option is to delete the previous mdoc if it already exists
+ # (so we get a new UID)
+
if not self.is_mailbox_collection():
raise NotImplementedError()
+ def delete_mdoc_entry_and_insert(failure, mbox_uuid, doc_id):
+ d = self.mbox_indexer.delete_doc_by_hash(mbox_uuid, doc_id)
+ d.addCallback(lambda _: self.mbox_indexer.insert_doc(
+ new_mbox_uuid, doc_id))
+ return d
+
def insert_copied_mdoc_id(wrapper_new_msg):
- return self.mbox_indexer.insert_doc(
- new_mbox_uuid, wrapper_new_msg.mdoc.doc_id)
+ # XXX FIXME -- since this is already saved, the future_doc_id
+ # should be already copied into the doc_id!
+ # Investigate why we are not receiving the already saved doc_id
+ doc_id = wrapper_new_msg.mdoc.doc_id
+ if not doc_id:
+ doc_id = wrapper_new_msg.mdoc._future_doc_id
+
+ def insert_conditionally(uid, mbox_uuid, doc_id):
+ indexer = self.mbox_indexer
+ if uid:
+ d = indexer.delete_doc_by_hash(mbox_uuid, doc_id)
+ d.addCallback(lambda _: indexer.insert_doc(
+ new_mbox_uuid, doc_id))
+ return d
+ else:
+ d = indexer.insert_doc(mbox_uuid, doc_id)
+ return d
+
+ def log_result(result):
+ return result
+
+ def insert_doc(_, mbox_uuid, doc_id):
+ d = self.mbox_indexer.get_uid_from_doc_id(mbox_uuid, doc_id)
+ d.addCallback(insert_conditionally, mbox_uuid, doc_id)
+ d.addErrback(lambda failure: log.err(failure))
+ d.addCallback(log_result)
+ return d
+
+ d = self.mbox_indexer.create_table(new_mbox_uuid)
+ d.addBoth(insert_doc, new_mbox_uuid, doc_id)
+ return d
wrapper = msg.get_wrapper()
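A condensed sketch of the insert_conditionally logic above, i.e. the "delete the stale hash entry first, then insert" rule for messages copied back to a folder they had already been copied to (indexer is assumed to be the MailboxIndexer used in this diff):

def reinsert_doc(indexer, mbox_uuid, doc_id):
    def maybe_delete_then_insert(uid):
        if uid:
            # The hash already has a UID in this mailbox (the message was
            # copied here before): drop the old entry so the new insert
            # gets a fresh UID.
            d = indexer.delete_doc_by_hash(mbox_uuid, doc_id)
            d.addCallback(lambda _: indexer.insert_doc(mbox_uuid, doc_id))
            return d
        return indexer.insert_doc(mbox_uuid, doc_id)

    d = indexer.get_uid_from_doc_id(mbox_uuid, doc_id)
    d.addCallback(maybe_delete_then_insert)
    return d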
diff --git a/src/leap/mail/mailbox_indexer.py b/src/leap/mail/mailbox_indexer.py
index ab0967d..08e5f10 100644
--- a/src/leap/mail/mailbox_indexer.py
+++ b/src/leap/mail/mailbox_indexer.py
@@ -104,7 +104,6 @@ class MailboxIndexer(object):
"uid INTEGER PRIMARY KEY AUTOINCREMENT, "
"hash TEXT UNIQUE NOT NULL)".format(
preffix=self.table_preffix, name=sanitize(mailbox_uuid)))
- print "CREATING TABLE..."
return self._operation(sql)
def delete_table(self, mailbox_uuid):
@@ -190,8 +189,7 @@ class MailboxIndexer(object):
:type mailbox: str
:param doc_id: the doc_id for the MetaMsg
:type doc_id: str
- :return: a deferred that will fire with the uid of the newly inserted
- document.
+ :return: a deferred that will fire when the deletion has succeeded.
:rtype: Deferred
"""
check_good_uuid(mailbox_uuid)
diff --git a/src/leap/mail/sync_hooks.py b/src/leap/mail/sync_hooks.py
index a8a69c9..bd8d88d 100644
--- a/src/leap/mail/sync_hooks.py
+++ b/src/leap/mail/sync_hooks.py
@@ -86,7 +86,7 @@ class MailProcessingPostSyncHook(object):
# have seen -- but make sure *it's already created* before
# inserting the index entry!.
d = indexer.create_table(mbox_uuid)
- d.addCallback(lambda _: indexer.insert_doc(mbox_uuid, index_docid))
+ d.addBoth(lambda _: indexer.insert_doc(mbox_uuid, index_docid))
self._processing_deferreds.append(d)
def _process_queued_docs(self):
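The addBoth changes in mail.py and here follow the same workaround noted in the comments above: create_table may fail because the table already exists, and the index entry should be inserted either way. A minimal sketch, assuming a MailboxIndexer-like object:

def index_doc(indexer, mbox_uuid, doc_id):
    d = indexer.create_table(mbox_uuid)
    # addBoth runs on success *and* failure, so an "already exists" error
    # from create_table does not block inserting the index entry.
    d.addBoth(lambda _: indexer.insert_doc(mbox_uuid, doc_id))
    return d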