author     Kali Kaneko <kali@leap.se>   2014-01-28 18:39:59 -0400
committer  Kali Kaneko <kali@leap.se>   2014-01-30 14:20:44 -0400
commit     4436fe60288dd26f2490c3828b0e6d321ea18576 (patch)
tree       f1705063cfe702c1f2c10fce6101487be7b2495d /mail
parent     268570e5ff884b1981aff728668d09344160b86b (diff)
docstring fixes
Also some fixes for None comparisons.
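As a rough illustration of the "None comparisons" cleanup this commit refers to, the sketch below shows the pattern the diff applies throughout: call sites move from bare truthiness checks (if not doc:) to an empty() helper plus explicit "is None" comparisons. This is a simplified stand-in, not the actual leap.mail API; FakeDocument and lookup_flags_doc are invented for the example, and the real empty() helper lives in mail/src/leap/mail/utils.py.

# Minimal sketch of the None-comparison cleanup applied in this commit.
# Hypothetical names; not the verbatim leap.mail implementation.


class FakeDocument(object):
    """Stand-in for a Soledad document exposing a `.content` mapping."""
    def __init__(self, content):
        self.content = content


def empty(thing):
    """Return True for None, empty content, or a dead weak reference."""
    if thing is None:
        return True
    # Documents count as empty when their content mapping is empty.
    content = getattr(thing, "content", thing)
    try:
        return len(content) == 0
    except (ReferenceError, TypeError):
        # weakref proxy already gone, or object without a length
        return True


def lookup_flags_doc(store, key):
    """Look up a flags document and return its size, or None if missing."""
    doc = store.get(key)
    if empty(doc):                      # instead of `if not doc:`
        return None
    size = doc.content.get("size")
    if size is None:                    # instead of `if not size:`
        size = 0
    return size


if __name__ == "__main__":
    store = {"inbox:1": FakeDocument({"size": 42}),
             "inbox:2": FakeDocument({})}
    print(lookup_flags_doc(store, "inbox:1"))   # 42
    print(lookup_flags_doc(store, "inbox:2"))   # None (empty content)
    print(lookup_flags_doc(store, "inbox:3"))   # None (missing document)

The point of the explicit checks is that falsy-but-valid values (0, "", {}) are no longer silently treated as "missing", which is the class of bug the commit message alludes to.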
Diffstat (limited to 'mail')
 -rw-r--r--  mail/src/leap/mail/imap/account.py                |   4
 -rw-r--r--  mail/src/leap/mail/imap/interfaces.py             |   1
 -rw-r--r--  mail/src/leap/mail/imap/mailbox.py                |  31
 -rw-r--r--  mail/src/leap/mail/imap/memorystore.py            | 215
 -rw-r--r--  mail/src/leap/mail/imap/messageparts.py           | 129
 -rw-r--r--  mail/src/leap/mail/imap/messages.py               | 240
 -rw-r--r--  mail/src/leap/mail/imap/server.py                 |  17
 -rw-r--r--  mail/src/leap/mail/imap/soledadstore.py           |  87
 -rwxr-xr-x  mail/src/leap/mail/imap/tests/leap_tests_imap.zsh |   3
 -rw-r--r--  mail/src/leap/mail/size.py                        |   2
 -rw-r--r--  mail/src/leap/mail/utils.py                       |   4
11 files changed, 373 insertions, 360 deletions
diff --git a/mail/src/leap/mail/imap/account.py b/mail/src/leap/mail/imap/account.py index 7641ea82..f985c049 100644 --- a/mail/src/leap/mail/imap/account.py +++ b/mail/src/leap/mail/imap/account.py @@ -48,7 +48,7 @@ class SoledadBackedAccount(WithMsgFields, IndexedDB, MBoxParser): selected = None closed = False - def __init__(self, account_name, soledad=None, memstore=None): + def __init__(self, account_name, soledad, memstore=None): """ Creates a SoledadAccountIndex that keeps track of the mailboxes and subscriptions handled by this account. @@ -134,7 +134,7 @@ class SoledadBackedAccount(WithMsgFields, IndexedDB, MBoxParser): if name not in self.mailboxes: raise imap4.MailboxException("No such mailbox: %r" % name) - return SoledadMailbox(name, soledad=self._soledad, + return SoledadMailbox(name, self._soledad, memstore=self._memstore) ## diff --git a/mail/src/leap/mail/imap/interfaces.py b/mail/src/leap/mail/imap/interfaces.py index 585165a4..c906278c 100644 --- a/mail/src/leap/mail/imap/interfaces.py +++ b/mail/src/leap/mail/imap/interfaces.py @@ -75,6 +75,7 @@ class IMessageStore(Interface): :param mbox: the mbox this message belongs. :param uid: the UID that identifies this message in this mailbox. + :return: IMessageContainer """ diff --git a/mail/src/leap/mail/imap/mailbox.py b/mail/src/leap/mail/imap/mailbox.py index b5c57191..a0eb0a97 100644 --- a/mail/src/leap/mail/imap/mailbox.py +++ b/mail/src/leap/mail/imap/mailbox.py @@ -26,7 +26,6 @@ import cStringIO from collections import defaultdict from twisted.internet import defer -#from twisted.internet.task import deferLater from twisted.python import log from twisted.mail import imap4 @@ -99,7 +98,6 @@ class SoledadMailbox(WithMsgFields, MBoxParser): :param rw: read-and-write flag for this mailbox :type rw: int """ - print "got memstore: ", memstore leap_assert(mbox, "Need a mailbox name to initialize") leap_assert(soledad, "Need a soledad instance to initialize") @@ -240,10 +238,11 @@ class SoledadMailbox(WithMsgFields, MBoxParser): the mailbox document in soledad if this is higher. :return: the last uid for messages in this mailbox - :rtype: bool + :rtype: int """ last = self._memstore.get_last_uid(self.mbox) - print "last uid for %s: %s (from memstore)" % (self.mbox, last) + logger.debug("last uid for %s: %s (from memstore)" % ( + repr(self.mbox), last)) return last last_uid = property( @@ -468,7 +467,6 @@ class SoledadMailbox(WithMsgFields, MBoxParser): """ Remove all messages flagged \\Deleted """ - print "EXPUNGE!" if not self.isWriteable(): raise imap4.ReadOnlyMailbox @@ -537,8 +535,6 @@ class SoledadMailbox(WithMsgFields, MBoxParser): # can treat them all the same. # Change this to the flag that twisted expects when we # switch to content-hash based index + local UID table. - print - print "FETCHING..." sequence = False #sequence = True if uid == 0 else False @@ -648,9 +644,12 @@ class SoledadMailbox(WithMsgFields, MBoxParser): for msgid in seq_messg) return result - def signal_unread_to_ui(self): + def signal_unread_to_ui(self, *args, **kwargs): """ Sends unread event to ui. + + :param args: ignored + :param kwargs: ignored """ unseen = self.getUnseenCount() leap_events.signal(IMAP_UNREAD_MAIL, str(unseen)) @@ -767,13 +766,12 @@ class SoledadMailbox(WithMsgFields, MBoxParser): # IMessageCopier - @deferred + #@deferred #@profile def copy(self, messageObject): """ Copy the given message object into this mailbox. 
""" - from twisted.internet import reactor msg = messageObject memstore = self._memstore @@ -791,23 +789,16 @@ class SoledadMailbox(WithMsgFields, MBoxParser): exist = dest_fdoc and not empty(dest_fdoc.content) if exist: - print "Destination message already exists!" + logger.warning("Destination message already exists!") else: - print "DO COPY MESSAGE!" mbox = self.mbox uid_next = memstore.increment_last_soledad_uid(mbox) new_fdoc[self.UID_KEY] = uid_next new_fdoc[self.MBOX_KEY] = mbox - # XXX set recent! - - print "****************************" - print "copy message..." - print "new fdoc ", new_fdoc - print "hdoc: ", hdoc - print "****************************" + # FIXME set recent! - self._memstore.create_message( + return self._memstore.create_message( self.mbox, uid_next, MessageWrapper( new_fdoc, hdoc.content), diff --git a/mail/src/leap/mail/imap/memorystore.py b/mail/src/leap/mail/imap/memorystore.py index 60e98c71..2d60b13f 100644 --- a/mail/src/leap/mail/imap/memorystore.py +++ b/mail/src/leap/mail/imap/memorystore.py @@ -199,12 +199,14 @@ class MemoryStore(object): By default we consider that any message is a new message. :param mbox: the mailbox - :type mbox: basestring + :type mbox: str or unicode :param uid: the UID for the message :type uid: int - :param message: a to be added + :param message: a message to be added :type message: MessageWrapper - :param notify_on_disk: + :param notify_on_disk: whether the deferred that is returned should + wait until the message is written to disk to + be fired. :type notify_on_disk: bool :return: a Deferred. if notify_on_disk is True, will be fired @@ -212,7 +214,7 @@ class MemoryStore(object): Otherwise will fire inmediately :rtype: Deferred """ - print "adding new doc to memstore %s (%s)" % (mbox, uid) + log.msg("adding new doc to memstore %r (%r)" % (mbox, uid)) key = mbox, uid self._add_message(mbox, uid, message, notify_on_disk) @@ -239,13 +241,17 @@ class MemoryStore(object): """ Put an existing message. + This will set the dirty flag on the MemoryStore. + :param mbox: the mailbox - :type mbox: basestring + :type mbox: str or unicode :param uid: the UID for the message :type uid: int - :param message: a to be added + :param message: a message to be added :type message: MessageWrapper - :param notify_on_disk: + :param notify_on_disk: whether the deferred that is returned should + wait until the message is written to disk to + be fired. :type notify_on_disk: bool :return: a Deferred. if notify_on_disk is True, will be fired @@ -260,11 +266,13 @@ class MemoryStore(object): self._dirty.add(key) self._dirty_deferreds[key] = d self._add_message(mbox, uid, message, notify_on_disk) - #print "dirty ", self._dirty - #print "new ", self._new return d def _add_message(self, mbox, uid, message, notify_on_disk=True): + """ + Helper method, called by both create_message and put_message. + See those for parameter documentation. + """ # XXX have to differentiate between notify_new and notify_dirty # TODO defaultdict the hell outa here... @@ -332,15 +340,19 @@ class MemoryStore(object): store.pop(key) prune((FDOC, HDOC, CDOCS, DOCS_ID), store) - #print "after adding: " - #import pprint; pprint.pprint(self._msg_store[key]) - def get_docid_for_fdoc(self, mbox, uid): """ - Get Soledad document id for the flags-doc for a given mbox and uid. + Return Soledad document id for the flags-doc for a given mbox and uid, + or None of no flags document could be found. 
+ + :param mbox: the mailbox + :type mbox: str or unicode + :param uid: the message UID + :type uid: int + :rtype: unicode or None """ fdoc = self._permanent_store.get_flags_doc(mbox, uid) - if not fdoc: + if empty(fdoc): return None doc_id = fdoc.doc_id return doc_id @@ -349,22 +361,30 @@ class MemoryStore(object): """ Get a MessageWrapper for the given mbox and uid combination. + :param mbox: the mailbox + :type mbox: str or unicode + :param uid: the message UID + :type uid: int + :return: MessageWrapper or None """ key = mbox, uid msg_dict = self._msg_store.get(key, None) - if msg_dict: - new, dirty = self._get_new_dirty_state(key) - return MessageWrapper(from_dict=msg_dict, - new=new, - dirty=dirty, - memstore=weakref.proxy(self)) - else: + if empty(msg_dict): return None + new, dirty = self._get_new_dirty_state(key) + return MessageWrapper(from_dict=msg_dict, + new=new, dirty=dirty, + memstore=weakref.proxy(self)) def remove_message(self, mbox, uid): """ Remove a Message from this MemoryStore. + + :param mbox: the mailbox + :type mbox: str or unicode + :param uid: the message UID + :type uid: int """ # XXX For the moment we are only removing the flags and headers # docs. The rest we leave there polluting your hard disk, @@ -386,6 +406,8 @@ class MemoryStore(object): def write_messages(self, store): """ Write the message documents in this MemoryStore to a different store. + + :param store: the IMessageStore to write to """ # For now, we pass if the queue is not empty, to avoid duplicate # queuing. @@ -397,7 +419,10 @@ class MemoryStore(object): if not self.producer.is_queue_empty(): return - print "Writing messages to Soledad..." + logger.info("Writing messages to Soledad...") + + # TODO change for lock, and make the property access + # is accquired with set_bool_flag(self, self.WRITING_FLAG): for rflags_doc_wrapper in self.all_rdocs_iter(): self.producer.push(rflags_doc_wrapper) @@ -409,6 +434,9 @@ class MemoryStore(object): def get_uids(self, mbox): """ Get all uids for a given mbox. + + :param mbox: the mailbox + :type mbox: str or unicode """ all_keys = self._msg_store.keys() return [uid for m, uid in all_keys if m == mbox] @@ -420,6 +448,9 @@ class MemoryStore(object): Get the highest UID for a given mbox. It will be the highest between the highest uid in the message store for the mailbox, and the soledad integer cache. + + :param mbox: the mailbox + :type mbox: str or unicode """ uids = self.get_uids(mbox) last_mem_uid = uids and max(uids) or 0 @@ -429,6 +460,9 @@ class MemoryStore(object): def get_last_soledad_uid(self, mbox): """ Get last uid for a given mbox from the soledad integer cache. + + :param mbox: the mailbox + :type mbox: str or unicode """ return self._last_uid.get(mbox, 0) @@ -438,10 +472,16 @@ class MemoryStore(object): SoledadMailbox should prime this value during initialization. Other methods (during message adding) SHOULD call `increment_last_soledad_uid` instead. 
+ + :param mbox: the mailbox + :type mbox: str or unicode + :param value: the value to set + :type value: int """ leap_assert_type(value, int) - print "setting last soledad uid for ", mbox, "to", value - # if we already have a vlue here, don't do anything + logger.info("setting last soledad uid for %s to %s" % + (mbox, value)) + # if we already have a value here, don't do anything with self._last_uid_lock: if not self._last_uid.get(mbox, None): self._last_uid[mbox] = value @@ -451,6 +491,9 @@ class MemoryStore(object): Increment by one the soledad integer cache for the last_uid for this mbox, and fire a defer-to-thread to update the soledad value. The caller should lock the call tho this method. + + :param mbox: the mailbox + :type mbox: str or unicode """ with self._last_uid_lock: self._last_uid[mbox] += 1 @@ -461,7 +504,12 @@ class MemoryStore(object): @deferred def write_last_uid(self, mbox, value): """ - Increment the soledad cache, + Increment the soledad integer cache for the highest uid value. + + :param mbox: the mailbox + :type mbox: str or unicode + :param value: the value to set + :type value: int """ leap_assert_type(value, int) if self._permanent_store: @@ -472,18 +520,30 @@ class MemoryStore(object): def count_new_mbox(self, mbox): """ Count the new messages by inbox. + + :param mbox: the mailbox + :type mbox: str or unicode + :return: number of new messages + :rtype: int """ return len([(m, uid) for m, uid in self._new if mbox == mbox]) + # XXX used at all? def count_new(self): """ Count all the new messages in the MemoryStore. + + :rtype: int """ return len(self._new) def get_cdoc_from_phash(self, phash): """ Return a content-document by its payload-hash. + + :param phash: the payload hash to check against + :type phash: str or unicode + :rtype: MessagePartDoc """ doc = self._phash_store.get(phash, None) @@ -504,8 +564,16 @@ class MemoryStore(object): def get_fdoc_from_chash(self, chash, mbox): """ Return a flags-document by its content-hash and a given mailbox. + Used during content-duplication detection while copying or adding a + message. + + :param chash: the content hash to check against + :type chash: str or unicode + :param mbox: the mailbox + :type mbox: str or unicode - :return: MessagePartDoc, or None. + :return: MessagePartDoc. It will return None if the flags document + has empty content or it is flagged as \\Deleted. """ docs_dict = self._chash_fdoc_store.get(chash, None) fdoc = docs_dict.get(mbox, None) if docs_dict else None @@ -522,9 +590,10 @@ class MemoryStore(object): if fdoc and fields.DELETED_FLAG in fdoc[fields.FLAGS_KEY]: return None - # XXX get flags - new = True - dirty = False + uid = fdoc.content[fields.UID_KEY] + key = mbox, uid + new = key in self._new + dirty = key in self._dirty return MessagePartDoc( new=new, dirty=dirty, store="mem", part=MessagePartType.fdoc, @@ -534,13 +603,19 @@ class MemoryStore(object): def all_msg_iter(self): """ Return generator that iterates through all messages in the store. + + :return: generator of MessageWrappers + :rtype: generator """ return (self.get_message(*key) for key in sorted(self._msg_store.keys())) def all_new_dirty_msg_iter(self): """ - Return geneator that iterates through all new and dirty messages. + Return generator that iterates through all new and dirty messages. 
+ + :return: generator of MessageWrappers + :rtype: generator """ return (self.get_message(*key) for key in sorted(self._msg_store.keys()) @@ -549,15 +624,29 @@ class MemoryStore(object): def all_msg_dict_for_mbox(self, mbox): """ Return all the message dicts for a given mbox. + + :param mbox: the mailbox + :type mbox: str or unicode + :return: list of dictionaries + :rtype: list """ + # This *needs* to return a fixed sequence. Otherwise the dictionary len + # will change during iteration, when we modify it return [self._msg_store[(mb, uid)] for mb, uid in self._msg_store if mb == mbox] def all_deleted_uid_iter(self, mbox): """ - Return generator that iterates through the UIDs for all messags + Return a list with the UIDs for all messags with deleted flag in a given mailbox. + + :param mbox: the mailbox + :type mbox: str or unicode + :return: list of integers + :rtype: list """ + # This *needs* to return a fixed sequence. Otherwise the dictionary len + # will change during iteration, when we modify it all_deleted = [ msg['fdoc']['uid'] for msg in self.all_msg_dict_for_mbox(mbox) if msg.get('fdoc', None) @@ -569,6 +658,11 @@ class MemoryStore(object): def _get_new_dirty_state(self, key): """ Return `new` and `dirty` flags for a given message. + + :param key: the key for the message, in the form mbox, uid + :type key: tuple + :return: tuple of bools + :rtype: tuple """ # XXX should return *first* the news, and *then* the dirty... return map(lambda _set: key in _set, (self._new, self._dirty)) @@ -576,14 +670,19 @@ class MemoryStore(object): def set_new(self, key): """ Add the key value to the `new` set. + + :param key: the key for the message, in the form mbox, uid + :type key: tuple """ self._new.add(key) def unset_new(self, key): """ Remove the key value from the `new` set. + + :param key: the key for the message, in the form mbox, uid + :type key: tuple """ - #print "Unsetting NEW for: %s" % str(key) self._new.discard(key) deferreds = self._new_deferreds d = deferreds.get(key, None) @@ -596,14 +695,19 @@ class MemoryStore(object): def set_dirty(self, key): """ Add the key value to the `dirty` set. + + :param key: the key for the message, in the form mbox, uid + :type key: tuple """ self._dirty.add(key) def unset_dirty(self, key): """ Remove the key value from the `dirty` set. + + :param key: the key for the message, in the form mbox, uid + :type key: tuple """ - #print "Unsetting DIRTY for: %s" % str(key) self._dirty.discard(key) deferreds = self._dirty_deferreds d = deferreds.get(key, None) @@ -619,6 +723,11 @@ class MemoryStore(object): def set_recent_flag(self, mbox, uid): """ Set the `Recent` flag for a given mailbox and UID. + + :param mbox: the mailbox + :type mbox: str or unicode + :param uid: the message UID + :type uid: int """ self._rflags_dirty.add(mbox) self._rflags_store[mbox]['set'].add(uid) @@ -627,6 +736,11 @@ class MemoryStore(object): def unset_recent_flag(self, mbox, uid): """ Unset the `Recent` flag for a given mailbox and UID. + + :param mbox: the mailbox + :type mbox: str or unicode + :param uid: the message UID + :type uid: int """ self._rflags_store[mbox]['set'].discard(uid) @@ -634,6 +748,11 @@ class MemoryStore(object): """ Set the value for the set of the recent flags. Used from the property in the MessageCollection. 
+ + :param mbox: the mailbox + :type mbox: str or unicode + :param value: a sequence of flags to set + :type value: sequence """ self._rflags_dirty.add(mbox) self._rflags_store[mbox]['set'] = set(value) @@ -643,6 +762,8 @@ class MemoryStore(object): Load the passed flags document in the recent flags store, for a given mailbox. + :param mbox: the mailbox + :type mbox: str or unicode :param flags_doc: A dictionary containing the `doc_id` of the Soledad flags-document for this mailbox, and the `set` of uids marked with that flag. @@ -651,9 +772,11 @@ class MemoryStore(object): def get_recent_flags(self, mbox): """ - Get the set of UIDs with the `Recent` flag for this mailbox. + Return the set of UIDs with the `Recent` flag for this mailbox. - :return: set, or None + :param mbox: the mailbox + :type mbox: str or unicode + :rtype: set, or None """ rflag_for_mbox = self._rflags_store.get(mbox, None) if not rflag_for_mbox: @@ -666,6 +789,7 @@ class MemoryStore(object): under a RecentFlagsDoc namedtuple. Used for saving to disk. + :return: a generator of RecentFlagDoc :rtype: generator """ # XXX use enums @@ -696,6 +820,11 @@ class MemoryStore(object): """ Remove all messages flagged \\Deleted from this Memory Store only. Called from `expunge` + + :param mbox: the mailbox + :type mbox: str or unicode + :return: a list of UIDs + :rtype: list """ mem_deleted = self.all_deleted_uid_iter(mbox) for uid in mem_deleted: @@ -706,6 +835,11 @@ class MemoryStore(object): """ Remove all messages flagged \\Deleted, from the Memory Store and from the permanent store also. + + :param mbox: the mailbox + :type mbox: str or unicode + :return: a list of UIDs + :rtype: list """ # TODO expunge should add itself as a callback to the ongoing # writes. @@ -737,7 +871,7 @@ class MemoryStore(object): mem_deleted = self.remove_all_deleted(mbox) all_deleted = set(mem_deleted).union(set(sol_deleted)) - print "deleted ", all_deleted + logger.debug("deleted %r" % all_deleted) except Exception as exc: logger.exception(exc) finally: @@ -763,18 +897,13 @@ class MemoryStore(object): # are done (gatherResults) return getattr(self, self.WRITING_FLAG) - def put_part(self, part_type, value): - """ - Put the passed part into this IMessageStore. - `part` should be one of: fdoc, hdoc, cdoc - """ - # XXX turn that into a enum - # Memory management. def get_size(self): """ Return the size of the internal storage. Use for calculating the limit beyond which we should flush the store. + + :rtype: int """ return size.get_size(self._msg_store) diff --git a/mail/src/leap/mail/imap/messageparts.py b/mail/src/leap/mail/imap/messageparts.py index 10672eda..5067263b 100644 --- a/mail/src/leap/mail/imap/messageparts.py +++ b/mail/src/leap/mail/imap/messageparts.py @@ -18,7 +18,6 @@ MessagePart implementation. Used from LeapMessage. """ import logging -import re import StringIO import weakref @@ -100,11 +99,10 @@ class MessageWrapper(object): CDOCS = "cdocs" DOCS_ID = "docs_id" - # XXX can use this to limit the memory footprint, - # or is it too premature to optimize? - # Does it work well together with the interfaces.implements? + # Using slots to limit some the memory footprint, + # Add your attribute here. - #__slots__ = ["_dict", "_new", "_dirty", "memstore"] + __slots__ = ["_dict", "_new", "_dirty", "_storetype", "memstore"] def __init__(self, fdoc=None, hdoc=None, cdocs=None, from_dict=None, memstore=None, @@ -141,9 +139,13 @@ class MessageWrapper(object): # properties + # TODO Could refactor new and dirty properties together. 
+ def _get_new(self): """ Get the value for the `new` flag. + + :rtype: bool """ return self._new @@ -151,6 +153,9 @@ class MessageWrapper(object): """ Set the value for the `new` flag, and propagate it to the memory store if any. + + :param value: the value to set + :type value: bool """ self._new = value if self.memstore: @@ -171,6 +176,8 @@ class MessageWrapper(object): def _get_dirty(self): """ Get the value for the `dirty` flag. + + :rtype: bool """ return self._dirty @@ -178,6 +185,9 @@ class MessageWrapper(object): """ Set the value for the `dirty` flag, and propagate it to the memory store if any. + + :param value: the value to set + :type value: bool """ self._dirty = value if self.memstore: @@ -198,6 +208,12 @@ class MessageWrapper(object): @property def fdoc(self): + """ + Return a MessagePartDoc wrapping around a weak reference to + the flags-document in this MemoryStore, if any. + + :rtype: MessagePartDoc + """ _fdoc = self._dict.get(self.FDOC, None) if _fdoc: content_ref = weakref.proxy(_fdoc) @@ -214,6 +230,12 @@ class MessageWrapper(object): @property def hdoc(self): + """ + Return a MessagePartDoc wrapping around a weak reference to + the headers-document in this MemoryStore, if any. + + :rtype: MessagePartDoc + """ _hdoc = self._dict.get(self.HDOC, None) if _hdoc: content_ref = weakref.proxy(_hdoc) @@ -228,6 +250,14 @@ class MessageWrapper(object): @property def cdocs(self): + """ + Return a weak reference to a zero-indexed dict containing + the content-documents, or an empty dict if none found. + If you want access to the MessagePartDoc for the individual + parts, use the generator returned by `walk` instead. + + :rtype: dict + """ _cdocs = self._dict.get(self.CDOCS, None) if _cdocs: return weakref.proxy(_cdocs) @@ -238,6 +268,8 @@ class MessageWrapper(object): """ Generator that iterates through all the parts, returning MessagePartDoc. Used for writing to SoledadStore. + + :rtype: generator """ if self._dirty: mbox = self.fdoc.content[fields.MBOX_KEY] @@ -264,6 +296,8 @@ class MessageWrapper(object): def as_dict(self): """ Return a dict representation of the parts contained. + + :rtype: dict """ return self._dict @@ -272,6 +306,11 @@ class MessageWrapper(object): Populate MessageWrapper parts from a dictionary. It expects the same format that we use in a MessageWrapper. + + + :param msg_dict: a dictionary containing the parts to populate + the MessageWrapper from + :type msg_dict: dict """ fdoc, hdoc, cdocs = map( lambda part: msg_dict.get(part, None), @@ -288,7 +327,7 @@ class MessagePart(object): It takes a subpart message and is able to find the inner parts. - Excusatio non petita: see the interface documentation. + See the interface documentation. """ implements(imap4.IMessagePart) @@ -297,6 +336,8 @@ class MessagePart(object): """ Initializes the MessagePart. + :param soledad: Soledad instance. + :type soledad: Soledad :param part_map: a dictionary containing the parts map for this message :type part_map: dict @@ -313,6 +354,7 @@ class MessagePart(object): # to gather the results of the deferred operations # to signal the operation is complete. 
#leap_assert(part_map, "part map dict cannot be null") + self._soledad = soledad self._pmap = part_map @@ -323,11 +365,12 @@ class MessagePart(object): :return: size of the message, in octets :rtype: int """ - if not self._pmap: + if empty(self._pmap): return 0 size = self._pmap.get('size', None) - if not size: + if size is None: logger.error("Message part cannot find size in the partmap") + size = 0 return size def getBodyFile(self): @@ -338,25 +381,25 @@ class MessagePart(object): :rtype: StringIO """ fd = StringIO.StringIO() - if self._pmap: + if not empty(self._pmap): multi = self._pmap.get('multi') if not multi: phash = self._pmap.get("phash", None) else: pmap = self._pmap.get('part_map') first_part = pmap.get('1', None) - if first_part: + if not empty(first_part): phash = first_part['phash'] if not phash: logger.warning("Could not find phash for this subpart!") - payload = str("") + payload = "" else: payload = self._get_payload_from_document(phash) else: logger.warning("Message with no part_map!") - payload = str("") + payload = "" if payload: content_type = self._get_ctype_from_document(phash) @@ -366,7 +409,8 @@ class MessagePart(object): charset = self._get_charset(payload) logger.debug("Got charset: %s" % (charset,)) try: - payload = payload.encode(charset) + if isinstance(payload, unicode): + payload = payload.encode(charset) except UnicodeError as exc: logger.error( "Unicode error, using 'replace'. {0!r}".format(exc)) @@ -376,13 +420,15 @@ class MessagePart(object): fd.seek(0) return fd - # TODO cache the phash retrieval + # TODO should memory-bound this memoize!!! + @memoized_method def _get_payload_from_document(self, phash): """ - Gets the message payload from the content document. + Return the message payload from the content document. :param phash: the payload hash to retrieve by. - :type phash: basestring + :type phash: str or unicode + :rtype: str or unicode """ cdocs = self._soledad.get_from_index( fields.TYPE_P_HASH_IDX, @@ -396,13 +442,15 @@ class MessagePart(object): payload = cdoc.content.get(fields.RAW_KEY, "") return payload - # TODO cache the pahash retrieval + # TODO should memory-bound this memoize!!! + @memoized_method def _get_ctype_from_document(self, phash): """ - Gets the content-type from the content document. + Reeturn the content-type from the content document. :param phash: the payload hash to retrieve by. - :type phash: basestring + :type phash: str or unicode + :rtype: str or unicode """ cdocs = self._soledad.get_from_index( fields.TYPE_P_HASH_IDX, @@ -423,13 +471,14 @@ class MessagePart(object): Gets (guesses?) the charset of a payload. :param stuff: the stuff to guess about. - :type stuff: basestring - :returns: charset + :type stuff: str or unicode + :return: charset + :rtype: unicode """ # XXX existential doubt 2. shouldn't we make the scope # of the decorator somewhat more persistent? # ah! yes! and put memory bounds. - return get_email_charset(unicode(stuff)) + return get_email_charset(stuff) def getHeaders(self, negate, *names): """ @@ -446,37 +495,42 @@ class MessagePart(object): :return: A mapping of header field names to header field values :rtype: dict """ + # XXX refactor together with MessagePart method if not self._pmap: logger.warning("No pmap in Subpart!") return {} headers = dict(self._pmap.get("headers", [])) - # twisted imap server expects *some* headers to be lowercase - # We could use a CaseInsensitiveDict here... 
- headers = dict( - (str(key), str(value)) if key.lower() != "content-type" - else (str(key.lower()), str(value)) - for (key, value) in headers.items()) - names = map(lambda s: s.upper(), names) if negate: cond = lambda key: key.upper() not in names else: cond = lambda key: key.upper() in names - # unpack and filter original dict by negate-condition - filter_by_cond = [ - map(str, (key, val)) for - key, val in headers.items() - if cond(key)] - filtered = dict(filter_by_cond) - return filtered + # default to most likely standard + charset = find_charset(headers, "utf-8") + headers2 = dict() + for key, value in headers.items(): + # twisted imap server expects *some* headers to be lowercase + # We could use a CaseInsensitiveDict here... + if key.lower() == "content-type": + key = key.lower() + + if not isinstance(key, str): + key = key.encode(charset, 'replace') + if not isinstance(value, str): + value = value.encode(charset, 'replace') + + # filter original dict by negate-condition + if cond(key): + headers2[key] = value + return headers2 def isMultipart(self): """ Return True if this message is multipart. """ - if not self._pmap: + if empty(self._pmap): logger.warning("Could not get part map!") return False multi = self._pmap.get("multi", False) @@ -495,6 +549,7 @@ class MessagePart(object): """ if not self.isMultipart(): raise TypeError + sub_pmap = self._pmap.get("part_map", {}) try: part_map = sub_pmap[str(part + 1)] diff --git a/mail/src/leap/mail/imap/messages.py b/mail/src/leap/mail/imap/messages.py index 7617fb83..315cdda6 100644 --- a/mail/src/leap/mail/imap/messages.py +++ b/mail/src/leap/mail/imap/messages.py @@ -58,10 +58,7 @@ logger = logging.getLogger(__name__) # [ ] Delete incoming mail only after successful write! # [ ] Remove UID from syncable db. Store only those indexes locally. -CHARSET_PATTERN = r"""charset=([\w-]+)""" MSGID_PATTERN = r"""<([\w@.]+)>""" - -CHARSET_RE = re.compile(CHARSET_PATTERN, re.IGNORECASE) MSGID_RE = re.compile(MSGID_PATTERN) @@ -202,8 +199,6 @@ class LeapMessage(fields, MailParser, MBoxParser): :return: The flags, represented as strings :rtype: tuple """ - #if self._uid is None: - #return [] uid = self._uid flags = set([]) @@ -252,7 +247,7 @@ class LeapMessage(fields, MailParser, MBoxParser): doc.content[self.DEL_KEY] = self.DELETED_FLAG in flags if self._collection.memstore is not None: - print "putting message in collection" + log.msg("putting message in collection") self._collection.memstore.put_message( self._mbox, self._uid, MessageWrapper(fdoc=doc.content, new=False, dirty=True, @@ -327,8 +322,8 @@ class LeapMessage(fields, MailParser, MBoxParser): if self._bdoc is not None: bdoc_content = self._bdoc.content if bdoc_content is None: - logger.warning("No BODC content found for message!!!") - return write_fd(str("")) + logger.warning("No BDOC content found for message!!!") + return write_fd("") body = bdoc_content.get(self.RAW_KEY, "") content_type = bdoc_content.get('content-type', "") @@ -337,20 +332,13 @@ class LeapMessage(fields, MailParser, MBoxParser): if charset is None: charset = self._get_charset(body) try: - body = body.encode(charset) + if isinstance(body, unicode): + body = body.encode(charset) except UnicodeError as exc: logger.error( "Unicode error, using 'replace'. {0!r}".format(exc)) logger.debug("Attempted to encode with: %s" % charset) - try: - body = body.encode(charset, 'replace') - - # XXX desperate attempt. 
I've seen things you wouldn't believe - except UnicodeError: - try: - body = body.encode('utf-8', 'replace') - except: - pass + body = body.encode(charset, 'replace') finally: return write_fd(body) @@ -409,6 +397,8 @@ class LeapMessage(fields, MailParser, MBoxParser): :rtype: dict """ # TODO split in smaller methods + # XXX refactor together with MessagePart method + headers = self._get_headers() if not headers: logger.warning("No headers found") @@ -425,11 +415,10 @@ class LeapMessage(fields, MailParser, MBoxParser): # default to most likely standard charset = find_charset(headers, "utf-8") - - # twisted imap server expects *some* headers to be lowercase - # XXX refactor together with MessagePart method headers2 = dict() for key, value in headers.items(): + # twisted imap server expects *some* headers to be lowercase + # We could use a CaseInsensitiveDict here... if key.lower() == "content-type": key = key.lower() @@ -441,7 +430,6 @@ class LeapMessage(fields, MailParser, MBoxParser): # filter original dict by negate-condition if cond(key): headers2[key] = value - return headers2 def _get_headers(self): @@ -547,10 +535,8 @@ class LeapMessage(fields, MailParser, MBoxParser): message. """ hdoc_content = self._hdoc.content - #print "hdoc: ", hdoc_content body_phash = hdoc_content.get( fields.BODY_KEY, None) - print "body phash: ", body_phash if not body_phash: logger.warning("No body phash for this document!") return None @@ -562,11 +548,8 @@ class LeapMessage(fields, MailParser, MBoxParser): if self._container is not None: bdoc = self._container.memstore.get_cdoc_from_phash(body_phash) - print "bdoc from container -->", bdoc if bdoc and bdoc.content is not None: return bdoc - else: - print "no doc or not bdoc content for that phash found!" # no memstore or no doc found there if self._soledad: @@ -590,77 +573,12 @@ class LeapMessage(fields, MailParser, MBoxParser): """ return self._fdoc.content.get(key, None) - # setters - - # XXX to be used in the messagecopier interface?! -# - #def set_uid(self, uid): - #""" - #Set new uid for this message. -# - #:param uid: the new uid - #:type uid: basestring - #""" - # XXX dangerous! lock? - #self._uid = uid - #d = self._fdoc - #d.content[self.UID_KEY] = uid - #self._soledad.put_doc(d) -# - #def set_mbox(self, mbox): - #""" - #Set new mbox for this message. -# - #:param mbox: the new mbox - #:type mbox: basestring - #""" - # XXX dangerous! lock? - #self._mbox = mbox - #d = self._fdoc - #d.content[self.MBOX_KEY] = mbox - #self._soledad.put_doc(d) - - # destructor - - # XXX this logic moved to remove_message in memory store... - #@deferred - #def remove(self): - #""" - #Remove all docs associated with this message. - #Currently it removes only the flags doc. - #""" - #fd = self._get_flags_doc() -# - #if fd.new: - # it's a new document, so we can remove it and it will not - # be writen. Watch out! We need to be sure it has not been - # just queued to write! - #memstore.remove_message(*key) -# - #if fd.dirty: - #doc_id = fd.doc_id - #doc = self._soledad.get_doc(doc_id) - #try: - #self._soledad.delete_doc(doc) - #except Exception as exc: - #logger.exception(exc) -# - #else: - # we just got a soledad_doc - #try: - #doc_id = fd.doc_id - #latest_doc = self._soledad.get_doc(doc_id) - #self._soledad.delete_doc(latest_doc) - #except Exception as exc: - #logger.exception(exc) - #return uid - def does_exist(self): """ - Return True if there is actually a flags message for this + Return True if there is actually a flags document for this UID and mbox. 
""" - return self._fdoc is not None + return not empty(self._fdoc) class MessageCollection(WithMsgFields, IndexedDB, MailParser, MBoxParser): @@ -938,8 +856,6 @@ class MessageCollection(WithMsgFields, IndexedDB, MailParser, MBoxParser): if not exist: exist = self._get_fdoc_from_chash(chash) - - print "FDOC EXIST?", exist if exist: return exist.content.get(fields.UID_KEY, "unknown-uid") else: @@ -974,7 +890,6 @@ class MessageCollection(WithMsgFields, IndexedDB, MailParser, MBoxParser): # TODO add the linked-from info ! # TODO add reference to the original message - print "ADDING MESSAGE..." logger.debug('adding message') if flags is None: @@ -990,15 +905,11 @@ class MessageCollection(WithMsgFields, IndexedDB, MailParser, MBoxParser): # move the complete check to the soledad writer? # Watch out! We're reserving a UID right after this! if self._fdoc_already_exists(chash): - print ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>" logger.warning("We already have that message in this mailbox.") - # note that this operation will leave holes in the UID sequence, - # but we're gonna change that all the same for a local-only table. - # so not touch it by the moment. return defer.succeed('already_exists') uid = self.memstore.increment_last_soledad_uid(self.mbox) - print "ADDING MSG WITH UID: %s" % uid + logger.info("ADDING MSG WITH UID: %s" % uid) fd = self._populate_flags(flags, uid, chash, size, multi) hd = self._populate_headr(msg, chash, subject, date) @@ -1017,58 +928,36 @@ class MessageCollection(WithMsgFields, IndexedDB, MailParser, MBoxParser): # The MessageContainer expects a dict, zero-indexed # XXX review-me - cdocs = dict((index, doc) for index, doc in - enumerate(walk.get_raw_docs(msg, parts))) + cdocs = dict(enumerate(walk.get_raw_docs(msg, parts))) self.set_recent_flag(uid) # Saving ---------------------------------------- - # XXX adapt hdocset to use memstore - #hdoc = self._soledad.create_doc(hd) - # We add the newly created hdoc to the fast-access set of - # headers documents associated with the mailbox. - #self.add_hdocset_docid(hdoc.doc_id) - # TODO ---- add reference to original doc, to be deleted # after writes are done. msg_container = MessageWrapper(fd, hd, cdocs) - # XXX Should allow also to dump to disk directly, - # for no-memstore cases. - # we return a deferred that by default will be triggered # inmediately. d = self.memstore.create_message(self.mbox, uid, msg_container, notify_on_disk=notify_on_disk) - print "adding message", d return d - #def remove(self, msg): - #""" - #Remove a given msg. - #:param msg: the message to be removed - #:type msg: LeapMessage - #""" - #d = msg.remove() - #d.addCallback(self._remove_cb) - #return d - # # getters: specific queries # # recent flags - # XXX FIXME ------------------------------------- - # This should be rewritten to use memory store. def _get_recent_flags(self): """ An accessor for the recent-flags set for this mailbox. """ + # XXX check if we should remove this if self.__rflags is not None: return self.__rflags - if self.memstore: + if self.memstore is not None: with self._rdoc_lock: rflags = self.memstore.get_recent_flags(self.mbox) if not rflags: @@ -1091,11 +980,12 @@ class MessageCollection(WithMsgFields, IndexedDB, MailParser, MBoxParser): fields.RECENTFLAGS_KEY, [])) return self.__rflags + @profile def _set_recent_flags(self, value): """ Setter for the recent-flags set for this mailbox. 
""" - if self.memstore: + if self.memstore is not None: self.memstore.set_recent_flags(self.mbox, value) else: @@ -1112,9 +1002,11 @@ class MessageCollection(WithMsgFields, IndexedDB, MailParser, MBoxParser): _get_recent_flags, _set_recent_flags, doc="Set of UIDs with the recent flag for this mailbox.") + # XXX change naming, indicate soledad query. def _get_recent_doc(self): """ - Get recent-flags document for this mailbox. + Get recent-flags document from Soledad for this mailbox. + :rtype: SoledadDocument or None """ curried = partial( self._soledad.get_from_index, @@ -1153,82 +1045,6 @@ class MessageCollection(WithMsgFields, IndexedDB, MailParser, MBoxParser): self.recent_flags = self.recent_flags.union( set([uid])) - # headers-docs-set - - # XXX FIXME ------------------------------------- - # This should be rewritten to use memory store. - - #def _get_hdocset(self): - #""" - #An accessor for the hdocs-set for this mailbox. - #""" - #if not self.__hdocset: - #with self._hdocset_lock: - #hdocset_doc = self._get_hdocset_doc() - #value = set(hdocset_doc.content.get( - #fields.HDOCS_SET_KEY, [])) - #self.__hdocset = value - #return self.__hdocset -# - #def _set_hdocset(self, value): - #""" - #Setter for the hdocs-set for this mailbox. - #""" - #with self._hdocset_lock: - #hdocset_doc = self._get_hdocset_doc() - #newv = set(value) - #self.__hdocset = newv - #hdocset_doc.content[fields.HDOCS_SET_KEY] = list(newv) - # XXX should deferLater 0 it? - #self._soledad.put_doc(hdocset_doc) -# - #_hdocset = property( - #_get_hdocset, _set_hdocset, - #doc="Set of Document-IDs for the headers docs associated " - #"with this mailbox.") -# - #def _get_hdocset_doc(self): - #""" - #Get hdocs-set document for this mailbox. - #""" - #curried = partial( - #self._soledad.get_from_index, - #fields.TYPE_MBOX_IDX, - #fields.TYPE_HDOCS_SET_VAL, self.mbox) - #curried.expected = "hdocset" - #hdocset_doc = try_unique_query(curried) - #return hdocset_doc -# - # Property-set modification (protected by a different - # lock to give atomicity to the read/write operation) -# - #def remove_hdocset_docids(self, docids): - #""" - #Remove the given document IDs from the set of - #header-documents associated with this mailbox. - #""" - #with self._hdocset_property_lock: - #self._hdocset = self._hdocset.difference( - #set(docids)) -# - #def remove_hdocset_docid(self, docid): - #""" - #Remove the given document ID from the set of - #header-documents associated with this mailbox. - #""" - #with self._hdocset_property_lock: - #self._hdocset = self._hdocset.difference( - #set([docid])) -# - #def add_hdocset_docid(self, docid): - #""" - #Add the given document ID to the set of - #header-documents associated with this mailbox. - #""" - #with self._hdocset_property_lock: - #self._hdocset = self._hdocset.union( - #set([docid])) - # individual doc getters, message layer. def _get_fdoc_from_chash(self, chash): @@ -1361,19 +1177,6 @@ class MessageCollection(WithMsgFields, IndexedDB, MailParser, MBoxParser): return (u for u in sorted(uids)) - # XXX Should be moved to memstore - #def reset_last_uid(self, param): - #""" - #Set the last uid to the highest uid found. - #Used while expunging, passed as a callback. 
- #""" - #try: - #self.last_uid = max(self.all_uid_iter()) + 1 - #except ValueError: - # empty sequence - #pass - #return param - # XXX MOVE to memstore def all_flags(self): """ @@ -1390,7 +1193,6 @@ class MessageCollection(WithMsgFields, IndexedDB, MailParser, MBoxParser): fields.TYPE_MBOX_IDX, fields.TYPE_FLAGS_VAL, self.mbox))) if self.memstore is not None: - # XXX uids = self.memstore.get_uids(self.mbox) docs = ((uid, self.memstore.get_message(self.mbox, uid)) for uid in uids) diff --git a/mail/src/leap/mail/imap/server.py b/mail/src/leap/mail/imap/server.py index 3a6ac9ac..b77678a4 100644 --- a/mail/src/leap/mail/imap/server.py +++ b/mail/src/leap/mail/imap/server.py @@ -20,6 +20,7 @@ Leap IMAP4 Server Implementation. from copy import copy from twisted import cred +from twisted.internet import defer from twisted.internet.defer import maybeDeferred from twisted.internet.task import deferLater from twisted.mail import imap4 @@ -132,6 +133,7 @@ class LeapIMAPServer(imap4.IMAP4Server): ).addErrback( ebFetch, tag) + # XXX should be a callback deferLater(reactor, 2, self.mbox.unset_recent_flags, messages) deferLater(reactor, 1, self.mbox.signal_unread_to_ui) @@ -139,12 +141,17 @@ class LeapIMAPServer(imap4.IMAP4Server): select_FETCH = (do_FETCH, imap4.IMAP4Server.arg_seqset, imap4.IMAP4Server.arg_fetchatt) + def on_copy_finished(self, defers): + d = defer.gatherResults(filter(None, defers)) + d.addCallback(self.notifyNew) + d.addCallback(self.mbox.signal_unread_to_ui) + def do_COPY(self, tag, messages, mailbox, uid=0): from twisted.internet import reactor - imap4.IMAP4Server.do_COPY(self, tag, messages, mailbox, uid) - deferLater(reactor, - 2, self.mbox.unset_recent_flags, messages) - deferLater(reactor, 1, self.mbox.signal_unread_to_ui) + defers = [] + d = imap4.IMAP4Server.do_COPY(self, tag, messages, mailbox, uid) + defers.append(d) + deferLater(reactor, 0, self.on_copy_finished, defers) select_COPY = (do_COPY, imap4.IMAP4Server.arg_seqset, imap4.IMAP4Server.arg_astring) @@ -201,5 +208,5 @@ class LeapIMAPServer(imap4.IMAP4Server): # back to the source mailbox... print "faking checkpoint..." import time - time.sleep(2) + time.sleep(5) return None diff --git a/mail/src/leap/mail/imap/soledadstore.py b/mail/src/leap/mail/imap/soledadstore.py index 60576a35..f64ed233 100644 --- a/mail/src/leap/mail/imap/soledadstore.py +++ b/mail/src/leap/mail/imap/soledadstore.py @@ -22,7 +22,6 @@ import threading from itertools import chain -#from twisted.internet import defer from u1db import errors as u1db_errors from zope.interface import implements @@ -71,7 +70,7 @@ class ContentDedup(object): Check whether we already have a header document for this content hash in our database. - :param doc: tentative header document + :param doc: tentative header for document :type doc: dict :returns: True if it exists, False otherwise. """ @@ -87,8 +86,7 @@ class ContentDedup(object): if len(header_docs) != 1: logger.warning("Found more than one copy of chash %s!" % (chash,)) - # XXX re-enable - #logger.debug("Found header doc with that hash! Skipping save!") + logger.debug("Found header doc with that hash! Skipping save!") return True def _content_does_exist(self, doc): @@ -96,7 +94,7 @@ class ContentDedup(object): Check whether we already have a content document for a payload with this hash in our database. - :param doc: tentative content document + :param doc: tentative content for document :type doc: dict :returns: True if it exists, False otherwise. 
""" @@ -112,8 +110,7 @@ class ContentDedup(object): if len(attach_docs) != 1: logger.warning("Found more than one copy of phash %s!" % (phash,)) - # XXX re-enable - #logger.debug("Found attachment doc with that hash! Skipping save!") + logger.debug("Found attachment doc with that hash! Skipping save!") return True @@ -151,38 +148,49 @@ class SoledadStore(ContentDedup): Create the passed message into this SoledadStore. :param mbox: the mbox this message belongs. + :type mbox: str or unicode :param uid: the UID that identifies this message in this mailbox. + :type uid: int :param message: a IMessageContainer implementor. """ + raise NotImplementedError() def put_message(self, mbox, uid, message): """ Put the passed existing message into this SoledadStore. :param mbox: the mbox this message belongs. + :type mbox: str or unicode :param uid: the UID that identifies this message in this mailbox. + :type uid: int :param message: a IMessageContainer implementor. """ + raise NotImplementedError() def remove_message(self, mbox, uid): """ Remove the given message from this SoledadStore. :param mbox: the mbox this message belongs. + :type mbox: str or unicode :param uid: the UID that identifies this message in this mailbox. + :type uid: int """ + raise NotImplementedError() def get_message(self, mbox, uid): """ Get a IMessageContainer for the given mbox and uid combination. :param mbox: the mbox this message belongs. + :type mbox: str or unicode :param uid: the UID that identifies this message in this mailbox. + :type uid: int """ + raise NotImplementedError() # IMessageConsumer - #@profile def consume(self, queue): """ Creates a new document in soledad db. @@ -198,8 +206,7 @@ class SoledadStore(ContentDedup): # TODO could generalize this method into a generic consumer # and only implement `process` here - empty = queue.empty() - while not empty: + while not queue.empty(): items = self._process(queue) # we prime the generator, that should return the @@ -213,23 +220,22 @@ class SoledadStore(ContentDedup): for item, call in items: try: self._try_call(call, item) - except Exception: - failed = True + except Exception as exc: + failed = exc continue if failed: raise MsgWriteError except MsgWriteError: logger.error("Error while processing item.") - pass + logger.exception(failed) else: if isinstance(doc_wrapper, MessageWrapper): # If everything went well, we can unset the new flag # in the source store (memory store) - print "unsetting new flag!" + logger.info("unsetting new flag!") doc_wrapper.new = False doc_wrapper.dirty = False - empty = queue.empty() # # SoledadStore specific methods. @@ -253,20 +259,24 @@ class SoledadStore(ContentDedup): return chain((doc_wrapper,), self._get_calls_for_rflags_doc(doc_wrapper)) else: - print "********************" - print "CANNOT PROCESS ITEM!" + logger.warning("CANNOT PROCESS ITEM!") return (i for i in []) def _try_call(self, call, item): """ Try to invoke a given call with item as a parameter. 
+ + :param call: the function to call + :type call: callable + :param item: the payload to pass to the call as argument + :type item: object """ - if not call: + if call is None: return try: call(item) except u1db_errors.RevisionConflict as exc: - logger.error("Error: %r" % (exc,)) + logger.exception("Error: %r" % (exc,)) raise exc def _get_calls_for_msg_parts(self, msg_wrapper): @@ -275,12 +285,14 @@ class SoledadStore(ContentDedup): :param msg_wrapper: A MessageWrapper :type msg_wrapper: IMessageContainer + :return: a generator of tuples with recent-flags doc payload + and callable + :rtype: generator """ call = None - if msg_wrapper.new is True: + if msg_wrapper.new: call = self._soledad.create_doc - print "NEW DOC ----------------------" # item is expected to be a MessagePartDoc for item in msg_wrapper.walk(): @@ -296,17 +308,12 @@ class SoledadStore(ContentDedup): elif item.part == MessagePartType.cdoc: if not self._content_does_exist(item.content): - - # XXX DEBUG ------------------- - print "about to write content-doc ", - #import pprint; pprint.pprint(item.content) - yield dict(item.content), call # For now, the only thing that will be dirty is # the flags doc. - elif msg_wrapper.dirty is True: + elif msg_wrapper.dirty: call = self._soledad.put_doc # item is expected to be a MessagePartDoc for item in msg_wrapper.walk(): @@ -327,6 +334,11 @@ class SoledadStore(ContentDedup): def _get_calls_for_rflags_doc(self, rflags_wrapper): """ We always put these documents. + + :param rflags_wrapper: A wrapper around recent flags doc. + :type rflags_wrapper: RecentFlagsWrapper + :return: a tuple with recent-flags doc payload and callable + :rtype: tuple """ call = self._soledad.put_doc rdoc = self._soledad.get_doc(rflags_wrapper.doc_id) @@ -342,6 +354,8 @@ class SoledadStore(ContentDedup): """ Return mailbox document. + :param mbox: the mailbox + :type mbox: str or unicode :return: A SoledadDocument containing this mailbox, or None if the query failed. :rtype: SoledadDocument or None. @@ -358,6 +372,11 @@ class SoledadStore(ContentDedup): def get_flags_doc(self, mbox, uid): """ Return the SoledadDocument for the given mbox and uid. + + :param mbox: the mailbox + :type mbox: str or unicode + :param uid: the UID for the message + :type uid: int """ try: flag_docs = self._soledad.get_from_index( @@ -378,6 +397,11 @@ class SoledadStore(ContentDedup): This is called from the deferred triggered by memorystore.increment_last_soledad_uid, which is expected to run in a separate thread. + + :param mbox: the mailbox + :type mbox: str or unicode + :param value: the value to set + :type value: int """ leap_assert_type(value, int) key = fields.LAST_UID_KEY @@ -398,6 +422,8 @@ class SoledadStore(ContentDedup): Get an iterator for the SoledadDocuments for messages with \\Deleted flag for a given mailbox. + :param mbox: the mailbox + :type mbox: str or unicode :return: iterator through deleted message docs :rtype: iterable """ @@ -410,13 +436,12 @@ class SoledadStore(ContentDedup): """ Remove from Soledad all messages flagged as deleted for a given mailbox. 
+ + :param mbox: the mailbox + :type mbox: str or unicode """ - print "DELETING ALL DOCS FOR -------", mbox deleted = [] for doc in self.deleted_iter(mbox): deleted.append(doc.content[fields.UID_KEY]) - print - print ">>>>>>>>>>>>>>>>>>>>" - print "deleting doc: ", doc.doc_id, doc.content self._soledad.delete_doc(doc) return deleted diff --git a/mail/src/leap/mail/imap/tests/leap_tests_imap.zsh b/mail/src/leap/mail/imap/tests/leap_tests_imap.zsh index 8f0df9f0..544facaa 100755 --- a/mail/src/leap/mail/imap/tests/leap_tests_imap.zsh +++ b/mail/src/leap/mail/imap/tests/leap_tests_imap.zsh @@ -61,8 +61,7 @@ IMAPTEST="imaptest" # These should be kept constant across benchmarking # runs across different machines, for comparability. -#DURATION=200 -DURATION=60 +DURATION=200 NUM_MSG=200 diff --git a/mail/src/leap/mail/size.py b/mail/src/leap/mail/size.py index 4880d716..c9eaabd3 100644 --- a/mail/src/leap/mail/size.py +++ b/mail/src/leap/mail/size.py @@ -48,10 +48,10 @@ def get_size(item): some memory, so use with care. :param item: the item which size wants to be computed + :rtype: int """ seen = set() size = _get_size(item, seen) - #print "len(seen) ", len(seen) del seen collect() return size diff --git a/mail/src/leap/mail/utils.py b/mail/src/leap/mail/utils.py index 1f439476..6a1fcdeb 100644 --- a/mail/src/leap/mail/utils.py +++ b/mail/src/leap/mail/utils.py @@ -21,6 +21,8 @@ import json import re import traceback +from leap.soledad.common.document import SoledadDocument + CHARSET_PATTERN = r"""charset=([\w-]+)""" CHARSET_RE = re.compile(CHARSET_PATTERN, re.IGNORECASE) @@ -42,6 +44,8 @@ def empty(thing): """ if thing is None: return True + if isinstance(thing, SoledadDocument): + thing = thing.content try: return len(thing) == 0 except ReferenceError: |