summaryrefslogtreecommitdiff
path: root/src/leap/mail/sync_hooks.py
blob: b5bded5b150f388e6b860d63d394d3131ff5997d (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
# -*- coding: utf-8 -*-
# sync_hooks.py
# Copyright (C) 2015 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""
Soledad PostSync Hooks.

Process every new document of interest after every soledad synchronization,
using the hooks that soledad exposes via plugins.
"""
import logging

from re import compile as regex_compile

from zope.interface import implements
from twisted.internet import defer
from twisted.plugin import IPlugin
from twisted.python import log

from leap.soledad.client.interfaces import ISoledadPostSyncPlugin
from leap.mail import constants


logger = logging.getLogger(__name__)

_get_doc_type_preffix = lambda s: s[:2]


class MailProcessingPostSyncHook(object):
    """
    Soledad post-sync hook that indexes freshly synced mail metadata docs.

    Registered as a twisted plugin implementing ISoledadPostSyncPlugin;
    soledad feeds it the ids of documents received on each sync.  Until an
    account is set (via `set_account`), watched doc ids are queued; once the
    account arrives the backlog is drained and subsequent docs are indexed
    immediately.
    """
    implements(IPlugin, ISoledadPostSyncPlugin)

    # only metamsg documents are of interest to this hook
    META_DOC_PREFFIX = _get_doc_type_preffix(constants.METAMSGID)
    watched_doc_types = (META_DOC_PREFFIX, )

    def __init__(self):
        # NOTE(fix): these used to be mutable *class* attributes, which makes
        # every instance of the hook share the same queue and deferred lists.
        # Instance attributes avoid that aliasing; the no-arg __init__ keeps
        # instantiation backward-compatible.
        self._account = None
        self._pending_docs = []
        self._processing_deferreds = []

    def process_received_docs(self, doc_id_list):
        """
        Process the doc ids received in the last sync.

        :param doc_id_list: iterable of received doc ids.
        :return: a deferred that fires when all scheduled indexing work is
                 done, or None when nothing was scheduled.
        """
        # NOTE(fix): reset on *every* call.  Previously this was only reset
        # when no account was configured, so deferreds from earlier syncs
        # were gathered again on each subsequent sync.
        self._processing_deferreds = []
        if self._has_configured_account():
            process_fun = self._make_uid_index
        else:
            process_fun = self._queue_doc_id

        for doc_id in doc_id_list:
            if _get_doc_type_preffix(doc_id) in self.watched_doc_types:
                log.msg("Mail post-sync hook: processing %s" % doc_id)
                process_fun(doc_id)

        if self._processing_deferreds:
            return defer.gatherResults(self._processing_deferreds)

    def set_account(self, account):
        """
        Set the account used for indexing and drain any queued doc ids.

        :param account: the account object whose ``account.mbox_indexer``
            will be used to build the uid indexes.
        """
        self._account = account
        if account:
            self._process_queued_docs()

    def _has_configured_account(self):
        # True once set_account has provided an account to index with.
        return self._account is not None

    def _queue_doc_id(self, doc_id):
        # Stash the doc id until an account is available to process it.
        self._pending_docs.append(doc_id)

    def _make_uid_index(self, mdoc_id):
        """
        Create (if needed) the uid index table for the mailbox this metamsg
        doc belongs to and insert an index entry for it.  The resulting
        deferred is collected into self._processing_deferreds.
        """
        indexer = self._account.account.mbox_indexer
        mbox_uuid = _get_mbox_uuid(mdoc_id)
        if mbox_uuid:
            chash = _get_chash_from_mdoc(mdoc_id)
            logger.debug("Making index table for %s:%s" % (mbox_uuid, chash))
            index_docid = constants.METAMSGID.format(
                mbox_uuid=mbox_uuid.replace('-', '_'),
                chash=chash)
            # XXX could avoid creating table if I track which ones I already
            # have seen -- but make sure *it's already created* before
            # inserting the index entry!.
            d = indexer.create_table(mbox_uuid)
            d.addCallback(lambda _: indexer.insert_doc(mbox_uuid, index_docid))
            self._processing_deferreds.append(d)

    def _process_queued_docs(self):
        """
        Process the doc ids that were queued while no account was set.

        :return: a deferred firing when the backlog has been processed, or
                 None when there was nothing to index.
        """
        # Only reached from set_account with a truthy account, so this
        # should always hold (assert is stripped under -O, but it is an
        # internal invariant, not input validation).
        assert(self._has_configured_account())
        pending = self._pending_docs
        log.msg("Mail post-sync hook: processing queued docs")

        def remove_pending_docs(res):
            # clear the backlog only after it was processed successfully
            self._pending_docs = []
            return res

        d = self.process_received_docs(pending)
        if d:
            d.addCallback(remove_pending_docs)
            return d


# Pre-compiled patterns (hoisted to module level) used to pull the mailbox
# uuid and the content hash out of a metamsg doc id; the actual patterns
# live in leap.mail.constants.
_mbox_uuid_regex = regex_compile(constants.METAMSGID_MBOX_RE)
_mdoc_chash_regex = regex_compile(constants.METAMSGID_CHASH_RE)


def _get_mbox_uuid(doc_id):
    """
    Extract the mailbox uuid from a metamsg doc id.

    :return: the uuid with underscores mapped back to dashes, or None when
             the doc id does not match the expected pattern.
    """
    found = _mbox_uuid_regex.findall(doc_id)
    return found[0].replace('_', '-') if found else None


def _get_chash_from_mdoc(doc_id):
    """
    Extract the content hash from a metamsg doc id.

    :return: the content hash string, or None when the doc id does not
             match the expected pattern.
    """
    found = _mdoc_chash_regex.findall(doc_id)
    return found[0] if found else None