From 7ad507d24ee8c6f38d527c298311159594922658 Mon Sep 17 00:00:00 2001 From: drebs Date: Mon, 24 Dec 2012 12:24:41 -0200 Subject: Enforce need of password for SQLCipherDatabase --- src/leap/soledad/backends/sqlcipher.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/sqlcipher.py b/src/leap/soledad/backends/sqlcipher.py index 6fd6e619..ae9ca28a 100644 --- a/src/leap/soledad/backends/sqlcipher.py +++ b/src/leap/soledad/backends/sqlcipher.py @@ -39,7 +39,7 @@ from u1db import ( ) -def open(path, create, document_factory=None, password=None): +def open(path, create, password, document_factory=None): """Open a database at the given location. Will raise u1db.errors.DatabaseDoesNotExist if create=False and the @@ -53,7 +53,7 @@ def open(path, create, document_factory=None, password=None): :return: An instance of Database. """ from u1db.backends import sqlite_backend - return sqlite_backend.SQLCipherDatabase.open_database( + return SQLCipherDatabase.open_database( path, password, create=create, document_factory=document_factory) @@ -68,7 +68,7 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase): db_handle.cursor().execute("PRAGMA key = '%s'" % key) def __init__(self, sqlite_file, password, document_factory=None): - """Create a new sqlite file.""" + """Create a new sqlcipher file.""" self._db_handle = dbapi2.connect(sqlite_file) SQLCipherDatabase.set_pragma_key(self._db_handle, password) self._real_replica_uid = None -- cgit v1.2.3 From 7aee1e34baeafcc12d44c304bac7fb6fcf3be562 Mon Sep 17 00:00:00 2001 From: drebs Date: Thu, 27 Dec 2012 11:09:39 -0200 Subject: Fix imports and namespaces --- src/leap/soledad/backends/__init__.py | 5 + src/leap/soledad/backends/couch.py | 4 +- src/leap/soledad/backends/leap.py | 175 ------------------------------ src/leap/soledad/backends/leap_backend.py | 174 +++++++++++++++++++++++++++++ src/leap/soledad/backends/objectstore.py | 6 +- 5 files changed, 184 insertions(+), 180 deletions(-) delete mode 100644 src/leap/soledad/backends/leap.py create mode 100644 src/leap/soledad/backends/leap_backend.py (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/__init__.py b/src/leap/soledad/backends/__init__.py index e69de29b..72907f37 100644 --- a/src/leap/soledad/backends/__init__.py +++ b/src/leap/soledad/backends/__init__.py @@ -0,0 +1,5 @@ +import objectstore + + +__all__ = [ + 'objectstore'] diff --git a/src/leap/soledad/backends/couch.py b/src/leap/soledad/backends/couch.py index ed356fdd..101dd4ea 100644 --- a/src/leap/soledad/backends/couch.py +++ b/src/leap/soledad/backends/couch.py @@ -2,8 +2,8 @@ from u1db import errors from u1db.remote.http_target import HTTPSyncTarget from couchdb.client import Server, Document from couchdb.http import ResourceNotFound -from soledad.backends.objectstore import ObjectStore -from soledad.backends.leap import LeapDocument +from leap.soledad.backends.objectstore import ObjectStore +from leap.soledad.backends.leap_backend import LeapDocument try: import simplejson as json diff --git a/src/leap/soledad/backends/leap.py b/src/leap/soledad/backends/leap.py deleted file mode 100644 index 9fbd49fe..00000000 --- a/src/leap/soledad/backends/leap.py +++ /dev/null @@ -1,175 +0,0 @@ -try: - import simplejson as json -except ImportError: - import json # noqa - -from u1db import Document -from u1db.remote.http_target import HTTPSyncTarget -from u1db.remote.http_database import HTTPDatabase -import base64 -from soledad.util 
import GPGWrapper - - -class NoDefaultKey(Exception): - pass - -class NoSoledadInstance(Exception): - pass - - -class LeapDocument(Document): - """ - LEAP Documents are standard u1db documents with cabability of returning an - encrypted version of the document json string as well as setting document - content based on an encrypted version of json string. - """ - - def __init__(self, doc_id=None, rev=None, json='{}', has_conflicts=False, - encrypted_json=None, soledad=None): - super(LeapDocument, self).__init__(doc_id, rev, json, has_conflicts) - self._soledad = soledad - if encrypted_json: - self.set_encrypted_json(encrypted_json) - - def get_encrypted_json(self): - """ - Returns document's json serialization encrypted with user's public key. - """ - if not self._soledad: - raise NoSoledadInstance() - ciphertext = self._soledad.encrypt_symmetric(self.doc_id, self.get_json()) - return json.dumps({'_encrypted_json' : ciphertext}) - - def set_encrypted_json(self, encrypted_json): - """ - Set document's content based on encrypted version of json string. - """ - if not self._soledad: - raise NoSoledadInstance() - ciphertext = json.loads(encrypted_json)['_encrypted_json'] - plaintext = self._soledad.decrypt_symmetric(self.doc_id, ciphertext) - return self.set_json(plaintext) - - -class LeapDatabase(HTTPDatabase): - """Implement the HTTP remote database API to a Leap server.""" - - def __init__(self, url, document_factory=None, creds=None, soledad=None): - super(LeapDatabase, self).__init__(url, creds=creds) - self._soledad = soledad - self._factory = LeapDocument - - @staticmethod - def open_database(url, create): - db = LeapDatabase(url) - db.open(create) - return db - - @staticmethod - def delete_database(url): - db = LeapDatabase(url) - db._delete() - db.close() - - def get_sync_target(self): - st = LeapSyncTarget(self._url.geturl()) - st._creds = self._creds - return st - - def create_doc_from_json(self, content, doc_id=None): - if doc_id is None: - doc_id = self._allocate_doc_id() - res, headers = self._request_json('PUT', ['doc', doc_id], {}, - content, 'application/json') - new_doc = self._factory(doc_id, res['rev'], content, soledad=self._soledad) - return new_doc - - -class LeapSyncTarget(HTTPSyncTarget): - - def __init__(self, url, creds=None, soledad=None): - super(LeapSyncTarget, self).__init__(url, creds) - self._soledad = soledad - - def _parse_sync_stream(self, data, return_doc_cb, ensure_callback=None): - """ - Does the same as parent's method but ensures incoming content will be - decrypted. - """ - parts = data.splitlines() # one at a time - if not parts or parts[0] != '[': - raise BrokenSyncStream - data = parts[1:-1] - comma = False - if data: - line, comma = utils.check_and_strip_comma(data[0]) - res = json.loads(line) - if ensure_callback and 'replica_uid' in res: - ensure_callback(res['replica_uid']) - for entry in data[1:]: - if not comma: # missing in between comma - raise BrokenSyncStream - line, comma = utils.check_and_strip_comma(entry) - entry = json.loads(line) - # decrypt after receiving from server. 
- doc = LeapDocument(entry['id'], entry['rev'], - encrypted_json=entry['content'], - soledad=self._soledad) - return_doc_cb(doc, entry['gen'], entry['trans_id']) - if parts[-1] != ']': - try: - partdic = json.loads(parts[-1]) - except ValueError: - pass - else: - if isinstance(partdic, dict): - self._error(partdic) - raise BrokenSyncStream - if not data or comma: # no entries or bad extra comma - raise BrokenSyncStream - return res - - def sync_exchange(self, docs_by_generations, source_replica_uid, - last_known_generation, last_known_trans_id, - return_doc_cb, ensure_callback=None): - """ - Does the same as parent's method but encrypts content before syncing. - """ - self._ensure_connection() - if self._trace_hook: # for tests - self._trace_hook('sync_exchange') - url = '%s/sync-from/%s' % (self._url.path, source_replica_uid) - self._conn.putrequest('POST', url) - self._conn.putheader('content-type', 'application/x-u1db-sync-stream') - for header_name, header_value in self._sign_request('POST', url, {}): - self._conn.putheader(header_name, header_value) - entries = ['['] - size = 1 - - def prepare(**dic): - entry = comma + '\r\n' + json.dumps(dic) - entries.append(entry) - return len(entry) - - comma = '' - size += prepare( - last_known_generation=last_known_generation, - last_known_trans_id=last_known_trans_id, - ensure=ensure_callback is not None) - comma = ',' - for doc, gen, trans_id in docs_by_generations: - # encrypt before sending to server. - size += prepare(id=doc.doc_id, rev=doc.rev, - content=doc.get_encrypted_json(), - gen=gen, trans_id=trans_id) - entries.append('\r\n]') - size += len(entries[-1]) - self._conn.putheader('content-length', str(size)) - self._conn.endheaders() - for entry in entries: - self._conn.send(entry) - entries = None - data, _ = self._response() - res = self._parse_sync_stream(data, return_doc_cb, ensure_callback) - data = None - return res['new_generation'], res['new_transaction_id'] diff --git a/src/leap/soledad/backends/leap_backend.py b/src/leap/soledad/backends/leap_backend.py new file mode 100644 index 00000000..ced8734c --- /dev/null +++ b/src/leap/soledad/backends/leap_backend.py @@ -0,0 +1,174 @@ +try: + import simplejson as json +except ImportError: + import json # noqa + +from u1db import Document +from u1db.remote.http_target import HTTPSyncTarget +from u1db.remote.http_database import HTTPDatabase +from leap.soledad.util import GPGWrapper + + +class NoDefaultKey(Exception): + pass + +class NoSoledadInstance(Exception): + pass + + +class LeapDocument(Document): + """ + LEAP Documents are standard u1db documents with cabability of returning an + encrypted version of the document json string as well as setting document + content based on an encrypted version of json string. + """ + + def __init__(self, doc_id=None, rev=None, json='{}', has_conflicts=False, + encrypted_json=None, soledad=None): + super(LeapDocument, self).__init__(doc_id, rev, json, has_conflicts) + self._soledad = soledad + if encrypted_json: + self.set_encrypted_json(encrypted_json) + + def get_encrypted_json(self): + """ + Returns document's json serialization encrypted with user's public key. + """ + if not self._soledad: + raise NoSoledadInstance() + ciphertext = self._soledad.encrypt_symmetric(self.doc_id, self.get_json()) + return json.dumps({'_encrypted_json' : ciphertext}) + + def set_encrypted_json(self, encrypted_json): + """ + Set document's content based on encrypted version of json string. 
+ """ + if not self._soledad: + raise NoSoledadInstance() + ciphertext = json.loads(encrypted_json)['_encrypted_json'] + plaintext = self._soledad.decrypt_symmetric(self.doc_id, ciphertext) + return self.set_json(plaintext) + + +class LeapDatabase(HTTPDatabase): + """Implement the HTTP remote database API to a Leap server.""" + + def __init__(self, url, document_factory=None, creds=None, soledad=None): + super(LeapDatabase, self).__init__(url, creds=creds) + self._soledad = soledad + self._factory = LeapDocument + + @staticmethod + def open_database(url, create): + db = LeapDatabase(url) + db.open(create) + return db + + @staticmethod + def delete_database(url): + db = LeapDatabase(url) + db._delete() + db.close() + + def get_sync_target(self): + st = LeapSyncTarget(self._url.geturl()) + st._creds = self._creds + return st + + def create_doc_from_json(self, content, doc_id=None): + if doc_id is None: + doc_id = self._allocate_doc_id() + res, headers = self._request_json('PUT', ['doc', doc_id], {}, + content, 'application/json') + new_doc = self._factory(doc_id, res['rev'], content, soledad=self._soledad) + return new_doc + + +class LeapSyncTarget(HTTPSyncTarget): + + def __init__(self, url, creds=None, soledad=None): + super(LeapSyncTarget, self).__init__(url, creds) + self._soledad = soledad + + def _parse_sync_stream(self, data, return_doc_cb, ensure_callback=None): + """ + Does the same as parent's method but ensures incoming content will be + decrypted. + """ + parts = data.splitlines() # one at a time + if not parts or parts[0] != '[': + raise BrokenSyncStream + data = parts[1:-1] + comma = False + if data: + line, comma = utils.check_and_strip_comma(data[0]) + res = json.loads(line) + if ensure_callback and 'replica_uid' in res: + ensure_callback(res['replica_uid']) + for entry in data[1:]: + if not comma: # missing in between comma + raise BrokenSyncStream + line, comma = utils.check_and_strip_comma(entry) + entry = json.loads(line) + # decrypt after receiving from server. + doc = LeapDocument(entry['id'], entry['rev'], + encrypted_json=entry['content'], + soledad=self._soledad) + return_doc_cb(doc, entry['gen'], entry['trans_id']) + if parts[-1] != ']': + try: + partdic = json.loads(parts[-1]) + except ValueError: + pass + else: + if isinstance(partdic, dict): + self._error(partdic) + raise BrokenSyncStream + if not data or comma: # no entries or bad extra comma + raise BrokenSyncStream + return res + + def sync_exchange(self, docs_by_generations, source_replica_uid, + last_known_generation, last_known_trans_id, + return_doc_cb, ensure_callback=None): + """ + Does the same as parent's method but encrypts content before syncing. + """ + self._ensure_connection() + if self._trace_hook: # for tests + self._trace_hook('sync_exchange') + url = '%s/sync-from/%s' % (self._url.path, source_replica_uid) + self._conn.putrequest('POST', url) + self._conn.putheader('content-type', 'application/x-u1db-sync-stream') + for header_name, header_value in self._sign_request('POST', url, {}): + self._conn.putheader(header_name, header_value) + entries = ['['] + size = 1 + + def prepare(**dic): + entry = comma + '\r\n' + json.dumps(dic) + entries.append(entry) + return len(entry) + + comma = '' + size += prepare( + last_known_generation=last_known_generation, + last_known_trans_id=last_known_trans_id, + ensure=ensure_callback is not None) + comma = ',' + for doc, gen, trans_id in docs_by_generations: + # encrypt before sending to server. 
+ size += prepare(id=doc.doc_id, rev=doc.rev, + content=doc.get_encrypted_json(), + gen=gen, trans_id=trans_id) + entries.append('\r\n]') + size += len(entries[-1]) + self._conn.putheader('content-length', str(size)) + self._conn.endheaders() + for entry in entries: + self._conn.send(entry) + entries = None + data, _ = self._response() + res = self._parse_sync_stream(data, return_doc_cb, ensure_callback) + data = None + return res['new_generation'], res['new_transaction_id'] diff --git a/src/leap/soledad/backends/objectstore.py b/src/leap/soledad/backends/objectstore.py index 61445a1f..9fce88f6 100644 --- a/src/leap/soledad/backends/objectstore.py +++ b/src/leap/soledad/backends/objectstore.py @@ -1,7 +1,7 @@ import uuid from u1db.backends import CommonBackend from u1db import errors, Document -from soledad.util import SyncLog, TransactionLog +from leap.soledad import util as soledadutil class ObjectStore(CommonBackend): @@ -14,8 +14,8 @@ class ObjectStore(CommonBackend): # with the database is established, so it can ensure that u1db data is # configured and up-to-date. self.set_document_factory(Document) - self._sync_log = SyncLog() - self._transaction_log = TransactionLog() + self._sync_log = soledadutil.SyncLog() + self._transaction_log = soledadutil.TransactionLog() self._ensure_u1db_data() #------------------------------------------------------------------------- -- cgit v1.2.3 From 3013233ec2b7e9bb2d7d6a2bccbd6e7a0342d8a5 Mon Sep 17 00:00:00 2001 From: drebs Date: Thu, 3 Jan 2013 15:23:11 -0200 Subject: u1db tests run with nose2 --- src/leap/soledad/backends/sqlcipher.py | 2 -- 1 file changed, 2 deletions(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/sqlcipher.py b/src/leap/soledad/backends/sqlcipher.py index ae9ca28a..34434cf5 100644 --- a/src/leap/soledad/backends/sqlcipher.py +++ b/src/leap/soledad/backends/sqlcipher.py @@ -27,8 +27,6 @@ import sys import time import uuid -import pkg_resources - from u1db.backends import CommonBackend, CommonSyncTarget from u1db.backends.sqlite_backend import SQLitePartialExpandDatabase from u1db import ( -- cgit v1.2.3 From 9d82ab69877378382efbf4d73afb334b23ab56f6 Mon Sep 17 00:00:00 2001 From: drebs Date: Thu, 3 Jan 2013 17:16:19 -0200 Subject: LeapDatabase passes HTTPDatabase tests. --- src/leap/soledad/backends/leap_backend.py | 6 ++++++ 1 file changed, 6 insertions(+) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/leap_backend.py b/src/leap/soledad/backends/leap_backend.py index ced8734c..a79fc9e7 100644 --- a/src/leap/soledad/backends/leap_backend.py +++ b/src/leap/soledad/backends/leap_backend.py @@ -8,6 +8,8 @@ from u1db.remote.http_target import HTTPSyncTarget from u1db.remote.http_database import HTTPDatabase from leap.soledad.util import GPGWrapper +import uuid + class NoDefaultKey(Exception): pass @@ -70,6 +72,10 @@ class LeapDatabase(HTTPDatabase): db._delete() db.close() + def _allocate_doc_id(self): + """Generate a unique identifier for this document.""" + return 'D-' + uuid.uuid4().hex # 'D-' stands for document + def get_sync_target(self): st = LeapSyncTarget(self._url.geturl()) st._creds = self._creds -- cgit v1.2.3 From b57548757480aa33e1ba6e4ebf626721a4aa43bc Mon Sep 17 00:00:00 2001 From: drebs Date: Thu, 3 Jan 2013 17:42:34 -0200 Subject: LeapSyncTarget passes HTTPSyncTarget tests. 
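
The leap_backend module introduced above hinges on one round trip: LeapDocument.get_encrypted_json() wraps the document JSON in a {'_encrypted_json': ...} envelope via the Soledad object's encrypt_symmetric(doc_id, ...), and set_encrypted_json() reverses it with decrypt_symmetric(doc_id, ...); LeapSyncTarget calls the former before sending entries and the latter while parsing the sync stream. A minimal sketch of that round trip, assuming the leap.soledad package is importable and using a dummy stand-in for the real Soledad crypto provider (base64 only, not actual encryption):

    from base64 import b64encode, b64decode
    from leap.soledad.backends.leap_backend import LeapDocument

    class DummySoledad(object):
        # Stand-in for the Soledad crypto provider assumed by LeapDocument;
        # the real object performs symmetric encryption keyed per doc_id.
        def encrypt_symmetric(self, doc_id, plaintext):
            return b64encode(plaintext)
        def decrypt_symmetric(self, doc_id, ciphertext):
            return b64decode(ciphertext)

    soledad = DummySoledad()
    doc = LeapDocument(doc_id='doc-1', json='{"secret": 1}', soledad=soledad)
    envelope = doc.get_encrypted_json()       # '{"_encrypted_json": "..."}'
    copy = LeapDocument(doc_id='doc-1', encrypted_json=envelope, soledad=soledad)
    assert copy.content == doc.content
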
--- src/leap/soledad/backends/leap_backend.py | 4 ++++ 1 file changed, 4 insertions(+) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/leap_backend.py b/src/leap/soledad/backends/leap_backend.py index a79fc9e7..5a7dfa2f 100644 --- a/src/leap/soledad/backends/leap_backend.py +++ b/src/leap/soledad/backends/leap_backend.py @@ -4,12 +4,16 @@ except ImportError: import json # noqa from u1db import Document +from u1db.remote import utils from u1db.remote.http_target import HTTPSyncTarget from u1db.remote.http_database import HTTPDatabase +from u1db.errors import BrokenSyncStream from leap.soledad.util import GPGWrapper import uuid +import uuid + class NoDefaultKey(Exception): pass -- cgit v1.2.3 From 9175349330a205f2c799a02722469cc86efd33db Mon Sep 17 00:00:00 2001 From: drebs Date: Tue, 8 Jan 2013 16:12:41 -0200 Subject: Refactor and details of ObjectStore and CouchDatabase --- src/leap/soledad/backends/couch.py | 26 ++++++++++++++++++++++---- src/leap/soledad/backends/objectstore.py | 21 +++++++++++++-------- 2 files changed, 35 insertions(+), 12 deletions(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/couch.py b/src/leap/soledad/backends/couch.py index 101dd4ea..8603a36b 100644 --- a/src/leap/soledad/backends/couch.py +++ b/src/leap/soledad/backends/couch.py @@ -14,7 +14,7 @@ except ImportError: class CouchDatabase(ObjectStore): """A U1DB implementation that uses Couch as its persistence layer.""" - def __init__(self, url, database, full_commit=True, session=None): + def __init__(self, url, database, replica_uid=None, full_commit=True, session=None): """Create a new Couch data container.""" self._url = url self._full_commit = full_commit @@ -22,6 +22,7 @@ class CouchDatabase(ObjectStore): self._server = Server(url=self._url, full_commit=self._full_commit, session=self._session) + self._dbname = database # this will ensure that transaction and sync logs exist and are # up-to-date. self.set_document_factory(LeapDocument) @@ -30,7 +31,7 @@ class CouchDatabase(ObjectStore): except ResourceNotFound: self._server.create(database) self._database = self._server[database] - super(CouchDatabase, self).__init__() + super(CouchDatabase, self).__init__(replica_uid=replica_uid) #------------------------------------------------------------------------- # implemented methods from Database @@ -84,7 +85,15 @@ class CouchDatabase(ObjectStore): return CouchSyncTarget(self) def close(self): - raise NotImplementedError(self.close) + # TODO: fix this method so the connection is properly closed and + # test_close (+tearDown, which deletes the db) works without problems. + self._url = None + self._full_commit = None + self._session = None + #self._server = None + self._database = None + return True + def sync(self, url, creds=None, autocreate=True): from u1db.sync import Synchronizer @@ -100,11 +109,20 @@ class CouchDatabase(ObjectStore): self._replica_uid = content['replica_uid'] self._couch_rev = cdoc['_rev'] + def _set_u1db_data(self): + doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID) + doc.content = { 'transaction_log' : self._transaction_log.log, + 'sync_log' : self._sync_log.log, + 'replica_uid' : self._replica_uid, + '_rev' : self._couch_rev} + self._put_doc(doc) + #------------------------------------------------------------------------- # Couch specific methods #------------------------------------------------------------------------- - # no specific methods so far. 
+ def delete_database(self): + del(self._server[self._dbname]) class CouchSyncTarget(HTTPSyncTarget): diff --git a/src/leap/soledad/backends/objectstore.py b/src/leap/soledad/backends/objectstore.py index 9fce88f6..54ffa9dd 100644 --- a/src/leap/soledad/backends/objectstore.py +++ b/src/leap/soledad/backends/objectstore.py @@ -9,13 +9,14 @@ class ObjectStore(CommonBackend): A backend for storing u1db data in an object store. """ - def __init__(self): + def __init__(self, replica_uid=None): # This initialization method should be called after the connection # with the database is established, so it can ensure that u1db data is # configured and up-to-date. self.set_document_factory(Document) self._sync_log = soledadutil.SyncLog() self._transaction_log = soledadutil.TransactionLog() + self._replica_uid = replica_uid self._ensure_u1db_data() #------------------------------------------------------------------------- @@ -183,7 +184,8 @@ class ObjectStore(CommonBackend): """ Create u1db data object in store. """ - self._replica_uid = uuid.uuid4().hex + if self._replica_uid is None: + self._replica_uid = uuid.uuid4().hex doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID) doc.content = { 'transaction_log' : [], 'sync_log' : [], @@ -200,11 +202,14 @@ class ObjectStore(CommonBackend): """ Save u1db configuration data on backend storage. """ - doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID) - doc.content = { 'transaction_log' : self._transaction_log.log, - 'sync_log' : self._sync_log.log, - 'replica_uid' : self._replica_uid, - '_rev' : self._couch_rev} - self._put_doc(doc) + NotImplementedError(self._set_u1db_data) + + def _set_replica_uid(self, replica_uid): + self._replica_uid = replica_uid + self._set_u1db_data() + def _get_replica_uid(self): + return self._replica_uid + replica_uid = property( + _get_replica_uid, _set_replica_uid, doc="Replica UID of the database") -- cgit v1.2.3 From a59b34f37fb687de77d1a94f41f53a961baad348 Mon Sep 17 00:00:00 2001 From: drebs Date: Tue, 8 Jan 2013 17:25:06 -0200 Subject: SQLCipherBackend passes all relevant u1db tests. --- src/leap/soledad/backends/sqlcipher.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/sqlcipher.py b/src/leap/soledad/backends/sqlcipher.py index 34434cf5..3b03bc95 100644 --- a/src/leap/soledad/backends/sqlcipher.py +++ b/src/leap/soledad/backends/sqlcipher.py @@ -37,7 +37,7 @@ from u1db import ( ) -def open(path, create, password, document_factory=None): +def open(path, password, create, document_factory=None): """Open a database at the given location. Will raise u1db.errors.DatabaseDoesNotExist if create=False and the -- cgit v1.2.3 From 9b7728b80027f35c013a449b884235cf8eac503c Mon Sep 17 00:00:00 2001 From: drebs Date: Wed, 9 Jan 2013 11:46:58 -0200 Subject: CouchDatabase passes u1db LocalDatabaseTests. 
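
The ObjectStore/Couch refactor above, and the patch below, keep all u1db bookkeeping (sync log, transaction log, conflict log, replica uid) inside a single reserved CouchDB document referenced by U1DB_DATA_DOC_ID, serialized as JSON under a 'u1db_json' field; _get_u1db_data and _set_u1db_data read and write that document on each metadata access. A rough illustration of the same storage pattern using couchdb-python directly (assumes a CouchDB server on localhost; 'u1db_data' is a placeholder id here, the actual constant is defined elsewhere in the class):

    import json
    from couchdb.client import Server

    U1DB_DATA_DOC_ID = 'u1db_data'   # placeholder; real value lives on ObjectStore

    server = Server('http://localhost:5984')
    db = server.create('soledad_example')

    # roughly what _set_u1db_data persists
    db[U1DB_DATA_DOC_ID] = {'u1db_json': json.dumps({
        'sync_log': [], 'transaction_log': [],
        'conflict_log': [], 'replica_uid': 'replica-1'})}

    # roughly what _get_u1db_data reads back
    meta = json.loads(db[U1DB_DATA_DOC_ID]['u1db_json'])
    assert meta['replica_uid'] == 'replica-1'
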
--- src/leap/soledad/backends/couch.py | 8 +++- src/leap/soledad/backends/objectstore.py | 69 +++++++++++++++++++++++++++----- 2 files changed, 65 insertions(+), 12 deletions(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/couch.py b/src/leap/soledad/backends/couch.py index 8603a36b..14021737 100644 --- a/src/leap/soledad/backends/couch.py +++ b/src/leap/soledad/backends/couch.py @@ -58,6 +58,8 @@ class CouchDatabase(ObjectStore): generation = self._get_generation() results = [] for doc_id in self._database: + if doc_id == self.U1DB_DATA_DOC_ID: + continue doc = self._get_doc(doc_id) if doc.content is None and not include_deleted: continue @@ -106,13 +108,15 @@ class CouchDatabase(ObjectStore): content = json.loads(cdoc['u1db_json']) self._sync_log.log = content['sync_log'] self._transaction_log.log = content['transaction_log'] + self._conflict_log.log = content['conflict_log'] self._replica_uid = content['replica_uid'] self._couch_rev = cdoc['_rev'] def _set_u1db_data(self): doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID) - doc.content = { 'transaction_log' : self._transaction_log.log, - 'sync_log' : self._sync_log.log, + doc.content = { 'sync_log' : self._sync_log.log, + 'transaction_log' : self._transaction_log.log, + 'conflict_log' : self._conflict_log.log, 'replica_uid' : self._replica_uid, '_rev' : self._couch_rev} self._put_doc(doc) diff --git a/src/leap/soledad/backends/objectstore.py b/src/leap/soledad/backends/objectstore.py index 54ffa9dd..cd051588 100644 --- a/src/leap/soledad/backends/objectstore.py +++ b/src/leap/soledad/backends/objectstore.py @@ -16,6 +16,7 @@ class ObjectStore(CommonBackend): self.set_document_factory(Document) self._sync_log = soledadutil.SyncLog() self._transaction_log = soledadutil.TransactionLog() + self._conflict_log = soledadutil.ConflictLog() self._replica_uid = replica_uid self._ensure_u1db_data() @@ -44,6 +45,12 @@ class ObjectStore(CommonBackend): def _put_doc(self, doc): raise NotImplementedError(self._put_doc) + def _update_gen_and_transaction_log(self, doc_id): + new_gen = self._get_generation() + 1 + trans_id = self._allocate_transaction_id() + self._transaction_log.append((new_gen, doc_id, trans_id)) + self._set_u1db_data() + def put_doc(self, doc): # consistency check if doc.doc_id is None: @@ -66,11 +73,7 @@ class ObjectStore(CommonBackend): new_rev = self._allocate_doc_rev(doc.rev) doc.rev = new_rev self._put_doc(doc) - # update u1db generation and logs - new_gen = self._get_generation() + 1 - trans_id = self._allocate_transaction_id() - self._transaction_log.append((new_gen, doc.doc_id, trans_id)) - self._set_u1db_data() + self._update_gen_and_transaction_log(doc.doc_id) return doc.rev def delete_doc(self, doc): @@ -87,6 +90,7 @@ class ObjectStore(CommonBackend): doc.rev = new_rev doc.make_tombstone() self._put_doc(doc) + self._update_gen_and_transaction_log(doc.doc_id) return new_rev # start of index-related methods: these are not supported by this backend. 
@@ -130,6 +134,16 @@ class ObjectStore(CommonBackend): other_transaction_id) self._set_u1db_data() + def _do_set_replica_gen_and_trans_id(self, other_replica_uid, + other_generation, other_transaction_id): + return self._set_replica_gen_and_trans_id( + other_replica_uid, + other_generation, + other_transaction_id) + + def _get_transaction_log(self): + return self._transaction_log.get_transaction_log() + #------------------------------------------------------------------------- # implemented methods from CommonBackend #------------------------------------------------------------------------- @@ -146,9 +160,10 @@ class ObjectStore(CommonBackend): # Documents never have conflicts on server. return False - def _put_and_update_indexes(self, doc_id, old_doc, new_rev, content): - raise NotImplementedError(self._put_and_update_indexes) - + def _put_and_update_indexes(self, old_doc, doc): + # TODO: implement index update + self._put_doc(doc) + self._update_gen_and_transaction_log(doc.doc_id) def _get_trans_id_for_gen(self, generation): self._get_u1db_data() @@ -187,8 +202,9 @@ class ObjectStore(CommonBackend): if self._replica_uid is None: self._replica_uid = uuid.uuid4().hex doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID) - doc.content = { 'transaction_log' : [], - 'sync_log' : [], + doc.content = { 'sync_log' : [], + 'transaction_log' : [], + 'conflict_log' : [], 'replica_uid' : self._replica_uid } self._put_doc(doc) @@ -213,3 +229,36 @@ class ObjectStore(CommonBackend): replica_uid = property( _get_replica_uid, _set_replica_uid, doc="Replica UID of the database") + + + #------------------------------------------------------------------------- + # The methods below were cloned from u1db sqlite backend. They should at + # least exist and raise a NotImplementedError exception in CommonBackend + # (should we maybe fill a bug in u1db bts?). + #------------------------------------------------------------------------- + + def _add_conflict(self, doc_id, my_doc_rev, my_content): + self._conflict_log.append((doc_id, my_doc_rev, my_content)) + + def _delete_conflicts(self, doc, conflict_revs): + deleting = [(doc.doc_id, c_rev) for c_rev in conflict_revs] + self._conflict_log.delete_conflicts(deleting) + doc.has_conflicts = self._has_conflicts(doc.doc_id) + + def _prune_conflicts(self, doc, doc_vcr): + if self._has_conflicts(doc.doc_id): + autoresolved = False + c_revs_to_prune = [] + for c_doc in self._get_conflicts(doc.doc_id): + c_vcr = vectorclock.VectorClockRev(c_doc.rev) + if doc_vcr.is_newer(c_vcr): + c_revs_to_prune.append(c_doc.rev) + elif doc.same_content_as(c_doc): + c_revs_to_prune.append(c_doc.rev) + doc_vcr.maximize(c_vcr) + autoresolved = True + if autoresolved: + doc_vcr.increment(self._replica_uid) + doc.rev = doc_vcr.as_str() + c = self._db_handle.cursor() + self._delete_conflicts(c, doc, c_revs_to_prune) -- cgit v1.2.3 From 8909a16a12c17098ae8d5ecd31af6765b543622c Mon Sep 17 00:00:00 2001 From: drebs Date: Thu, 10 Jan 2013 13:18:48 -0200 Subject: CouchDatabase passes all relevant u1db tests. 
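
The patch below starts tracking conflicts on the server side and, to keep arbitrary document content from interfering with CouchDB's own JSON handling, stores the conflict log base64-wrapped inside the u1db metadata document. The encode/decode pair used by _set_u1db_data and _get_u1db_data reduces to this sketch:

    import json
    from base64 import b64encode, b64decode

    conflict_log = [['doc-1', 'replica:1', '{"v": 1}']]

    # store: serialize to JSON, then base64 so the blob survives CouchDB unchanged
    stored = b64encode(json.dumps(conflict_log))

    # load: reverse both steps
    assert json.loads(b64decode(stored)) == conflict_log
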
--- src/leap/soledad/backends/couch.py | 34 +++++++++++++--- src/leap/soledad/backends/objectstore.py | 66 +++++++++++++++++++------------- 2 files changed, 68 insertions(+), 32 deletions(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/couch.py b/src/leap/soledad/backends/couch.py index 14021737..f071cfad 100644 --- a/src/leap/soledad/backends/couch.py +++ b/src/leap/soledad/backends/couch.py @@ -1,6 +1,8 @@ +import uuid +from base64 import b64encode, b64decode from u1db import errors from u1db.remote.http_target import HTTPSyncTarget -from couchdb.client import Server, Document +from couchdb.client import Server, Document as CouchDocument from couchdb.http import ResourceNotFound from leap.soledad.backends.objectstore import ObjectStore from leap.soledad.backends.leap_backend import LeapDocument @@ -46,7 +48,13 @@ class CouchDatabase(ObjectStore): cdoc = self._database.get(doc_id) if cdoc is None: return None - doc = self._factory(doc_id=doc_id, rev=cdoc['u1db_rev']) + has_conflicts = False + if check_for_conflicts: + has_conflicts = self._has_conflicts(doc_id) + doc = self._factory( + doc_id=doc_id, + rev=cdoc['u1db_rev'], + has_conflicts=has_conflicts) if cdoc['u1db_json'] is not None: doc.content = json.loads(cdoc['u1db_json']) else: @@ -60,7 +68,7 @@ class CouchDatabase(ObjectStore): for doc_id in self._database: if doc_id == self.U1DB_DATA_DOC_ID: continue - doc = self._get_doc(doc_id) + doc = self._get_doc(doc_id, check_for_conflicts=True) if doc.content is None and not include_deleted: continue results.append(doc) @@ -68,7 +76,7 @@ class CouchDatabase(ObjectStore): def _put_doc(self, doc): # prepare couch's Document - cdoc = Document() + cdoc = CouchDocument() cdoc['_id'] = doc.doc_id # we have to guarantee that couch's _rev is cosistent old_cdoc = self._database.get(doc.doc_id) @@ -81,6 +89,7 @@ class CouchDatabase(ObjectStore): cdoc['u1db_json'] = doc.get_json() else: cdoc['u1db_json'] = None + # save doc in db self._database.save(cdoc) def get_sync_target(self): @@ -103,12 +112,22 @@ class CouchDatabase(ObjectStore): return Synchronizer(self, CouchSyncTarget(url, creds=creds)).sync( autocreate=autocreate) + def _initialize(self): + if self._replica_uid is None: + self._replica_uid = uuid.uuid4().hex + doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID) + doc.content = { 'sync_log' : [], + 'transaction_log' : [], + 'conflict_log' : b64encode(json.dumps([])), + 'replica_uid' : self._replica_uid } + self._put_doc(doc) + def _get_u1db_data(self): cdoc = self._database.get(self.U1DB_DATA_DOC_ID) content = json.loads(cdoc['u1db_json']) self._sync_log.log = content['sync_log'] self._transaction_log.log = content['transaction_log'] - self._conflict_log.log = content['conflict_log'] + self._conflict_log.log = json.loads(b64decode(content['conflict_log'])) self._replica_uid = content['replica_uid'] self._couch_rev = cdoc['_rev'] @@ -116,7 +135,10 @@ class CouchDatabase(ObjectStore): doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID) doc.content = { 'sync_log' : self._sync_log.log, 'transaction_log' : self._transaction_log.log, - 'conflict_log' : self._conflict_log.log, + # Here, the b64 encode ensures that document content + # does not cause strange behaviour in couchdb because + # of encoding. 
+ 'conflict_log' : b64encode(json.dumps(self._conflict_log.log)), 'replica_uid' : self._replica_uid, '_rev' : self._couch_rev} self._put_doc(doc) diff --git a/src/leap/soledad/backends/objectstore.py b/src/leap/soledad/backends/objectstore.py index cd051588..2ab07675 100644 --- a/src/leap/soledad/backends/objectstore.py +++ b/src/leap/soledad/backends/objectstore.py @@ -1,9 +1,7 @@ -import uuid from u1db.backends import CommonBackend -from u1db import errors, Document +from u1db import errors, Document, vectorclock from leap.soledad import util as soledadutil - class ObjectStore(CommonBackend): """ A backend for storing u1db data in an object store. @@ -11,12 +9,12 @@ class ObjectStore(CommonBackend): def __init__(self, replica_uid=None): # This initialization method should be called after the connection - # with the database is established, so it can ensure that u1db data is - # configured and up-to-date. + # with the database is established in each implementation, so it can + # ensure that u1db data is configured and up-to-date. self.set_document_factory(Document) self._sync_log = soledadutil.SyncLog() self._transaction_log = soledadutil.TransactionLog() - self._conflict_log = soledadutil.ConflictLog() + self._conflict_log = soledadutil.ConflictLog(self._factory) self._replica_uid = replica_uid self._ensure_u1db_data() @@ -72,8 +70,7 @@ class ObjectStore(CommonBackend): raise errors.RevisionConflict() new_rev = self._allocate_doc_rev(doc.rev) doc.rev = new_rev - self._put_doc(doc) - self._update_gen_and_transaction_log(doc.doc_id) + self._put_and_update_indexes(old_doc, doc) return doc.rev def delete_doc(self, doc): @@ -89,8 +86,7 @@ class ObjectStore(CommonBackend): new_rev = self._allocate_doc_rev(doc.rev) doc.rev = new_rev doc.make_tombstone() - self._put_doc(doc) - self._update_gen_and_transaction_log(doc.doc_id) + self._put_and_update_indexes(old_doc, doc) return new_rev # start of index-related methods: these are not supported by this backend. @@ -117,10 +113,25 @@ class ObjectStore(CommonBackend): # end of index-related methods: these are not supported by this backend. def get_doc_conflicts(self, doc_id): - return [] + self._get_u1db_data() + conflict_docs = self._conflict_log.get_conflicts(doc_id) + if not conflict_docs: + return [] + this_doc = self._get_doc(doc_id) + this_doc.has_conflicts = True + return [this_doc] + list(conflict_docs) def resolve_doc(self, doc, conflicted_doc_revs): - raise NotImplementedError(self.resolve_doc) + cur_doc = self._get_doc(doc.doc_id) + new_rev = self._ensure_maximal_rev(cur_doc.rev, + conflicted_doc_revs) + superseded_revs = set(conflicted_doc_revs) + doc.rev = new_rev + if cur_doc.rev in superseded_revs: + self._put_and_update_indexes(cur_doc, doc) + else: + self._add_conflict(doc.doc_id, new_rev, doc.get_json()) + self._delete_conflicts(doc, superseded_revs) def _get_replica_gen_and_trans_id(self, other_replica_uid): self._get_u1db_data() @@ -142,6 +153,7 @@ class ObjectStore(CommonBackend): other_transaction_id) def _get_transaction_log(self): + self._get_u1db_data() return self._transaction_log.get_transaction_log() #------------------------------------------------------------------------- @@ -157,11 +169,12 @@ class ObjectStore(CommonBackend): return self._transaction_log.get_generation_info() def _has_conflicts(self, doc_id): - # Documents never have conflicts on server. 
- return False + self._get_u1db_data() + return self._conflict_log.has_conflicts(doc_id) def _put_and_update_indexes(self, old_doc, doc): - # TODO: implement index update + # for now we ignore indexes as this backend is used to store encrypted + # blobs of data in the server. self._put_doc(doc) self._update_gen_and_transaction_log(doc.doc_id) @@ -199,14 +212,7 @@ class ObjectStore(CommonBackend): """ Create u1db data object in store. """ - if self._replica_uid is None: - self._replica_uid = uuid.uuid4().hex - doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID) - doc.content = { 'sync_log' : [], - 'transaction_log' : [], - 'conflict_log' : [], - 'replica_uid' : self._replica_uid } - self._put_doc(doc) + NotImplementedError(self._initialize) def _get_u1db_data(self, u1db_data_doc_id): """ @@ -239,17 +245,19 @@ class ObjectStore(CommonBackend): def _add_conflict(self, doc_id, my_doc_rev, my_content): self._conflict_log.append((doc_id, my_doc_rev, my_content)) + self._set_u1db_data() def _delete_conflicts(self, doc, conflict_revs): deleting = [(doc.doc_id, c_rev) for c_rev in conflict_revs] self._conflict_log.delete_conflicts(deleting) + self._set_u1db_data() doc.has_conflicts = self._has_conflicts(doc.doc_id) def _prune_conflicts(self, doc, doc_vcr): if self._has_conflicts(doc.doc_id): autoresolved = False c_revs_to_prune = [] - for c_doc in self._get_conflicts(doc.doc_id): + for c_doc in self._conflict_log.get_conflicts(doc.doc_id): c_vcr = vectorclock.VectorClockRev(c_doc.rev) if doc_vcr.is_newer(c_vcr): c_revs_to_prune.append(c_doc.rev) @@ -260,5 +268,11 @@ class ObjectStore(CommonBackend): if autoresolved: doc_vcr.increment(self._replica_uid) doc.rev = doc_vcr.as_str() - c = self._db_handle.cursor() - self._delete_conflicts(c, doc, c_revs_to_prune) + self._delete_conflicts(doc, c_revs_to_prune) + + def _force_doc_sync_conflict(self, doc): + my_doc = self._get_doc(doc.doc_id) + self._prune_conflicts(doc, vectorclock.VectorClockRev(doc.rev)) + self._add_conflict(doc.doc_id, my_doc.rev, my_doc.get_json()) + doc.has_conflicts = True + self._put_and_update_indexes(my_doc, doc) -- cgit v1.2.3 From 7c3b6c5c0fa7a8bd1e15a302bfe0d30e347316be Mon Sep 17 00:00:00 2001 From: drebs Date: Mon, 14 Jan 2013 13:23:44 -0200 Subject: CouchSyncTarget works as a LocalSyncTarget. 
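
The change below swaps the HTTPSyncTarget base class for u1db's LocalSyncTarget, so a CouchDatabase can be synchronized in-process by handing the target database object straight to a Synchronizer, with no HTTP round trip. The same pattern with two plain u1db SQLite databases (illustrative only; the file paths are arbitrary):

    from u1db import open as u1db_open
    from u1db.sync import Synchronizer, LocalSyncTarget

    source = u1db_open('/tmp/source.u1db', create=True)
    target = u1db_open('/tmp/target.u1db', create=True)

    source.create_doc_from_json('{"k": "v"}', doc_id='doc-1')
    # LocalSyncTarget wraps the target database directly, as CouchSyncTarget
    # now does via get_sync_target().
    Synchronizer(source, LocalSyncTarget(target)).sync()

    assert target.get_doc('doc-1').content == {'k': 'v'}
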
--- src/leap/soledad/backends/couch.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/couch.py b/src/leap/soledad/backends/couch.py index f071cfad..5cde4805 100644 --- a/src/leap/soledad/backends/couch.py +++ b/src/leap/soledad/backends/couch.py @@ -1,7 +1,8 @@ +import sys import uuid from base64 import b64encode, b64decode from u1db import errors -from u1db.remote.http_target import HTTPSyncTarget +from u1db.sync import LocalSyncTarget from couchdb.client import Server, Document as CouchDocument from couchdb.http import ResourceNotFound from leap.soledad.backends.objectstore import ObjectStore @@ -108,7 +109,6 @@ class CouchDatabase(ObjectStore): def sync(self, url, creds=None, autocreate=True): from u1db.sync import Synchronizer - from u1db.remote.http_target import CouchSyncTarget return Synchronizer(self, CouchSyncTarget(url, creds=creds)).sync( autocreate=autocreate) @@ -150,7 +150,8 @@ class CouchDatabase(ObjectStore): def delete_database(self): del(self._server[self._dbname]) -class CouchSyncTarget(HTTPSyncTarget): + +class CouchSyncTarget(LocalSyncTarget): def get_sync_info(self, source_replica_uid): source_gen, source_trans_id = self._db._get_replica_gen_and_trans_id( @@ -168,4 +169,3 @@ class CouchSyncTarget(HTTPSyncTarget): source_replica_uid, source_replica_generation, source_replica_transaction_id) - -- cgit v1.2.3 From ac2770e4bca995ad40b6b98013c7a11e01a6b46f Mon Sep 17 00:00:00 2001 From: drebs Date: Mon, 14 Jan 2013 13:24:11 -0200 Subject: Cleanup of imports. --- src/leap/soledad/backends/sqlcipher.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/sqlcipher.py b/src/leap/soledad/backends/sqlcipher.py index 3b03bc95..53758397 100644 --- a/src/leap/soledad/backends/sqlcipher.py +++ b/src/leap/soledad/backends/sqlcipher.py @@ -27,7 +27,7 @@ import sys import time import uuid -from u1db.backends import CommonBackend, CommonSyncTarget +from u1db.backends import CommonBackend from u1db.backends.sqlite_backend import SQLitePartialExpandDatabase from u1db import ( Document, -- cgit v1.2.3 From 7759f6c6b862e792adb4a005d9ec27e313fb6e06 Mon Sep 17 00:00:00 2001 From: drebs Date: Tue, 15 Jan 2013 18:52:56 -0200 Subject: Refactor and fix ObjectStore. --- src/leap/soledad/backends/objectstore.py | 183 +++++++++++++++++++++++++++++-- 1 file changed, 171 insertions(+), 12 deletions(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/objectstore.py b/src/leap/soledad/backends/objectstore.py index 2ab07675..03694532 100644 --- a/src/leap/soledad/backends/objectstore.py +++ b/src/leap/soledad/backends/objectstore.py @@ -1,6 +1,5 @@ from u1db.backends import CommonBackend from u1db import errors, Document, vectorclock -from leap.soledad import util as soledadutil class ObjectStore(CommonBackend): """ @@ -12,9 +11,9 @@ class ObjectStore(CommonBackend): # with the database is established in each implementation, so it can # ensure that u1db data is configured and up-to-date. 
self.set_document_factory(Document) - self._sync_log = soledadutil.SyncLog() - self._transaction_log = soledadutil.TransactionLog() - self._conflict_log = soledadutil.ConflictLog(self._factory) + self._sync_log = SyncLog() + self._transaction_log = TransactionLog() + self._conflict_log = ConflictLog(self._factory) self._replica_uid = replica_uid self._ensure_u1db_data() @@ -139,19 +138,18 @@ class ObjectStore(CommonBackend): def _set_replica_gen_and_trans_id(self, other_replica_uid, other_generation, other_transaction_id): - self._get_u1db_data() + return self._do_set_replica_gen_and_trans_id( + other_replica_uid, + other_generation, + other_transaction_id) + + def _do_set_replica_gen_and_trans_id(self, other_replica_uid, + other_generation, other_transaction_id): self._sync_log.set_replica_gen_and_trans_id(other_replica_uid, other_generation, other_transaction_id) self._set_u1db_data() - def _do_set_replica_gen_and_trans_id(self, other_replica_uid, - other_generation, other_transaction_id): - return self._set_replica_gen_and_trans_id( - other_replica_uid, - other_generation, - other_transaction_id) - def _get_transaction_log(self): self._get_u1db_data() return self._transaction_log.get_transaction_log() @@ -276,3 +274,164 @@ class ObjectStore(CommonBackend): self._add_conflict(doc.doc_id, my_doc.rev, my_doc.get_json()) doc.has_conflicts = True self._put_and_update_indexes(my_doc, doc) + + +#---------------------------------------------------------------------------- +# U1DB's Transaction, Sync, and conflict Logs +#---------------------------------------------------------------------------- + +class SimpleList(object): + def __init__(self): + self._log = [] + + def _set_log(self, log): + self._log = log + + def _get_log(self): + return self._log + + log = property( + _get_log, _set_log, doc="Log contents.") + + def append(self, msg): + self._log.append(msg) + + def reduce(self, func, initializer=None): + return reduce(func, self._log, initializer) + + def map(self, func): + return map(func, self._get_log()) + + def filter(self, func): + return filter(func, self._get_log()) + + +class TransactionLog(SimpleList): + """ + An ordered list of (generation, doc_id, transaction_id) tuples. + """ + + def _set_log(self, log): + self._log = log + + def _get_log(self): + return sorted(self._log, reverse=True) + + log = property( + _get_log, _set_log, doc="Log contents.") + + def get_generation(self): + """ + Return the current generation. + """ + gens = self.map(lambda x: x[0]) + if not gens: + return 0 + return max(gens) + + def get_generation_info(self): + """ + Return the current generation and transaction id. + """ + if not self._log: + return(0, '') + info = self.map(lambda x: (x[0], x[2])) + return reduce(lambda x, y: x if (x[0] > y[0]) else y, info) + + def get_trans_id_for_gen(self, gen): + """ + Get the transaction id corresponding to a particular generation. + """ + log = self.reduce(lambda x, y: y if y[0] == gen else x) + if log is None: + return None + return log[2] + + def whats_changed(self, old_generation): + """ + Return a list of documents that have changed since old_generation. 
+ """ + results = self.filter(lambda x: x[0] > old_generation) + seen = set() + changes = [] + newest_trans_id = '' + for generation, doc_id, trans_id in results: + if doc_id not in seen: + changes.append((doc_id, generation, trans_id)) + seen.add(doc_id) + if changes: + cur_gen = changes[0][1] # max generation + newest_trans_id = changes[0][2] + changes.reverse() + else: + results = self._get_log() + if not results: + cur_gen = 0 + newest_trans_id = '' + else: + cur_gen, _, newest_trans_id = results[0] + + return cur_gen, newest_trans_id, changes + + + def get_transaction_log(self): + """ + Return only a list of (doc_id, transaction_id) + """ + return map(lambda x: (x[1], x[2]), sorted(self._log)) + + +class SyncLog(SimpleList): + """ + A list of (replica_id, generation, transaction_id) tuples. + """ + + def find_by_replica_uid(self, replica_uid): + if not self._get_log(): + return () + return self.reduce(lambda x, y: y if y[0] == replica_uid else x) + + def get_replica_gen_and_trans_id(self, other_replica_uid): + """ + Return the last known generation and transaction id for the other db + replica. + """ + info = self.find_by_replica_uid(other_replica_uid) + if not info: + return (0, '') + return (info[1], info[2]) + + def set_replica_gen_and_trans_id(self, other_replica_uid, + other_generation, other_transaction_id): + """ + Set the last-known generation and transaction id for the other + database replica. + """ + self._log = self.filter(lambda x: x[0] != other_replica_uid) + self.append((other_replica_uid, other_generation, + other_transaction_id)) + +class ConflictLog(SimpleList): + """ + A list of (doc_id, my_doc_rev, my_content) tuples. + """ + + def __init__(self, factory): + super(ConflictLog, self).__init__() + self._factory = factory + + def delete_conflicts(self, conflicts): + for conflict in conflicts: + self._log = self.filter(lambda x: + x[0] != conflict[0] or x[1] != conflict[1]) + + def get_conflicts(self, doc_id): + conflicts = self.filter(lambda x: x[0] == doc_id) + if not conflicts: + return [] + return reversed(map(lambda x: self._factory(doc_id, x[1], x[2]), + conflicts)) + + def has_conflicts(self, doc_id): + return bool(self.filter(lambda x: x[0] == doc_id)) + -- cgit v1.2.3 From e83572610574e8d3d96c0117fdb45764ffbeb538 Mon Sep 17 00:00:00 2001 From: drebs Date: Wed, 16 Jan 2013 09:55:27 -0200 Subject: Fix copy_database for couch tests. --- src/leap/soledad/backends/couch.py | 6 ++---- src/leap/soledad/backends/objectstore.py | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/couch.py b/src/leap/soledad/backends/couch.py index 5cde4805..78026af8 100644 --- a/src/leap/soledad/backends/couch.py +++ b/src/leap/soledad/backends/couch.py @@ -41,10 +41,8 @@ class CouchDatabase(ObjectStore): #------------------------------------------------------------------------- def _get_doc(self, doc_id, check_for_conflicts=False): - """Get just the document content, without fancy handling. - - Conflicts do not happen on server side, so there's no need to check - for them. + """ + Get just the document content, without fancy handling. 
""" cdoc = self._database.get(doc_id) if cdoc is None: diff --git a/src/leap/soledad/backends/objectstore.py b/src/leap/soledad/backends/objectstore.py index 03694532..2ddd4c79 100644 --- a/src/leap/soledad/backends/objectstore.py +++ b/src/leap/soledad/backends/objectstore.py @@ -212,7 +212,7 @@ class ObjectStore(CommonBackend): """ NotImplementedError(self._initialize) - def _get_u1db_data(self, u1db_data_doc_id): + def _get_u1db_data(self): """ Fetch u1db configuration data from backend storage. """ -- cgit v1.2.3 From 8ee7ba49fd3ae902fd0a9d8a3a80b7b6a9ab999b Mon Sep 17 00:00:00 2001 From: drebs Date: Wed, 16 Jan 2013 11:04:42 -0200 Subject: Couch backend passes test_sync without indexes. --- src/leap/soledad/backends/objectstore.py | 60 ++++++++++++++++++++------------ 1 file changed, 37 insertions(+), 23 deletions(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/objectstore.py b/src/leap/soledad/backends/objectstore.py index 2ddd4c79..b6523336 100644 --- a/src/leap/soledad/backends/objectstore.py +++ b/src/leap/soledad/backends/objectstore.py @@ -277,45 +277,59 @@ class ObjectStore(CommonBackend): #---------------------------------------------------------------------------- -# U1DB's Transaction, Sync, and conflict Logs +# U1DB's TransactionLog, SyncLog, ConflictLog, and Index #---------------------------------------------------------------------------- class SimpleList(object): def __init__(self): - self._log = [] + self._data = [] - def _set_log(self, log): - self._log = log + def _set_data(self, data): + self._data = data - def _get_log(self): - return self._log + def _get_data(self): + return self._data - log = property( - _get_log, _set_log, doc="Log contents.") + data = property( + _get_data, _set_data, doc="List contents.") def append(self, msg): - self._log.append(msg) + self._data.append(msg) def reduce(self, func, initializer=None): - return reduce(func, self._log, initializer) + return reduce(func, self._data, initializer) def map(self, func): - return map(func, self._get_log()) + return map(func, self._get_data()) def filter(self, func): - return filter(func, self._get_log()) + return filter(func, self._get_data()) + + +class SimpleLog(SimpleList): + + def _set_log(self, log): + self._data = log + def _get_log(self): + return self._data + + log = property( + _get_log, _set_log, doc="Log contents.") -class TransactionLog(SimpleList): + +class TransactionLog(SimpleLog): """ An ordered list of (generation, doc_id, transaction_id) tuples. """ def _set_log(self, log): - self._log = log + self._data = log - def _get_log(self): - return sorted(self._log, reverse=True) + def _get_data(self, reverse=True): + return sorted(self._data, reverse=reverse) + + _get_log = _get_data log = property( _get_log, _set_log, doc="Log contents.") @@ -333,7 +347,7 @@ class TransactionLog(SimpleList): """ Return the current generation and transaction id. """ - if not self._log: + if not self._get_log(): return(0, '') info = self.map(lambda x: (x[0], x[2])) return reduce(lambda x, y: x if (x[0] > y[0]) else y, info) @@ -378,10 +392,10 @@ class TransactionLog(SimpleList): """ Return only a list of (doc_id, transaction_id) """ - return map(lambda x: (x[1], x[2]), sorted(self._log)) + return map(lambda x: (x[1], x[2]), sorted(self._get_log(reverse=False))) -class SyncLog(SimpleList): +class SyncLog(SimpleLog): """ A list of (replica_id, generation, transaction_id) tuples. 
""" @@ -407,11 +421,11 @@ class SyncLog(SimpleList): Set the last-known generation and transaction id for the other database replica. """ - self._log = self.filter(lambda x: x[0] != other_replica_uid) + self._set_log(self.filter(lambda x: x[0] != other_replica_uid)) self.append((other_replica_uid, other_generation, other_transaction_id)) -class ConflictLog(SimpleList): +class ConflictLog(SimpleLog): """ A list of (doc_id, my_doc_rev, my_content) tuples. """ @@ -422,8 +436,8 @@ class ConflictLog(SimpleList): def delete_conflicts(self, conflicts): for conflict in conflicts: - self._log = self.filter(lambda x: - x[0] != conflict[0] or x[1] != conflict[1]) + self._set_log(self.filter(lambda x: + x[0] != conflict[0] or x[1] != conflict[1])) def get_conflicts(self, doc_id): conflicts = self.filter(lambda x: x[0] == doc_id) -- cgit v1.2.3 From 69173d511a99126fe6508d87e8a25a60d3f1f927 Mon Sep 17 00:00:00 2001 From: drebs Date: Thu, 17 Jan 2013 19:20:04 -0200 Subject: Add tests for verifying if sqlcipher db is encrypted. --- src/leap/soledad/backends/sqlcipher.py | 38 +++++++++++++++++++++------------- 1 file changed, 24 insertions(+), 14 deletions(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/sqlcipher.py b/src/leap/soledad/backends/sqlcipher.py index 53758397..6711aa86 100644 --- a/src/leap/soledad/backends/sqlcipher.py +++ b/src/leap/soledad/backends/sqlcipher.py @@ -16,25 +16,15 @@ """A U1DB implementation that uses SQLCipher as its persistence layer.""" -import errno import os -try: - import simplejson as json -except ImportError: - import json # noqa -from sqlite3 import dbapi2 -import sys +from sqlite3 import dbapi2, DatabaseError import time -import uuid -from u1db.backends import CommonBackend from u1db.backends.sqlite_backend import SQLitePartialExpandDatabase from u1db import ( Document, errors, - query_parser, - vectorclock, - ) +) def open(path, password, create, document_factory=None): @@ -50,11 +40,17 @@ def open(path, password, create, document_factory=None): parameters as Document.__init__. :return: An instance of Database. """ - from u1db.backends import sqlite_backend return SQLCipherDatabase.open_database( path, password, create=create, document_factory=document_factory) +class DatabaseIsNotEncrypted(Exception): + """ + Exception raised when trying to open non-encrypted databases. + """ + pass + + class SQLCipherDatabase(SQLitePartialExpandDatabase): """A U1DB implementation that uses SQLCipher as its persistence layer.""" @@ -67,12 +63,26 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase): def __init__(self, sqlite_file, password, document_factory=None): """Create a new sqlcipher file.""" + self._check_if_db_is_encrypted(sqlite_file) self._db_handle = dbapi2.connect(sqlite_file) SQLCipherDatabase.set_pragma_key(self._db_handle, password) self._real_replica_uid = None self._ensure_schema() self._factory = document_factory or Document + def _check_if_db_is_encrypted(self, sqlite_file): + if not os.path.exists(sqlite_file): + return + else: + try: + # try to open an encrypted database with the regular u1db backend + # should raise a DatabaseError exception. 
+ SQLitePartialExpandDatabase(sqlite_file) + raise DatabaseIsNotEncrypted() + except DatabaseError: + pass + + @classmethod def _open_database(cls, sqlite_file, password, document_factory=None): if not os.path.isfile(sqlite_file): @@ -122,4 +132,4 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase): SQLCipherDatabase._sqlite_registry[klass._index_storage_value] = klass -SQLCipherDatabase.register_implementation(SQLCipherDatabase) +SQLCipherDatabase.register_implementation(SQLCipherDatabase) \ No newline at end of file -- cgit v1.2.3 From c1e460cc15523a0138dde1ece9d584554d99c04b Mon Sep 17 00:00:00 2001 From: drebs Date: Sun, 20 Jan 2013 11:56:37 -0200 Subject: Fix SQLCipher implementation registering. --- src/leap/soledad/backends/sqlcipher.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/sqlcipher.py b/src/leap/soledad/backends/sqlcipher.py index 6711aa86..f7dc6c0b 100644 --- a/src/leap/soledad/backends/sqlcipher.py +++ b/src/leap/soledad/backends/sqlcipher.py @@ -61,6 +61,7 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase): def set_pragma_key(cls, db_handle, key): db_handle.cursor().execute("PRAGMA key = '%s'" % key) + def __init__(self, sqlite_file, password, document_factory=None): """Create a new sqlcipher file.""" self._check_if_db_is_encrypted(sqlite_file) @@ -70,6 +71,7 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase): self._ensure_schema() self._factory = document_factory or Document + def _check_if_db_is_encrypted(self, sqlite_file): if not os.path.exists(sqlite_file): return @@ -108,6 +110,7 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase): return SQLCipherDatabase._sqlite_registry[v]( sqlite_file, password, document_factory=document_factory) + @classmethod def open_database(cls, sqlite_file, password, create, backend_cls=None, document_factory=None): @@ -123,6 +126,7 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase): return backend_cls(sqlite_file, password, document_factory=document_factory) + @staticmethod def register_implementation(klass): """Register that we implement an SQLCipherDatabase. @@ -132,4 +136,5 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase): SQLCipherDatabase._sqlite_registry[klass._index_storage_value] = klass -SQLCipherDatabase.register_implementation(SQLCipherDatabase) \ No newline at end of file +SQLiteDatabase.register_implementation(SQLCipherDatabase) + -- cgit v1.2.3 From eae9fd4b5d93845fc708abb722714ebf9d003418 Mon Sep 17 00:00:00 2001 From: drebs Date: Sun, 20 Jan 2013 16:29:53 -0200 Subject: SQLCipher backend syncs using a LeapSyncTarget. --- src/leap/soledad/backends/sqlcipher.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) (limited to 'src/leap/soledad/backends') diff --git a/src/leap/soledad/backends/sqlcipher.py b/src/leap/soledad/backends/sqlcipher.py index f7dc6c0b..3d03449e 100644 --- a/src/leap/soledad/backends/sqlcipher.py +++ b/src/leap/soledad/backends/sqlcipher.py @@ -20,14 +20,17 @@ import os from sqlite3 import dbapi2, DatabaseError import time -from u1db.backends.sqlite_backend import SQLitePartialExpandDatabase +from u1db.backends.sqlite_backend import ( + SQLiteDatabase, + SQLitePartialExpandDatabase, +) from u1db import ( Document, errors, ) -def open(path, password, create, document_factory=None): +def open(path, password, create=True, document_factory=None): """Open a database at the given location. 
Will raise u1db.errors.DatabaseDoesNotExist if create=False and the @@ -127,13 +130,15 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase): document_factory=document_factory) - @staticmethod - def register_implementation(klass): - """Register that we implement an SQLCipherDatabase. - - The attribute _index_storage_value will be used as the lookup key. + def sync(self, url, creds=None, autocreate=True, soledad=None): + """ + Synchronize encrypted documents with remote replica exposed at url. """ - SQLCipherDatabase._sqlite_registry[klass._index_storage_value] = klass + from u1db.sync import Synchronizer + from leap.soledad.backends.leap_backend import LeapSyncTarget + return Synchronizer(self, LeapSyncTarget(url, creds=creds), + soledad=self._soledad).sync( + autocreate=autocreate) SQLiteDatabase.register_implementation(SQLCipherDatabase) -- cgit v1.2.3
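
Taken together, the SQLCipher patches above leave the backend with a module-level open(path, password, create=True, document_factory=None) that keys the connection with PRAGMA key, refuses to open a pre-existing plaintext SQLite file (DatabaseIsNotEncrypted), and synchronizes through a LeapSyncTarget. A minimal usage sketch, assuming the Python sqlite3 module is actually linked against SQLCipher (otherwise the PRAGMA is a no-op and nothing is encrypted at rest):

    from leap.soledad.backends import sqlcipher

    # create an encrypted database and store a document
    db = sqlcipher.open('/tmp/encrypted.u1db', 'my passphrase')
    db.create_doc_from_json('{"payload": "protected at rest"}', doc_id='doc-1')

    # reopening requires the same passphrase; with create=False a missing file
    # raises u1db.errors.DatabaseDoesNotExist
    db = sqlcipher.open('/tmp/encrypted.u1db', 'my passphrase', create=False)
    assert db.get_doc('doc-1').content == {'payload': 'protected at rest'}
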