author     drebs <drebs@riseup.net>  2017-09-17 12:08:25 -0300
committer  drebs <drebs@riseup.net>  2017-09-17 15:50:55 -0300
commit     cfff46ff9becdbe5cf48816870e625ed253ecc57 (patch)
tree       8d239e4499f559d86ed17ea3632008303b25d485 /testing/tests/sync
parent     f29abe28bd778838626d12fcabe3980a8ce4fa8c (diff)
[refactor] move tests to root of repository
The tests entrypoint used to live in a testing/ subfolder in the root of the repository. This was done mainly because we had some common files for tests that we didn't want to ship (the files in testing/test_soledad, which is itself a python package). This sometimes caused errors when loading tests (it seems setuptools gets confused by having one python package in a subdirectory of another). This commit moves the tests entrypoint to the root of the repository. Closes: #8952
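A minimal sketch of the layout change described above (the resulting layout is an assumption based on the message; the commit itself only shows the deletions under testing/):

    # before: tests entrypoint nested under testing/
    testing/
        test_soledad/    # common test helpers, itself a python package
        tests/
            sync/
    # after: tests entrypoint at the root of the repository
    test_soledad/
    tests/
        sync/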
Diffstat (limited to 'testing/tests/sync')
-rw-r--r--  testing/tests/sync/__init__.py               0
-rw-r--r--  testing/tests/sync/test_sqlcipher_sync.py  719
-rw-r--r--  testing/tests/sync/test_sync.py            233
-rw-r--r--  testing/tests/sync/test_sync_mutex.py      133
-rw-r--r--  testing/tests/sync/test_sync_target.py     968
5 files changed, 0 insertions, 2053 deletions
diff --git a/testing/tests/sync/__init__.py b/testing/tests/sync/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/testing/tests/sync/__init__.py
+++ /dev/null
diff --git a/testing/tests/sync/test_sqlcipher_sync.py b/testing/tests/sync/test_sqlcipher_sync.py
deleted file mode 100644
index 26f63a40..00000000
--- a/testing/tests/sync/test_sqlcipher_sync.py
+++ /dev/null
@@ -1,719 +0,0 @@
-# -*- coding: utf-8 -*-
-# test_sqlcipher.py
-# Copyright (C) 2013-2016 LEAP
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-Test sqlcipher backend sync.
-"""
-import os
-
-from uuid import uuid4
-
-from testscenarios import TestWithScenarios
-
-from leap.soledad.common.l2db import sync
-from leap.soledad.common.l2db import vectorclock
-from leap.soledad.common.l2db import errors
-
-from leap.soledad.client.http_target import SoledadHTTPSyncTarget
-
-from test_soledad import u1db_tests as tests
-from test_soledad.util import SQLCIPHER_SCENARIOS
-from test_soledad.util import make_soledad_app
-from test_soledad.util import soledad_sync_target
-from test_soledad.util import BaseSoledadTest
-
-
-# -----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_sync`.
-# -----------------------------------------------------------------------------
-
-def sync_via_synchronizer_and_soledad(test, db_source, db_target,
- trace_hook=None,
- trace_hook_shallow=None):
- if trace_hook:
- test.skipTest("full trace hook unsupported over http")
- path = test._http_at[db_target]
- target = SoledadHTTPSyncTarget.connect(
- test.getURL(path), test._soledad._crypto)
- target.set_token_credentials('user-uuid', 'auth-token')
- if trace_hook_shallow:
- target._set_trace_hook_shallow(trace_hook_shallow)
- return sync.Synchronizer(db_source, target).sync()
-
-
-def sync_via_synchronizer(test, db_source, db_target,
- trace_hook=None,
- trace_hook_shallow=None):
- target = db_target.get_sync_target()
- trace_hook = trace_hook or trace_hook_shallow
- if trace_hook:
- target._set_trace_hook(trace_hook)
- return sync.Synchronizer(db_source, target).sync()
-
-
-sync_scenarios = []
-for name, scenario in SQLCIPHER_SCENARIOS:
- scenario['do_sync'] = sync_via_synchronizer
- sync_scenarios.append((name, scenario))
-
-
-class SQLCipherDatabaseSyncTests(
- TestWithScenarios,
- tests.DatabaseBaseTests,
- BaseSoledadTest):
-
- """
- Tests for successful sync between SQLCipher and LeapBackend.
-
- Some of the tests in this class had to be adapted because the remote
- backend always receives encrypted content, and so it cannot rely on
- comparing document contents to autoresolve conflicts.
- """
-
- scenarios = sync_scenarios
-
- def setUp(self):
- self._use_tracking = {}
- super(tests.DatabaseBaseTests, self).setUp()
-
- def create_database(self, replica_uid, sync_role=None):
- if replica_uid == 'test' and sync_role is None:
- # created up the chain by base class but unused
- return None
- db = self.create_database_for_role(replica_uid, sync_role)
- if sync_role:
- self._use_tracking[db] = (replica_uid, sync_role)
- self.addCleanup(db.close)
- return db
-
- def create_database_for_role(self, replica_uid, sync_role):
- # hook point for reuse
- return tests.DatabaseBaseTests.create_database(self, replica_uid)
-
- def sync(self, db_from, db_to, trace_hook=None,
- trace_hook_shallow=None):
- from_name, from_sync_role = self._use_tracking[db_from]
- to_name, to_sync_role = self._use_tracking[db_to]
- if from_sync_role not in ('source', 'both'):
- raise Exception("%s marked for %s use but used as source" %
- (from_name, from_sync_role))
- if to_sync_role not in ('target', 'both'):
- raise Exception("%s marked for %s use but used as target" %
- (to_name, to_sync_role))
- return self.do_sync(self, db_from, db_to, trace_hook,
- trace_hook_shallow)
-
- def assertLastExchangeLog(self, db, expected):
- log = getattr(db, '_last_exchange_log', None)
- if log is None:
- return
- self.assertEqual(expected, log)
-
- def copy_database(self, db, sync_role=None):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES
- # IS THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST
- # THAT WE CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS
- # RATHER THAN CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND
- # NINJA TO YOUR HOUSE.
- db_copy = tests.DatabaseBaseTests.copy_database(self, db)
- name, orig_sync_role = self._use_tracking[db]
- self._use_tracking[db_copy] = (name + '(copy)', sync_role or
- orig_sync_role)
- return db_copy
-
- def test_sync_tracks_db_generation_of_other(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.assertEqual(0, self.sync(self.db1, self.db2))
- self.assertEqual(
- (0, ''), self.db1._get_replica_gen_and_trans_id('test2'))
- self.assertEqual(
- (0, ''), self.db2._get_replica_gen_and_trans_id('test1'))
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [], 'last_known_gen': 0},
- 'return':
- {'docs': [], 'last_gen': 0}})
-
- def test_sync_autoresolves(self):
- """
- Test that sync autoresolves conflicts on the remote replica.
-
- This test was adapted because the remote database receives encrypted
- content and so it can't compare document contents to autoresolve.
- """
- # The remote database can't autoresolve conflicts based on magic
- # content convergence, so we modify this test to leave the possibility
- # of the remote document ending up in a conflicted state.
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc1 = self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc')
- rev1 = doc1.rev
- doc2 = self.db2.create_doc_from_json(tests.simple_doc, doc_id='doc')
- rev2 = doc2.rev
- self.sync(self.db1, self.db2)
- doc = self.db1.get_doc('doc')
- self.assertFalse(doc.has_conflicts)
- # if remote content is in conflicted state, then document revisions
- # will be different.
- # self.assertEqual(doc.rev, self.db2.get_doc('doc').rev)
- v = vectorclock.VectorClockRev(doc.rev)
- self.assertTrue(v.is_newer(vectorclock.VectorClockRev(rev1)))
- self.assertTrue(v.is_newer(vectorclock.VectorClockRev(rev2)))
-
- def test_sync_autoresolves_moar(self):
- """
- Test that sync autoresolves conflicts on the local replica.
-
- This test was adapted to decrypt remote content before assert.
- """
- # Here we test that when a database that has a conflicted document is
- # the source of a sync, the target database has a revision of that
- # document which is newer than the source's and whose content matches
- # the source document's conflict, then the source's conflict gets
- # autoresolved and the source document's revision is bumped.
- #
- # idea is as follows:
- # A B
- # a1 -
- # `------->
- # a1 a1
- # v v
- # a2 a1b1
- # `------->
- # a1b1+a2 a1b1
- # v
- # a1b1+a2 a1b2 (a1b2 has same content as a2)
- # `------->
- # a3b2 a1b2 (autoresolved)
- # `------->
- # a3b2 a3b2
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc')
- self.sync(self.db1, self.db2)
- for db, content in [(self.db1, '{}'), (self.db2, '{"hi": 42}')]:
- doc = db.get_doc('doc')
- doc.set_json(content)
- db.put_doc(doc)
- self.sync(self.db1, self.db2)
- # db1 and db2 now both have a doc of {hi:42}, but db1 has a conflict
- doc = self.db1.get_doc('doc')
- rev1 = doc.rev
- self.assertTrue(doc.has_conflicts)
- # set db2 to have a doc of {} (same as db1 before the conflict)
- doc = self.db2.get_doc('doc')
- doc.set_json('{}')
- self.db2.put_doc(doc)
- rev2 = doc.rev
- # sync it across
- self.sync(self.db1, self.db2)
- # tadaa!
- doc = self.db1.get_doc('doc')
- self.assertFalse(doc.has_conflicts)
- vec1 = vectorclock.VectorClockRev(rev1)
- vec2 = vectorclock.VectorClockRev(rev2)
- vec3 = vectorclock.VectorClockRev(doc.rev)
- self.assertTrue(vec3.is_newer(vec1))
- self.assertTrue(vec3.is_newer(vec2))
- # because the conflict is on the source, sync it another time
- self.sync(self.db1, self.db2)
- # make sure db2 now has the exact same thing
- doc1 = self.db1.get_doc('doc')
- self.assertGetEncryptedDoc(
- self.db2,
- doc1.doc_id, doc1.rev, doc1.get_json(), False)
-
- def test_sync_autoresolves_moar_backwards(self):
- # Here we would test that when a database that has a conflicted
- # document is the target of a sync, the source database has a revision
- # of that document which is newer than the target's and whose content
- # matches the target document's conflict, then the target's conflict
- # gets autoresolved and the document's revision is bumped.
- #
- # Despite that, in Soledad we assume that the server never syncs, so
- # it never has conflicted documents. Also, if it had, convergence
- # would not be possible by checking document contents because they
- # are encrypted on the server.
- #
- # Therefore we suppress this test.
- pass
-
- def test_sync_autoresolves_moar_backwards_three(self):
- # Here we would test that when a database that has a conflicted
- # document is the target of a sync, the source database has a revision
- # of that document which is newer than the target's and whose content
- # matches the target document's conflict, then the target's conflict
- # gets autoresolved and the document's revision is bumped.
- #
- # We use the same reasoning from the last test to suppress this one.
- pass
-
- def test_sync_pulling_doesnt_update_other_if_changed(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc = self.db2.create_doc_from_json(tests.simple_doc)
- # After the local side has sent its list of docs, before we start
- # receiving the "targets" response, we update the local database with a
- # new record.
- # When we finish synchronizing, we notice that something was updated
- # locally, and so we cannot tell c2 our new updated generation.
-
- def before_get_docs(state):
- if state != 'before get_docs':
- return
- self.db1.create_doc_from_json(tests.simple_doc)
-
- self.assertEqual(0, self.sync(self.db1, self.db2,
- trace_hook=before_get_docs))
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [], 'last_known_gen': 0},
- 'return':
- {'docs': [(doc.doc_id, doc.rev)],
- 'last_gen': 1}})
- self.assertEqual(1, self.db1._get_replica_gen_and_trans_id('test2')[0])
- # c2 should not have gotten a '_record_sync_info' call, because the
- # local database had been updated more than just by the messages
- # returned from c2.
- self.assertEqual(
- (0, ''), self.db2._get_replica_gen_and_trans_id('test1'))
-
- def test_sync_doesnt_update_other_if_nothing_pulled(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc)
-
- def no_record_sync_info(state):
- if state != 'record_sync_info':
- return
- self.fail('SyncTarget.record_sync_info was called')
- self.assertEqual(1, self.sync(self.db1, self.db2,
- trace_hook_shallow=no_record_sync_info))
- self.assertEqual(
- 1,
- self.db2._get_replica_gen_and_trans_id(self.db1._replica_uid)[0])
-
- def test_sync_ignores_convergence(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'both')
- doc = self.db1.create_doc_from_json(tests.simple_doc)
- self.db3 = self.create_database('test3', 'target')
- self.assertEqual(1, self.sync(self.db1, self.db3))
- self.assertEqual(0, self.sync(self.db2, self.db3))
- self.assertEqual(1, self.sync(self.db1, self.db2))
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [(doc.doc_id, doc.rev)],
- 'source_uid': 'test1',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return': {'docs': [], 'last_gen': 1}})
-
- def test_sync_ignores_superseded(self):
- self.db1 = self.create_database('test1', 'both')
- self.db2 = self.create_database('test2', 'both')
- doc = self.db1.create_doc_from_json(tests.simple_doc)
- doc_rev1 = doc.rev
- self.db3 = self.create_database('test3', 'target')
- self.sync(self.db1, self.db3)
- self.sync(self.db2, self.db3)
- new_content = '{"key": "altval"}'
- doc.set_json(new_content)
- self.db1.put_doc(doc)
- doc_rev2 = doc.rev
- self.sync(self.db2, self.db1)
- self.assertLastExchangeLog(self.db1,
- {'receive':
- {'docs': [(doc.doc_id, doc_rev1)],
- 'source_uid': 'test2',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return':
- {'docs': [(doc.doc_id, doc_rev2)],
- 'last_gen': 2}})
- self.assertGetDoc(self.db1, doc.doc_id, doc_rev2, new_content, False)
-
- def test_sync_sees_remote_conflicted(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc1 = self.db1.create_doc_from_json(tests.simple_doc)
- doc_id = doc1.doc_id
- doc1_rev = doc1.rev
- self.db1.create_index('test-idx', 'key')
- new_doc = '{"key": "altval"}'
- doc2 = self.db2.create_doc_from_json(new_doc, doc_id=doc_id)
- doc2_rev = doc2.rev
- self.assertTransactionLog([doc1.doc_id], self.db1)
- self.sync(self.db1, self.db2)
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [(doc_id, doc1_rev)],
- 'source_uid': 'test1',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return':
- {'docs': [(doc_id, doc2_rev)],
- 'last_gen': 1}})
- self.assertTransactionLog([doc_id, doc_id], self.db1)
- self.assertGetDoc(self.db1, doc_id, doc2_rev, new_doc, True)
- self.assertGetDoc(self.db2, doc_id, doc2_rev, new_doc, False)
- from_idx = self.db1.get_from_index('test-idx', 'altval')[0]
- self.assertEqual(doc2.doc_id, from_idx.doc_id)
- self.assertEqual(doc2.rev, from_idx.rev)
- self.assertTrue(from_idx.has_conflicts)
- self.assertEqual([], self.db1.get_from_index('test-idx', 'value'))
-
- def test_sync_sees_remote_delete_conflicted(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc1 = self.db1.create_doc_from_json(tests.simple_doc)
- doc_id = doc1.doc_id
- self.db1.create_index('test-idx', 'key')
- self.sync(self.db1, self.db2)
- doc2 = self.make_document(doc1.doc_id, doc1.rev, doc1.get_json())
- new_doc = '{"key": "altval"}'
- doc1.set_json(new_doc)
- self.db1.put_doc(doc1)
- self.db2.delete_doc(doc2)
- self.assertTransactionLog([doc_id, doc_id], self.db1)
- self.sync(self.db1, self.db2)
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [(doc_id, doc1.rev)],
- 'source_uid': 'test1',
- 'source_gen': 2, 'last_known_gen': 1},
- 'return': {'docs': [(doc_id, doc2.rev)],
- 'last_gen': 2}})
- self.assertTransactionLog([doc_id, doc_id, doc_id], self.db1)
- self.assertGetDocIncludeDeleted(self.db1, doc_id, doc2.rev, None, True)
- self.assertGetDocIncludeDeleted(
- self.db2, doc_id, doc2.rev, None, False)
- self.assertEqual([], self.db1.get_from_index('test-idx', 'value'))
-
- def test_sync_local_race_conflicted(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc = self.db1.create_doc_from_json(tests.simple_doc)
- doc_id = doc.doc_id
- doc1_rev = doc.rev
- self.db1.create_index('test-idx', 'key')
- self.sync(self.db1, self.db2)
- content1 = '{"key": "localval"}'
- content2 = '{"key": "altval"}'
- doc.set_json(content2)
- self.db2.put_doc(doc)
- doc2_rev2 = doc.rev
- triggered = []
-
- def after_whatschanged(state):
- if state != 'after whats_changed':
- return
- triggered.append(True)
- doc = self.make_document(doc_id, doc1_rev, content1)
- self.db1.put_doc(doc)
-
- self.sync(self.db1, self.db2, trace_hook=after_whatschanged)
- self.assertEqual([True], triggered)
- self.assertGetDoc(self.db1, doc_id, doc2_rev2, content2, True)
- from_idx = self.db1.get_from_index('test-idx', 'altval')[0]
- self.assertEqual(doc.doc_id, from_idx.doc_id)
- self.assertEqual(doc.rev, from_idx.rev)
- self.assertTrue(from_idx.has_conflicts)
- self.assertEqual([], self.db1.get_from_index('test-idx', 'value'))
- self.assertEqual([], self.db1.get_from_index('test-idx', 'localval'))
-
- def test_sync_propagates_deletes(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'both')
- doc1 = self.db1.create_doc_from_json(tests.simple_doc)
- doc_id = doc1.doc_id
- self.db1.create_index('test-idx', 'key')
- self.sync(self.db1, self.db2)
- self.db2.create_index('test-idx', 'key')
- self.db3 = self.create_database('test3', 'target')
- self.sync(self.db1, self.db3)
- self.db1.delete_doc(doc1)
- deleted_rev = doc1.rev
- self.sync(self.db1, self.db2)
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [(doc_id, deleted_rev)],
- 'source_uid': 'test1',
- 'source_gen': 2, 'last_known_gen': 1},
- 'return': {'docs': [], 'last_gen': 2}})
- self.assertGetDocIncludeDeleted(
- self.db1, doc_id, deleted_rev, None, False)
- self.assertGetDocIncludeDeleted(
- self.db2, doc_id, deleted_rev, None, False)
- self.assertEqual([], self.db1.get_from_index('test-idx', 'value'))
- self.assertEqual([], self.db2.get_from_index('test-idx', 'value'))
- self.sync(self.db2, self.db3)
- self.assertLastExchangeLog(self.db3,
- {'receive':
- {'docs': [(doc_id, deleted_rev)],
- 'source_uid': 'test2',
- 'source_gen': 2,
- 'last_known_gen': 0},
- 'return':
- {'docs': [], 'last_gen': 2}})
- self.assertGetDocIncludeDeleted(
- self.db3, doc_id, deleted_rev, None, False)
-
- def test_sync_propagates_deletes_2(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json('{"a": "1"}', doc_id='the-doc')
- self.sync(self.db1, self.db2)
- doc1_2 = self.db2.get_doc('the-doc')
- self.db2.delete_doc(doc1_2)
- self.sync(self.db1, self.db2)
- self.assertGetDocIncludeDeleted(
- self.db1, 'the-doc', doc1_2.rev, None, False)
-
- def test_sync_detects_identical_replica_uid(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test1', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc1')
- self.assertRaises(
- errors.InvalidReplicaUID, self.sync, self.db1, self.db2)
-
- def test_optional_sync_preserve_json(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- cont1 = '{ "a": 2 }'
- cont2 = '{ "b":3}'
- self.db1.create_doc_from_json(cont1, doc_id="1")
- self.db2.create_doc_from_json(cont2, doc_id="2")
- self.sync(self.db1, self.db2)
- self.assertEqual(cont1, self.db2.get_doc("1").get_json())
- self.assertEqual(cont2, self.db1.get_doc("2").get_json())
-
- def test_sync_propagates_resolution(self):
- """
- Test if synchronization propagates resolution.
-
- This test was adapted to decrypt remote content before assert.
- """
- self.db1 = self.create_database('test1', 'both')
- self.db2 = self.create_database('test2', 'both')
- doc1 = self.db1.create_doc_from_json('{"a": 1}', doc_id='the-doc')
- db3 = self.create_database('test3', 'both')
- self.sync(self.db2, self.db1)
- self.assertEqual(
- self.db1._get_generation_info(),
- self.db2._get_replica_gen_and_trans_id(self.db1._replica_uid))
- self.assertEqual(
- self.db2._get_generation_info(),
- self.db1._get_replica_gen_and_trans_id(self.db2._replica_uid))
- self.sync(db3, self.db1)
- # update on 2
- doc2 = self.make_document('the-doc', doc1.rev, '{"a": 2}')
- self.db2.put_doc(doc2)
- self.sync(self.db2, db3)
- self.assertEqual(db3.get_doc('the-doc').rev, doc2.rev)
- # update on 1
- doc1.set_json('{"a": 3}')
- self.db1.put_doc(doc1)
- # conflicts
- self.sync(self.db2, self.db1)
- self.sync(db3, self.db1)
- self.assertTrue(self.db2.get_doc('the-doc').has_conflicts)
- self.assertTrue(db3.get_doc('the-doc').has_conflicts)
- # resolve
- conflicts = self.db2.get_doc_conflicts('the-doc')
- doc4 = self.make_document('the-doc', None, '{"a": 4}')
- revs = [doc.rev for doc in conflicts]
- self.db2.resolve_doc(doc4, revs)
- doc2 = self.db2.get_doc('the-doc')
- self.assertEqual(doc4.get_json(), doc2.get_json())
- self.assertFalse(doc2.has_conflicts)
- self.sync(self.db2, db3)
- doc3 = db3.get_doc('the-doc')
-
- self.assertEqual(doc4.get_json(), doc3.get_json())
- self.assertFalse(doc3.has_conflicts)
- self.db1.close()
- self.db2.close()
- db3.close()
-
- def test_sync_puts_changes(self):
- """
- Test if sync puts changes in remote replica.
-
- This test was adapted to decrypt remote content before assert.
- """
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc = self.db1.create_doc_from_json(tests.simple_doc)
- self.assertEqual(1, self.sync(self.db1, self.db2))
- self.assertGetEncryptedDoc(
- self.db2, doc.doc_id, doc.rev, tests.simple_doc, False)
- self.assertEqual(1, self.db1._get_replica_gen_and_trans_id('test2')[0])
- self.assertEqual(1, self.db2._get_replica_gen_and_trans_id('test1')[0])
- self.assertLastExchangeLog(
- self.db2,
- {'receive': {'docs': [(doc.doc_id, doc.rev)],
- 'source_uid': 'test1',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return': {'docs': [], 'last_gen': 1}})
-
- def test_sync_pulls_changes(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc = self.db2.create_doc_from_json(tests.simple_doc)
- self.db1.create_index('test-idx', 'key')
- self.assertEqual(0, self.sync(self.db1, self.db2))
- self.assertGetDoc(self.db1, doc.doc_id, doc.rev,
- tests.simple_doc, False)
- self.assertEqual(1, self.db1._get_replica_gen_and_trans_id('test2')[0])
- self.assertEqual(1, self.db2._get_replica_gen_and_trans_id('test1')[0])
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [], 'last_known_gen': 0},
- 'return':
- {'docs': [(doc.doc_id, doc.rev)],
- 'last_gen': 1}})
- self.assertEqual([doc], self.db1.get_from_index('test-idx', 'value'))
-
- def test_sync_supersedes_conflicts(self):
- self.db1 = self.create_database('test1', 'both')
- self.db2 = self.create_database('test2', 'target')
- self.db3 = self.create_database('test3', 'both')
- doc1 = self.db1.create_doc_from_json('{"a": 1}', doc_id='the-doc')
- self.db2.create_doc_from_json('{"b": 1}', doc_id='the-doc')
- self.db3.create_doc_from_json('{"c": 1}', doc_id='the-doc')
- self.sync(self.db3, self.db1)
- self.assertEqual(
- self.db1._get_generation_info(),
- self.db3._get_replica_gen_and_trans_id(self.db1._replica_uid))
- self.assertEqual(
- self.db3._get_generation_info(),
- self.db1._get_replica_gen_and_trans_id(self.db3._replica_uid))
- self.sync(self.db3, self.db2)
- self.assertEqual(
- self.db2._get_generation_info(),
- self.db3._get_replica_gen_and_trans_id(self.db2._replica_uid))
- self.assertEqual(
- self.db3._get_generation_info(),
- self.db2._get_replica_gen_and_trans_id(self.db3._replica_uid))
- self.assertEqual(3, len(self.db3.get_doc_conflicts('the-doc')))
- doc1.set_json('{"a": 2}')
- self.db1.put_doc(doc1)
- self.sync(self.db3, self.db1)
- # original doc1 should have been removed from conflicts
- self.assertEqual(3, len(self.db3.get_doc_conflicts('the-doc')))
-
- def test_sync_stops_after_get_sync_info(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc)
- self.sync(self.db1, self.db2)
-
- def put_hook(state):
- self.fail("Tracehook triggered for %s" % (state,))
-
- self.sync(self.db1, self.db2, trace_hook_shallow=put_hook)
-
- def test_sync_detects_rollback_in_source(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc1')
- self.sync(self.db1, self.db2)
- self.db1_copy = self.copy_database(self.db1)
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.sync(self.db1, self.db2)
- self.assertRaises(
- errors.InvalidGeneration, self.sync, self.db1_copy, self.db2)
-
- def test_sync_detects_rollback_in_target(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.sync(self.db1, self.db2)
- self.db2_copy = self.copy_database(self.db2)
- self.db2.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.sync(self.db1, self.db2)
- self.assertRaises(
- errors.InvalidGeneration, self.sync, self.db1, self.db2_copy)
-
- def test_sync_detects_diverged_source(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db3 = self.copy_database(self.db1)
- self.db1.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.db3.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.sync(self.db1, self.db2)
- self.assertRaises(
- errors.InvalidTransactionId, self.sync, self.db3, self.db2)
-
- def test_sync_detects_diverged_target(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db3 = self.copy_database(self.db2)
- self.db3.create_doc_from_json(tests.nested_doc, doc_id="divergent")
- self.db1.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.sync(self.db1, self.db2)
- self.assertRaises(
- errors.InvalidTransactionId, self.sync, self.db1, self.db3)
-
- def test_sync_detects_rollback_and_divergence_in_source(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc1')
- self.sync(self.db1, self.db2)
- self.db1_copy = self.copy_database(self.db1)
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc3')
- self.sync(self.db1, self.db2)
- self.db1_copy.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.db1_copy.create_doc_from_json(tests.simple_doc, doc_id='doc3')
- self.assertRaises(
- errors.InvalidTransactionId, self.sync, self.db1_copy, self.db2)
-
- def test_sync_detects_rollback_and_divergence_in_target(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.sync(self.db1, self.db2)
- self.db2_copy = self.copy_database(self.db2)
- self.db2.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.db2.create_doc_from_json(tests.simple_doc, doc_id='doc3')
- self.sync(self.db1, self.db2)
- self.db2_copy.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.db2_copy.create_doc_from_json(tests.simple_doc, doc_id='doc3')
- self.assertRaises(
- errors.InvalidTransactionId, self.sync, self.db1, self.db2_copy)
-
-
-def make_local_db_and_soledad_target(
- test, path='test',
- source_replica_uid=uuid4().hex):
- test.startTwistedServer()
- replica_uid = os.path.basename(path)
- db = test.request_state._create_database(replica_uid)
- st = soledad_sync_target(
- test, db._dbname,
- source_replica_uid=source_replica_uid)
- return db, st
-
-
-target_scenarios = [
- ('leap', {
- 'create_db_and_target': make_local_db_and_soledad_target,
- 'make_app_with_state': make_soledad_app,
- 'do_sync': sync_via_synchronizer_and_soledad}),
-]
diff --git a/testing/tests/sync/test_sync.py b/testing/tests/sync/test_sync.py
deleted file mode 100644
index fb9a0245..00000000
--- a/testing/tests/sync/test_sync.py
+++ /dev/null
@@ -1,233 +0,0 @@
-# -*- coding: utf-8 -*-
-# test_sync.py
-# Copyright (C) 2013, 2014 LEAP
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import json
-import pytest
-import threading
-import time
-
-from six.moves.urllib.parse import urljoin
-from mock import Mock
-from twisted.internet import defer
-
-from testscenarios import TestWithScenarios
-
-from leap.soledad.common import couch
-from leap.soledad.client import sync
-
-from test_soledad import u1db_tests as tests
-from test_soledad.u1db_tests import TestCaseWithServer
-from test_soledad.u1db_tests import simple_doc
-from test_soledad.util import make_token_soledad_app
-from test_soledad.util import make_soledad_document_for_test
-from test_soledad.util import soledad_sync_target
-from test_soledad.util import BaseSoledadTest
-from test_soledad.util import SoledadWithCouchServerMixin
-from test_soledad.util import CouchDBTestCase
-
-
-class InterruptableSyncTestCase(
- BaseSoledadTest, CouchDBTestCase, TestCaseWithServer):
-
- """
- Tests for encrypted sync using Soledad server backed by a couch database.
- """
-
- @staticmethod
- def make_app_with_state(state):
- return make_token_soledad_app(state)
-
- make_document_for_test = make_soledad_document_for_test
-
- sync_target = soledad_sync_target
-
- def make_app(self):
- self.request_state = couch.CouchServerState(self.couch_url)
- return self.make_app_with_state(self.request_state)
-
- def setUp(self):
- TestCaseWithServer.setUp(self)
- CouchDBTestCase.setUp(self)
-
- def tearDown(self):
- CouchDBTestCase.tearDown(self)
- TestCaseWithServer.tearDown(self)
-
- def test_interruptable_sync(self):
- """
- Test that a sync of many small documents can be interrupted.
- """
-
- self.skipTest("Sync is currently not interruptable.")
-
- class _SyncInterruptor(threading.Thread):
-
- """
- A thread meant to interrupt the sync process.
- """
-
- def __init__(self, soledad, couchdb):
- self._soledad = soledad
- self._couchdb = couchdb
- threading.Thread.__init__(self)
-
- def run(self):
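- # NOTE: 'db' below is the couch database created later in this test
- # (the same object passed in as self._couchdb); the name is resolved
- # only when the thread runs, after 'db' has been assigned.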
- while db._get_generation() < 2:
- # print "WAITING %d" % db._get_generation()
- time.sleep(0.1)
- self._soledad.stop_sync()
- time.sleep(1)
-
- number_of_docs = 10
- self.startServer()
-
- # instantiate soledad and create a document
- sol = self._soledad_instance(
- user='user-uuid', server_url=self.getURL())
-
- # ensure remote db exists before syncing
- db = couch.CouchDatabase.open_database(
- urljoin(self.couch_url, 'user-user-uuid'),
- create=True)
-
- # create interruptor thread
- t = _SyncInterruptor(sol, db)
- t.start()
-
- d = sol.get_all_docs()
- d.addCallback(lambda results: self.assertEqual([], results[1]))
-
- def _create_docs(results):
- # create many small files
- deferreds = []
- for i in range(0, number_of_docs):
- deferreds.append(sol.create_doc(json.loads(simple_doc)))
- return defer.DeferredList(deferreds)
-
- # sync with server
- d.addCallback(_create_docs)
- d.addCallback(lambda _: sol.get_all_docs())
- d.addCallback(
- lambda results: self.assertEqual(number_of_docs, len(results[1])))
- d.addCallback(lambda _: sol.sync())
- d.addCallback(lambda _: t.join())
- d.addCallback(lambda _: db.get_all_docs())
- d.addCallback(
- lambda results: self.assertNotEqual(
- number_of_docs, len(results[1])))
- d.addCallback(lambda _: sol.sync())
- d.addCallback(lambda _: db.get_all_docs())
- d.addCallback(
- lambda results: self.assertEqual(number_of_docs, len(results[1])))
-
- def _tear_down(results):
- db.delete_database()
- db.close()
- sol.close()
-
- d.addCallback(_tear_down)
- return d
-
-
-@pytest.mark.needs_couch
-class TestSoledadDbSync(
- TestWithScenarios,
- SoledadWithCouchServerMixin,
- tests.TestCaseWithServer):
-
- """
- Test db.sync remote sync shortcut
- """
-
- scenarios = [
- ('py-token-http', {
- 'make_app_with_state': make_token_soledad_app,
- 'make_database_for_test': tests.make_memory_database_for_test,
- 'token': True
- }),
- ]
-
- oauth = False
- token = False
-
- def setUp(self):
- """
- Need to explicitly invoke initialization on all bases.
- """
- SoledadWithCouchServerMixin.setUp(self)
- self.startTwistedServer()
- self.db = self.make_database_for_test(self, 'test1')
- self.db2 = self.request_state._create_database(replica_uid='test')
-
- def tearDown(self):
- """
- Need to explicitly invoke destruction on all bases.
- """
- SoledadWithCouchServerMixin.tearDown(self)
- # tests.TestCaseWithServer.tearDown(self)
-
- def do_sync(self):
- """
- Perform sync using SoledadSynchronizer, SoledadSyncTarget
- and Token auth.
- """
- target = soledad_sync_target(
- self, self.db2._dbname,
- source_replica_uid=self._soledad._dbpool.replica_uid)
- return sync.SoledadSynchronizer(
- self.db,
- target).sync()
-
- @defer.inlineCallbacks
- def test_db_sync(self):
- """
- Test sync.
-
- Adapted to check for encrypted content.
- """
-
- doc1 = self.db.create_doc_from_json(tests.simple_doc)
- doc2 = self.db2.create_doc_from_json(tests.nested_doc)
-
- local_gen_before_sync = yield self.do_sync()
- gen, _, changes = self.db.whats_changed(local_gen_before_sync)
- self.assertEqual(1, len(changes))
- self.assertEqual(doc2.doc_id, changes[0][0])
- self.assertEqual(1, gen - local_gen_before_sync)
- self.assertGetEncryptedDoc(
- self.db2, doc1.doc_id, doc1.rev, tests.simple_doc, False)
- self.assertGetEncryptedDoc(
- self.db, doc2.doc_id, doc2.rev, tests.nested_doc, False)
-
- # TODO: add u1db.tests.test_sync.TestRemoteSyncIntegration
-
-
-class TestSoledadSynchronizer(BaseSoledadTest):
-
- def setUp(self):
- BaseSoledadTest.setUp(self)
- self.db = Mock()
- self.target = Mock()
- self.synchronizer = sync.SoledadSynchronizer(
- self.db,
- self.target)
-
- def test_docs_by_gen_includes_deleted(self):
- changes = [('id', 'gen', 'trans')]
- docs_by_gen = self.synchronizer._docs_by_gen_from_changes(changes)
- f, args, kwargs = docs_by_gen[0][0]
- self.assertIn('include_deleted', kwargs)
- self.assertTrue(kwargs['include_deleted'])
diff --git a/testing/tests/sync/test_sync_mutex.py b/testing/tests/sync/test_sync_mutex.py
deleted file mode 100644
index fdd2aacd..00000000
--- a/testing/tests/sync/test_sync_mutex.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# -*- coding: utf-8 -*-
-# test_sync_mutex.py
-# Copyright (C) 2013, 2014 LEAP
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-
-"""
-Test that synchronization is a critical section and, as such, two
-synchronization processes cannot run concurrently.
-"""
-
-
-import pytest
-import time
-import uuid
-
-from six.moves.urllib.parse import urljoin
-
-from twisted.internet import defer
-
-from leap.soledad.client.sync import SoledadSynchronizer
-
-from leap.soledad.common.couch.state import CouchServerState
-from leap.soledad.common.couch import CouchDatabase
-from test_soledad.u1db_tests import TestCaseWithServer
-
-from test_soledad.util import CouchDBTestCase
-from test_soledad.util import BaseSoledadTest
-from test_soledad.util import make_token_soledad_app
-from test_soledad.util import make_soledad_document_for_test
-from test_soledad.util import soledad_sync_target
-
-
-# monkey-patch the soledad synchronizer so it stores start and finish times
-
-_old_sync = SoledadSynchronizer.sync
-
-
-def _timed_sync(self):
- t = time.time()
-
- sync_id = uuid.uuid4()
-
- if not getattr(self.source, 'sync_times', False):
- self.source.sync_times = {}
-
- self.source.sync_times[sync_id] = {'start': t}
-
- def _store_finish_time(passthrough):
- t = time.time()
- self.source.sync_times[sync_id]['end'] = t
- return passthrough
-
- d = _old_sync(self)
- d.addBoth(_store_finish_time)
- return d
-
-
-SoledadSynchronizer.sync = _timed_sync
-
-# -- end of monkey-patching
-
-
-@pytest.mark.needs_couch
-class TestSyncMutex(
- BaseSoledadTest, CouchDBTestCase, TestCaseWithServer):
-
- @staticmethod
- def make_app_with_state(state):
- return make_token_soledad_app(state)
-
- make_document_for_test = make_soledad_document_for_test
-
- sync_target = soledad_sync_target
-
- def make_app(self):
- self.request_state = CouchServerState(self.couch_url)
- return self.make_app_with_state(self.request_state)
-
- def setUp(self):
- TestCaseWithServer.setUp(self)
- CouchDBTestCase.setUp(self)
- self.user = ('user-%s' % uuid.uuid4().hex)
-
- def tearDown(self):
- CouchDBTestCase.tearDown(self)
- TestCaseWithServer.tearDown(self)
-
- def test_two_concurrent_syncs_do_not_overlap_no_docs(self):
- self.startServer()
-
- # ensure remote db exists before syncing
- db = CouchDatabase.open_database(
- urljoin(self.couch_url, 'user-' + self.user),
- create=True)
-
- sol = self._soledad_instance(
- user=self.user, server_url=self.getURL())
-
- d1 = sol.sync()
- d2 = sol.sync()
-
- def _assert_syncs_do_not_overlap(thearg):
- # recover sync times
- sync_times = []
- for key in sol._dbsyncer.sync_times:
- sync_times.append(sol._dbsyncer.sync_times[key])
- sync_times.sort(key=lambda s: s['start'])
-
- self.assertTrue(
- (sync_times[0]['start'] < sync_times[0]['end'] and
- sync_times[0]['end'] < sync_times[1]['start'] and
- sync_times[1]['start'] < sync_times[1]['end']))
-
- db.delete_database()
- db.close()
- sol.close()
-
- d = defer.gatherResults([d1, d2])
- d.addBoth(_assert_syncs_do_not_overlap)
- return d
diff --git a/testing/tests/sync/test_sync_target.py b/testing/tests/sync/test_sync_target.py
deleted file mode 100644
index 712f0d3f..00000000
--- a/testing/tests/sync/test_sync_target.py
+++ /dev/null
@@ -1,968 +0,0 @@
-# -*- coding: utf-8 -*-
-# test_sync_target.py
-# Copyright (C) 2013, 2014 LEAP
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-Test Leap backend bits: sync target
-"""
-import os
-import time
-import json
-import pytest
-import random
-import string
-import shutil
-
-from six import StringIO as cStringIO
-from uuid import uuid4
-
-from testscenarios import TestWithScenarios
-from twisted.internet import defer
-
-from leap.soledad.client import http_target as target
-from leap.soledad.client.http_target.fetch_protocol import DocStreamReceiver
-from leap.soledad.client._db.sqlcipher import SQLCipherU1DBSync
-from leap.soledad.client._db.sqlcipher import SQLCipherOptions
-from leap.soledad.client._db.sqlcipher import SQLCipherDatabase
-from leap.soledad.client import _crypto
-
-from leap.soledad.common import l2db
-
-from leap.soledad.common.document import SoledadDocument
-from test_soledad import u1db_tests as tests
-from test_soledad.util import make_sqlcipher_database_for_test
-from test_soledad.util import make_soledad_app
-from test_soledad.util import make_token_soledad_app
-from test_soledad.util import make_soledad_document_for_test
-from test_soledad.util import soledad_sync_target
-from twisted.trial import unittest
-from test_soledad.util import SoledadWithCouchServerMixin
-from test_soledad.util import ADDRESS
-from test_soledad.util import SQLCIPHER_SCENARIOS
-
-
-# -----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_remote_sync_target`.
-# -----------------------------------------------------------------------------
-
-class TestSoledadParseReceivedDocResponse(unittest.TestCase):
-
- """
- Some tests had to be copied to this class so we can instantiate our own
- target.
- """
-
- def parse(self, stream):
- parser = DocStreamReceiver(None, defer.Deferred(),
- lambda *_: defer.succeed(42))
- parser.dataReceived(stream)
- parser.finish()
-
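- # As exercised by the tests below, a well-formed stream opens with
- # '[\r\n', contains JSON entries (metadata and documents) separated by
- # ',\r\n', and closes with '\r\n]'; anything malformed must raise
- # BrokenSyncStream.
-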
- def test_extra_comma(self):
- doc = SoledadDocument('i', rev='r')
- doc.content = {'a': 'b'}
-
- encrypted_docstr = _crypto.SoledadCrypto('safe').encrypt_doc(doc)
-
- with self.assertRaises(l2db.errors.BrokenSyncStream):
- self.parse("[\r\n{},\r\n]")
-
- with self.assertRaises(l2db.errors.BrokenSyncStream):
- self.parse(
- ('[\r\n{},\r\n{"id": "i", "rev": "r", ' +
- '"gen": 3, "trans_id": "T-sid"},\r\n' +
- '%s,\r\n]') % encrypted_docstr)
-
- def test_wrong_start(self):
- with self.assertRaises(l2db.errors.BrokenSyncStream):
- self.parse("{}\r\n]")
-
- with self.assertRaises(l2db.errors.BrokenSyncStream):
- self.parse("\r\n{}\r\n]")
-
- def test_wrong_end(self):
- with self.assertRaises(l2db.errors.BrokenSyncStream):
- self.parse("[\r\n{}")
-
- with self.assertRaises(l2db.errors.BrokenSyncStream):
- self.parse("[\r\n")
-
- def test_missing_comma(self):
- with self.assertRaises(l2db.errors.BrokenSyncStream):
- self.parse(
- '[\r\n{}\r\n{"id": "i", "rev": "r", '
- '"content": "c", "gen": 3}\r\n]')
-
- def test_no_entries(self):
- with self.assertRaises(l2db.errors.BrokenSyncStream):
- self.parse("[\r\n]")
-
- def test_error_in_stream(self):
- with self.assertRaises(l2db.errors.BrokenSyncStream):
- self.parse(
- '[\r\n{"new_generation": 0},'
- '\r\n{"error": "unavailable"}\r\n')
-
- with self.assertRaises(l2db.errors.BrokenSyncStream):
- self.parse(
- '[\r\n{"error": "unavailable"}\r\n')
-
- with self.assertRaises(l2db.errors.BrokenSyncStream):
- self.parse('[\r\n{"error": "?"}\r\n')
-
-#
-# functions for TestRemoteSyncTargets
-#
-
-
-def make_local_db_and_soledad_target(
- test, path='test',
- source_replica_uid=uuid4().hex):
- test.startTwistedServer()
- replica_uid = os.path.basename(path)
- db = test.request_state._create_database(replica_uid)
- st = soledad_sync_target(
- test, db._dbname,
- source_replica_uid=source_replica_uid)
- return db, st
-
-
-def make_local_db_and_token_soledad_target(
- test,
- source_replica_uid=uuid4().hex):
- db, st = make_local_db_and_soledad_target(
- test, path='test',
- source_replica_uid=source_replica_uid)
- st.set_token_credentials('user-uuid', 'auth-token')
- return db, st
-
-
-@pytest.mark.needs_couch
-class TestSoledadSyncTarget(
- TestWithScenarios,
- SoledadWithCouchServerMixin,
- tests.TestCaseWithServer):
-
- scenarios = [
- ('token_soledad',
- {'make_app_with_state': make_token_soledad_app,
- 'make_document_for_test': make_soledad_document_for_test,
- 'create_db_and_target': make_local_db_and_token_soledad_target,
- 'make_database_for_test': make_sqlcipher_database_for_test,
- 'sync_target': soledad_sync_target}),
- ]
-
- def getSyncTarget(self, path=None, source_replica_uid=uuid4().hex):
- if self.port is None:
- self.startTwistedServer()
- if path is None:
- path = self.db2._dbname
- target = self.sync_target(
- self, path,
- source_replica_uid=source_replica_uid)
- return target
-
- def setUp(self):
- TestWithScenarios.setUp(self)
- SoledadWithCouchServerMixin.setUp(self)
- self.startTwistedServer()
- self.db1 = make_sqlcipher_database_for_test(self, 'test1')
- self.db2 = self.request_state._create_database('test')
-
- def tearDown(self):
- # db2, _ = self.request_state.ensure_database('test2')
- self.delete_db(self.db2._dbname)
- self.db1.close()
- SoledadWithCouchServerMixin.tearDown(self)
- TestWithScenarios.tearDown(self)
-
- @defer.inlineCallbacks
- def test_sync_exchange_send(self):
- """
- Test for sync exchanging send of document.
-
- This test was adapted to decrypt remote content before assert.
- """
- db = self.db2
- remote_target = self.getSyncTarget()
- other_docs = []
-
- def receive_doc(doc, gen, trans_id):
- other_docs.append((doc.doc_id, doc.rev, doc.get_json()))
-
- doc = self.make_document('doc-here', 'replica:1', '{"value": "here"}')
- get_doc = (lambda _: doc, (1,), {})
- new_gen, trans_id = yield remote_target.sync_exchange(
- [(get_doc, 10, 'T-sid')], 'replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=receive_doc)
- self.assertEqual(1, new_gen)
- self.assertGetEncryptedDoc(
- db, 'doc-here', 'replica:1', '{"value": "here"}', False)
-
- @defer.inlineCallbacks
- def test_sync_exchange_send_failure_and_retry_scenario(self):
- """
- Test for sync exchange failure and retry.
-
- This test was adapted to decrypt remote content before assert.
- """
-
- def blackhole_getstderr(inst):
- # six.StringIO is the class itself, so instantiate it directly
- return cStringIO()
-
- db = self.db2
- _put_doc_if_newer = db._put_doc_if_newer
- trigger_ids = ['doc-here2']
-
- def bomb_put_doc_if_newer(self, doc, save_conflict,
- replica_uid=None, replica_gen=None,
- replica_trans_id=None, number_of_docs=None,
- doc_idx=None, sync_id=None):
- if doc.doc_id in trigger_ids:
- raise l2db.errors.U1DBError
- return _put_doc_if_newer(doc, save_conflict=save_conflict,
- replica_uid=replica_uid,
- replica_gen=replica_gen,
- replica_trans_id=replica_trans_id,
- number_of_docs=number_of_docs,
- doc_idx=doc_idx, sync_id=sync_id)
- from leap.soledad.common.backend import SoledadBackend
- self.patch(
- SoledadBackend, '_put_doc_if_newer', bomb_put_doc_if_newer)
- remote_target = self.getSyncTarget(
- source_replica_uid='replica')
- other_changes = []
-
- def receive_doc(doc, gen, trans_id):
- other_changes.append(
- (doc.doc_id, doc.rev, doc.get_json(), gen, trans_id))
-
- doc1 = self.make_document('doc-here', 'replica:1', '{"value": "here"}')
- doc2 = self.make_document('doc-here2', 'replica:1',
- '{"value": "here2"}')
- get_doc1 = (lambda _: doc1, (1,), {})
- get_doc2 = (lambda _: doc2, (2,), {})
-
- with self.assertRaises(l2db.errors.U1DBError):
- yield remote_target.sync_exchange(
- [(get_doc1, 10, 'T-sid'), (get_doc2, 11, 'T-sud')],
- 'replica',
- last_known_generation=0,
- last_known_trans_id=None,
- insert_doc_cb=receive_doc)
-
- self.assertGetEncryptedDoc(
- db, 'doc-here', 'replica:1', '{"value": "here"}',
- False)
- self.assertEqual(
- (10, 'T-sid'), db._get_replica_gen_and_trans_id('replica'))
- self.assertEqual([], other_changes)
- # retry
- trigger_ids = []
- new_gen, trans_id = yield remote_target.sync_exchange(
- [(get_doc2, 11, 'T-sud')], 'replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=receive_doc)
- self.assertGetEncryptedDoc(
- db, 'doc-here2', 'replica:1', '{"value": "here2"}',
- False)
- self.assertEqual(
- (11, 'T-sud'), db._get_replica_gen_and_trans_id('replica'))
- self.assertEqual(2, new_gen)
- self.assertEqual(
- ('doc-here', 'replica:1', '{"value": "here"}', 1),
- other_changes[0][:-1])
-
- @defer.inlineCallbacks
- def test_sync_exchange_send_ensure_callback(self):
- """
- Test that sync exchange calls ensure_callback with the replica uid.
-
- This test was adapted to decrypt remote content before assert.
- """
- remote_target = self.getSyncTarget()
- other_docs = []
- replica_uid_box = []
-
- def receive_doc(doc, gen, trans_id):
- other_docs.append((doc.doc_id, doc.rev, doc.get_json()))
-
- def ensure_cb(replica_uid):
- replica_uid_box.append(replica_uid)
-
- doc = self.make_document('doc-here', 'replica:1', '{"value": "here"}')
- get_doc = (lambda _: doc, (1,), {})
- new_gen, trans_id = yield remote_target.sync_exchange(
- [(get_doc, 10, 'T-sid')], 'replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=receive_doc,
- ensure_callback=ensure_cb)
- self.assertEqual(1, new_gen)
- db = self.db2
- self.assertEqual(1, len(replica_uid_box))
- self.assertEqual(db._replica_uid, replica_uid_box[0])
- self.assertGetEncryptedDoc(
- db, 'doc-here', 'replica:1', '{"value": "here"}', False)
-
- @defer.inlineCallbacks
- def test_sync_exchange_send_events(self):
- """
- Test for sync exchange's SOLEDAD_SYNC_SEND_STATUS event.
- """
- remote_target = self.getSyncTarget()
- uuid = remote_target.uuid
- events = []
-
- def mocked_events(*args):
- events.append((args))
- self.patch(
- target.send, '_emit_send_status', mocked_events)
-
- doc = self.make_document('doc-here', 'replica:1', '{"value": "here"}')
- doc2 = self.make_document('doc-here', 'replica:1', '{"value": "here"}')
- doc3 = self.make_document('doc-here', 'replica:1', '{"value": "here"}')
- get_doc = (lambda _: doc, (1,), {})
- get_doc2 = (lambda _: doc2, (1,), {})
- get_doc3 = (lambda _: doc3, (1,), {})
- docs = [(get_doc, 10, 'T-sid'),
- (get_doc2, 11, 'T-sid2'), (get_doc3, 12, 'T-sid3')]
- new_gen, trans_id = yield remote_target.sync_exchange(
- docs, 'replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=lambda _: 1,
- ensure_callback=lambda _: 1)
- self.assertEqual(1, new_gen)
- self.assertEqual(4, len(events))
- self.assertEquals([(uuid, 0, 3), (uuid, 1, 3), (uuid, 2, 3),
- (uuid, 3, 3)], events)
-
- def test_sync_exchange_in_stream_error(self):
- self.skipTest("bypass this test because our sync_exchange process "
- "does not return u1db error 503 \"unavailable\" for "
- "now")
-
- @defer.inlineCallbacks
- def test_get_sync_info(self):
- db = self.db2
- db._set_replica_gen_and_trans_id('other-id', 1, 'T-transid')
- remote_target = self.getSyncTarget(
- source_replica_uid='other-id')
- sync_info = yield remote_target.get_sync_info('other-id')
- self.assertEqual(
- ('test', 0, '', 1, 'T-transid'),
- sync_info)
-
- @defer.inlineCallbacks
- def test_record_sync_info(self):
- remote_target = self.getSyncTarget(
- source_replica_uid='other-id')
- yield remote_target.record_sync_info('other-id', 2, 'T-transid')
- self.assertEqual((2, 'T-transid'),
- self.db2._get_replica_gen_and_trans_id('other-id'))
-
- @defer.inlineCallbacks
- def test_sync_exchange_receive(self):
- db = self.db2
- doc = db.create_doc_from_json('{"value": "there"}')
- remote_target = self.getSyncTarget()
- other_changes = []
-
- def receive_doc(doc, gen, trans_id):
- other_changes.append(
- (doc.doc_id, doc.rev, doc.get_json(), gen, trans_id))
-
- new_gen, trans_id = yield remote_target.sync_exchange(
- [], 'replica', last_known_generation=0, last_known_trans_id=None,
- insert_doc_cb=receive_doc)
- self.assertEqual(1, new_gen)
- self.assertEqual(
- (doc.doc_id, doc.rev, '{"value": "there"}', 1),
- other_changes[0][:-1])
-
-
-# -----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_sync`.
-# -----------------------------------------------------------------------------
-
-target_scenarios = [
- ('mem,token_soledad',
- {'create_db_and_target': make_local_db_and_token_soledad_target,
- 'make_app_with_state': make_soledad_app,
- 'make_database_for_test': tests.make_memory_database_for_test,
- 'copy_database_for_test': tests.copy_memory_database_for_test,
- 'make_document_for_test': tests.make_document_for_test})
-]
-
-
-@pytest.mark.needs_couch
-class SoledadDatabaseSyncTargetTests(
- TestWithScenarios,
- SoledadWithCouchServerMixin,
- tests.DatabaseBaseTests,
- tests.TestCaseWithServer):
- """
- Adaptation of u1db.tests.test_sync.DatabaseSyncTargetTests.
- """
-
- # TODO: implement _set_trace_hook(_shallow) in SoledadHTTPSyncTarget so
- # skipped tests can be successfully executed.
-
- scenarios = target_scenarios
-
- whitebox = False
-
- def setUp(self):
- tests.TestCaseWithServer.setUp(self)
- self.other_changes = []
- SoledadWithCouchServerMixin.setUp(self)
- self.db, self.st = make_local_db_and_soledad_target(self)
-
- def tearDown(self):
- self.db.close()
- tests.TestCaseWithServer.tearDown(self)
- SoledadWithCouchServerMixin.tearDown(self)
-
- def set_trace_hook(self, callback, shallow=False):
- setter = (self.st._set_trace_hook if not shallow else
- self.st._set_trace_hook_shallow)
- try:
- setter(callback)
- except NotImplementedError:
- self.skipTest("%s does not implement _set_trace_hook"
- % (self.st.__class__.__name__,))
-
- @defer.inlineCallbacks
- def test_sync_exchange(self):
- """
- Test sync exchange.
-
- This test was adapted to decrypt remote content before assert.
- """
- docs_by_gen = [
- ((self.make_document,
- ('doc-id', 'replica:1', tests.simple_doc,), {}),
- 10, 'T-sid')]
- new_gen, trans_id = yield self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=self.receive_doc)
- self.assertGetEncryptedDoc(
- self.db, 'doc-id', 'replica:1', tests.simple_doc, False)
- self.assertTransactionLog(['doc-id'], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual(([], 1, last_trans_id),
- (self.other_changes, new_gen, last_trans_id))
- sync_info = yield self.st.get_sync_info('replica')
- self.assertEqual(10, sync_info[3])
-
- @defer.inlineCallbacks
- def test_sync_exchange_push_many(self):
- """
- Test sync exchange pushing many documents.
-
- This test was adapted to decrypt remote content before assert.
- """
- docs_by_gen = [
- ((self.make_document,
- ('doc-id', 'replica:1', tests.simple_doc), {}), 10, 'T-1'),
- ((self.make_document,
- ('doc-id2', 'replica:1', tests.nested_doc), {}), 11, 'T-2')]
- new_gen, trans_id = yield self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=self.receive_doc)
- self.assertGetEncryptedDoc(
- self.db, 'doc-id', 'replica:1', tests.simple_doc, False)
- self.assertGetEncryptedDoc(
- self.db, 'doc-id2', 'replica:1', tests.nested_doc, False)
- self.assertTransactionLog(['doc-id', 'doc-id2'], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual(([], 2, last_trans_id),
- (self.other_changes, new_gen, trans_id))
- sync_info = yield self.st.get_sync_info('replica')
- self.assertEqual(11, sync_info[3])
-
- @defer.inlineCallbacks
- def test_sync_exchange_returns_many_new_docs(self):
- """
- Test sync exchange returning many new documents.
-
- This test was adapted to avoid JSON serialization comparison as local
- and remote representations might differ. It looks directly at the
- doc's contents instead.
- """
- doc = self.db.create_doc_from_json(tests.simple_doc)
- doc2 = self.db.create_doc_from_json(tests.nested_doc)
- self.assertTransactionLog([doc.doc_id, doc2.doc_id], self.db)
- new_gen, _ = yield self.st.sync_exchange(
- [], 'other-replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id, doc2.doc_id], self.db)
- self.assertEqual(2, new_gen)
- self.assertEqual(
- [(doc.doc_id, doc.rev, 1),
- (doc2.doc_id, doc2.rev, 2)],
- [c[:-3] + c[-2:-1] for c in self.other_changes])
- self.assertEqual(
- json.loads(tests.simple_doc),
- json.loads(self.other_changes[0][2]))
- self.assertEqual(
- json.loads(tests.nested_doc),
- json.loads(self.other_changes[1][2]))
- if self.whitebox:
- self.assertEqual(
- self.db._last_exchange_log['return'],
- {'last_gen': 2, 'docs':
- [(doc.doc_id, doc.rev), (doc2.doc_id, doc2.rev)]})
-
- def receive_doc(self, doc, gen, trans_id):
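- # Collect every document handed back by the target as a
- # (doc_id, rev, json, gen, trans_id) tuple.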
- self.other_changes.append(
- (doc.doc_id, doc.rev, doc.get_json(), gen, trans_id))
-
- def test_get_sync_target(self):
- self.assertIsNot(None, self.st)
-
- @defer.inlineCallbacks
- def test_get_sync_info(self):
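- # get_sync_info returns (target_replica_uid, target_gen,
- # target_trans_id, last_known_source_gen, last_known_source_trans_id).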
- sync_info = yield self.st.get_sync_info('other')
- self.assertEqual(
- ('test', 0, '', 0, ''), sync_info)
-
- @defer.inlineCallbacks
- def test_create_doc_updates_sync_info(self):
- sync_info = yield self.st.get_sync_info('other')
- self.assertEqual(
- ('test', 0, '', 0, ''), sync_info)
- self.db.create_doc_from_json(tests.simple_doc)
- sync_info = yield self.st.get_sync_info('other')
- self.assertEqual(1, sync_info[1])
-
- @defer.inlineCallbacks
- def test_record_sync_info(self):
- yield self.st.record_sync_info('replica', 10, 'T-transid')
- sync_info = yield self.st.get_sync_info('replica')
- self.assertEqual(
- ('test', 0, '', 10, 'T-transid'), sync_info)
-
- @defer.inlineCallbacks
- def test_sync_exchange_deleted(self):
- doc = self.db.create_doc_from_json('{}')
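- # Build a revision that supersedes the local one in the vector
- # clock, simulating a deletion made on the remote replica.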
- edit_rev = 'replica:1|' + doc.rev
- docs_by_gen = [
- ((self.make_document, (doc.doc_id, edit_rev, None), {}),
- 10, 'T-sid')]
- new_gen, trans_id = yield self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=self.receive_doc)
- self.assertGetDocIncludeDeleted(
- self.db, doc.doc_id, edit_rev, None, False)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual(([], 2, last_trans_id),
- (self.other_changes, new_gen, trans_id))
- sync_info = yield self.st.get_sync_info('replica')
- self.assertEqual(10, sync_info[3])
-
- @defer.inlineCallbacks
- def test_sync_exchange_refuses_conflicts(self):
- doc = self.db.create_doc_from_json(tests.simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_doc = '{"key": "altval"}'
- docs_by_gen = [
- ((self.make_document, (doc.doc_id, 'replica:1', new_doc), {}), 10,
- 'T-sid')]
- new_gen, _ = yield self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- self.assertEqual(
- (doc.doc_id, doc.rev, tests.simple_doc, 1),
- self.other_changes[0][:-1])
- self.assertEqual(1, new_gen)
- if self.whitebox:
- self.assertEqual(self.db._last_exchange_log['return'],
- {'last_gen': 1, 'docs': [(doc.doc_id, doc.rev)]})
-
- @defer.inlineCallbacks
- def test_sync_exchange_ignores_convergence(self):
- doc = self.db.create_doc_from_json(tests.simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- gen, txid = self.db._get_generation_info()
- docs_by_gen = [
- ((self.make_document, (doc.doc_id, doc.rev, tests.simple_doc), {}),
- 10, 'T-sid')]
- new_gen, _ = yield self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=gen,
- last_known_trans_id=txid, insert_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- self.assertEqual(([], 1), (self.other_changes, new_gen))
-
- @defer.inlineCallbacks
- def test_sync_exchange_returns_new_docs(self):
- doc = self.db.create_doc_from_json(tests.simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_gen, _ = yield self.st.sync_exchange(
- [], 'other-replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- self.assertEqual(
- (doc.doc_id, doc.rev, tests.simple_doc, 1),
- self.other_changes[0][:-1])
- self.assertEqual(1, new_gen)
- if self.whitebox:
- self.assertEqual(self.db._last_exchange_log['return'],
- {'last_gen': 1, 'docs': [(doc.doc_id, doc.rev)]})
-
- @defer.inlineCallbacks
- def test_sync_exchange_returns_deleted_docs(self):
- doc = self.db.create_doc_from_json(tests.simple_doc)
- self.db.delete_doc(doc)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- new_gen, _ = yield self.st.sync_exchange(
- [], 'other-replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- self.assertEqual(2, new_gen)
- self.assertEqual(
- (doc.doc_id, doc.rev, None, 2), self.other_changes[0][:-1])
- if self.whitebox:
- self.assertEqual(self.db._last_exchange_log['return'],
- {'last_gen': 2, 'docs': [(doc.doc_id, doc.rev)]})
-
- @defer.inlineCallbacks
- def test_sync_exchange_getting_newer_docs(self):
- doc = self.db.create_doc_from_json(tests.simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_doc = '{"key": "altval"}'
- docs_by_gen = [
- ((self.make_document, (doc.doc_id, 'test:1|z:2', new_doc), {}), 10,
- 'T-sid')]
- new_gen, _ = yield self.st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- self.assertEqual(([], 2), (self.other_changes, new_gen))
-
- @defer.inlineCallbacks
- def test_sync_exchange_with_concurrent_updates_of_synced_doc(self):
- expected = []
-
- def before_whatschanged_cb(state):
- if state != 'before whats_changed':
- return
- cont = '{"key": "concurrent"}'
- conc_rev = self.db.put_doc(
- self.make_document(doc.doc_id, 'test:1|z:2', cont))
- expected.append((doc.doc_id, conc_rev, cont, 3))
-
- self.set_trace_hook(before_whatschanged_cb)
- doc = self.db.create_doc_from_json(tests.simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_doc = '{"key": "altval"}'
- docs_by_gen = [
- ((self.make_document, (doc.doc_id, 'test:1|z:2', new_doc), {}), 10,
- 'T-sid')]
- new_gen, _ = yield self.st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=self.receive_doc)
- self.assertEqual(expected, [c[:-1] for c in self.other_changes])
- self.assertEqual(3, new_gen)
-
- @defer.inlineCallbacks
- def test_sync_exchange_with_concurrent_updates(self):
-
- def after_whatschanged_cb(state):
- if state != 'after whats_changed':
- return
- self.db.create_doc_from_json('{"new": "doc"}')
-
- self.set_trace_hook(after_whatschanged_cb)
- doc = self.db.create_doc_from_json(tests.simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_doc = '{"key": "altval"}'
- docs_by_gen = [
- ((self.make_document, (doc.doc_id, 'test:1|z:2', new_doc), {}), 10,
- 'T-sid')]
- new_gen, _ = yield self.st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=self.receive_doc)
- self.assertEqual(([], 2), (self.other_changes, new_gen))
-
- @defer.inlineCallbacks
- def test_sync_exchange_converged_handling(self):
- doc = self.db.create_doc_from_json(tests.simple_doc)
- docs_by_gen = [
- ((self.make_document, ('new', 'other:1', '{}'), {}), 4, 'T-foo'),
- ((self.make_document, (doc.doc_id, doc.rev, doc.get_json()), {}),
- 5, 'T-bar')]
- new_gen, _ = yield self.st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, insert_doc_cb=self.receive_doc)
- self.assertEqual(([], 2), (self.other_changes, new_gen))
-
- @defer.inlineCallbacks
- def test_sync_exchange_detect_incomplete_exchange(self):
- def before_get_docs_explode(state):
- if state != 'before get_docs':
- return
- raise l2db.errors.U1DBError("fail")
- self.set_trace_hook(before_get_docs_explode)
- # suppress traceback printing in the wsgiref server
- # self.patch(simple_server.ServerHandler,
- # 'log_exception', lambda h, exc_info: None)
- doc = self.db.create_doc_from_json(tests.simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- self.assertRaises(
- (l2db.errors.U1DBError, l2db.errors.BrokenSyncStream),
- self.st.sync_exchange, [], 'other-replica',
- last_known_generation=0, last_known_trans_id=None,
- insert_doc_cb=self.receive_doc)
-
- @defer.inlineCallbacks
- def test_sync_exchange_doc_ids(self):
- sync_exchange_doc_ids = getattr(self.st, 'sync_exchange_doc_ids', None)
- if sync_exchange_doc_ids is None:
- self.skipTest("sync_exchange_doc_ids not implemented")
- db2 = self.create_database('test2')
- doc = db2.create_doc_from_json(tests.simple_doc)
- new_gen, trans_id = yield sync_exchange_doc_ids(
- db2, [(doc.doc_id, 10, 'T-sid')], 0, None,
- insert_doc_cb=self.receive_doc)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev,
- tests.simple_doc, False)
- self.assertTransactionLog([doc.doc_id], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual(([], 1, last_trans_id),
- (self.other_changes, new_gen, trans_id))
- sync_info = yield self.st.get_sync_info(db2._replica_uid)
- self.assertEqual(10, sync_info[3])
-
- @defer.inlineCallbacks
- def test__set_trace_hook(self):
- called = []
-
- def cb(state):
- called.append(state)
-
- self.set_trace_hook(cb)
- yield self.st.sync_exchange([], 'replica', 0, None, self.receive_doc)
- yield self.st.record_sync_info('replica', 0, 'T-sid')
- self.assertEqual(['before whats_changed',
- 'after whats_changed',
- 'before get_docs',
- 'record_sync_info',
- ],
- called)
-
- @defer.inlineCallbacks
- def test__set_trace_hook_shallow(self):
- if (self.st._set_trace_hook_shallow == self.st._set_trace_hook or
- self.st._set_trace_hook_shallow.im_func ==
- target.SoledadHTTPSyncTarget._set_trace_hook_shallow.im_func):
- # shallow same as full
- expected = ['before whats_changed',
- 'after whats_changed',
- 'before get_docs',
- 'record_sync_info',
- ]
- else:
- expected = ['sync_exchange', 'record_sync_info']
-
- called = []
-
- def cb(state):
- called.append(state)
-
- self.set_trace_hook(cb, shallow=True)
- yield self.st.sync_exchange([], 'replica', 0, None, self.receive_doc)
- yield self.st.record_sync_info('replica', 0, 'T-sid')
- self.assertEqual(expected, called)
-
-
-WAIT_STEP = 1  # seconds between polls of the syncer
-MAX_WAIT = 10  # maximum seconds to wait before raising SyncTimeoutError
-DBPASS = "pass"
-
-
-class SyncTimeoutError(Exception):
-
- """
- Dummy exception to signal a timeout during sync.
- """
- pass
-
-
-@pytest.mark.needs_couch
-class TestSoledadDbSync(
- TestWithScenarios,
- SoledadWithCouchServerMixin,
- tests.TestCaseWithServer):
-
- """Test db.sync remote sync shortcut"""
-
- scenarios = [
- ('py-token-http', {
- 'create_db_and_target': make_local_db_and_token_soledad_target,
- 'make_app_with_state': make_token_soledad_app,
- 'make_database_for_test': make_sqlcipher_database_for_test,
- 'token': True
- }),
- ]
-
- oauth = False
- token = False
-
- def setUp(self):
- """
- Need to explicitly invoke initialization on all bases.
- """
- SoledadWithCouchServerMixin.setUp(self)
- self.server = self.server_thread = None
- self.startTwistedServer()
- self.syncer = None
-
- # config info
- self.db1_file = os.path.join(self.tempdir, "db1.u1db")
- os.unlink(self.db1_file)
- self.db_pass = DBPASS
- self.email = ADDRESS
-
- # get a random prefix for each test, so we do not mess with
- # concurrency during initialization and shutting down of
- # each local db.
- self.rand_prefix = ''.join(
- random.choice(string.ascii_letters) for _ in range(6))
-
- # open test dbs: db1 will be the local sqlcipher db (which
- # instantiates a syncdb). We use the self._soledad instance that was
- # already created on some setUp method.
- import binascii
- tohex = binascii.b2a_hex
- key = tohex(self._soledad.secrets.local_key)
- dbpath = self._soledad._local_db_path
-
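- # is_raw_key=True means the hex string above is used directly as
- # the SQLCipher binary key rather than being derived from a
- # passphrase.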
- self.opts = SQLCipherOptions(
- dbpath, key, is_raw_key=True, create=False)
- self.db1 = SQLCipherDatabase(self.opts)
-
- self.db2 = self.request_state._create_database(replica_uid='test')
-
- def tearDown(self):
- """
- Need to explicitly invoke destruction on all bases.
- """
- dbsyncer = getattr(self, 'dbsyncer', None)
- if dbsyncer:
- dbsyncer.close()
- self.db1.close()
- self.db2.close()
- self._soledad.close()
-
- # XXX should not access "private" attrs
- shutil.rmtree(os.path.dirname(self._soledad._local_db_path))
- SoledadWithCouchServerMixin.tearDown(self)
-
- def do_sync(self, target_name):
- """
- Perform sync using SoledadSynchronizer, SoledadSyncTarget
- and Token auth.
- """
- if self.token:
- creds = {'token': {
- 'uuid': 'user-uuid',
- 'token': 'auth-token',
- }}
- target_url = self.getURL(self.db2._dbname)
-
- # get a u1db syncer
- crypto = self._soledad._crypto
- replica_uid = self.db1._replica_uid
- dbsyncer = SQLCipherU1DBSync(
- self.opts,
- crypto,
- replica_uid,
- None)
- self.dbsyncer = dbsyncer
- return dbsyncer.sync(target_url,
- creds=creds)
- else:
- return self._do_sync(target_name)
-
- def _do_sync(self, target_name):
- if self.oauth:
- path = '~/' + target_name
- extra = dict(creds={'oauth': {
- 'consumer_key': tests.consumer1.key,
- 'consumer_secret': tests.consumer1.secret,
- 'token_key': tests.token1.key,
- 'token_secret': tests.token1.secret,
- }})
- else:
- path = target_name
- extra = {}
- target_url = self.getURL(path)
- return self.db.sync(target_url, **extra)
-
- def wait_for_sync(self):
- """
- Wait for sync to finish.
- """
- wait = 0
- syncer = self.syncer
- if syncer is not None:
- while syncer.syncing:
- time.sleep(WAIT_STEP)
- wait += WAIT_STEP
- if wait >= MAX_WAIT:
- raise SyncTimeoutError
-
- def test_db_sync(self):
- """
- Test sync.
-
- Adapted to check for encrypted content.
- """
- doc1 = self.db1.create_doc_from_json(tests.simple_doc)
- doc2 = self.db2.create_doc_from_json(tests.nested_doc)
- d = self.do_sync('test')
-
- def _assert_successful_sync(results):
- # give the encryption some time to proceed
- # TODO: should use a deferred list to subscribe to the
- # all-decrypted event instead of sleeping
- time.sleep(2)
- local_gen_before_sync = results
- self.wait_for_sync()
-
- gen, _, changes = self.db1.whats_changed(local_gen_before_sync)
- self.assertEqual(1, len(changes))
-
- self.assertEqual(doc2.doc_id, changes[0][0])
- self.assertEqual(1, gen - local_gen_before_sync)
-
- self.assertGetEncryptedDoc(
- self.db2, doc1.doc_id, doc1.rev, tests.simple_doc, False)
- self.assertGetEncryptedDoc(
- self.db1, doc2.doc_id, doc2.rev, tests.nested_doc, False)
-
- d.addCallback(_assert_successful_sync)
- return d
-
-
-@pytest.mark.needs_couch
-class SQLCipherSyncTargetTests(SoledadDatabaseSyncTargetTests):
-
- # TODO: implement _set_trace_hook(_shallow) in SoledadHTTPSyncTarget so
- # skipped tests can be successfully executed.
-
- scenarios = (tests.multiply_scenarios(SQLCIPHER_SCENARIOS,
- target_scenarios))
-
- whitebox = False