-rw-r--r--  README                                        9
-rw-r--r--  __init__.py                                  38
-rw-r--r--  backends/couch.py                            49
-rw-r--r--  backends/leap_backend.py                      8
-rw-r--r--  backends/objectstore.py                      22
-rw-r--r--  backends/openstack.py                        14
-rw-r--r--  backends/sqlcipher.py                        16
-rw-r--r--  tests/test_couch.py                          27
-rw-r--r--  tests/test_encrypted.py                      14
-rw-r--r--  tests/test_leap_backend.py                   58
-rw-r--r--  tests/test_logs.py                           65
-rw-r--r--  tests/test_sqlcipher.py                      46
-rw-r--r--  tests/u1db_tests/__init__.py                 84
-rw-r--r--  tests/u1db_tests/test_backends.py            99
-rw-r--r--  tests/u1db_tests/test_document.py             6
-rw-r--r--  tests/u1db_tests/test_http_app.py            55
-rw-r--r--  tests/u1db_tests/test_http_client.py         12
-rw-r--r--  tests/u1db_tests/test_http_database.py       20
-rw-r--r--  tests/u1db_tests/test_https.py                9
-rw-r--r--  tests/u1db_tests/test_open.py                 2
-rw-r--r--  tests/u1db_tests/test_remote_sync_target.py  11
-rw-r--r--  tests/u1db_tests/test_sqlite_backend.py      29
-rw-r--r--  tests/u1db_tests/test_sync.py               131
-rw-r--r--  util.py                                       8
24 files changed, 434 insertions, 398 deletions
diff --git a/README b/README
index 9896d2bf..3bf62494 100644
--- a/README
+++ b/README
@@ -28,5 +28,10 @@ Soledad's tests should be run with nose2, like this:
nose2 leap.soledad.tests
-CouchDB backend tests need an http CouchDB instance running on
-`localhost:5984`.
+Right now, there are 3 conditions that have to be met for all Soledad tests to
+pass without problems:
+
+ 1. Use nose2.
+ 2. Have an http CouchDB instance running on `localhost:5984`.
+ 3. Have sqlcipher configured (using LD_PRELOAD or LD_LIBRARY_PATH to point
+ to the place where libsqlite3.so.0 is located).
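Not part of the patch, just for orientation: a sketch of how condition 3 can be satisfied when launching the suite from Python. The library directory is an assumption; point LD_LIBRARY_PATH at wherever your sqlcipher build keeps libsqlite3.so.0.

    # Hypothetical launcher: run the test suite with the sqlcipher library
    # directory exported so the SQLCipher backend can load libsqlite3.so.0.
    import os
    import subprocess

    env = dict(os.environ, LD_LIBRARY_PATH='/usr/lib/sqlcipher')  # assumed path
    subprocess.check_call(['nose2', 'leap.soledad.tests'], env=env)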
diff --git a/__init__.py b/__init__.py
index 1473da38..cbd4bb0d 100644
--- a/__init__.py
+++ b/__init__.py
@@ -10,12 +10,13 @@ from leap.soledad.backends import sqlcipher
from leap.soledad.util import GPGWrapper
import util
+
class Soledad(object):
# paths
- PREFIX = os.environ['HOME'] + '/.config/leap/soledad'
- SECRET_PATH = PREFIX + '/secret.gpg'
- GNUPG_HOME = PREFIX + '/gnupg'
+ PREFIX = os.environ['HOME'] + '/.config/leap/soledad'
+ SECRET_PATH = PREFIX + '/secret.gpg'
+ GNUPG_HOME = PREFIX + '/gnupg'
LOCAL_DB_PATH = PREFIX + '/soledad.u1db'
# other configs
@@ -52,7 +53,8 @@ class Soledad(object):
def _has_secret(self):
"""
- Verify if secret for symmetric encryption exists on local encrypted file.
+ Verify if secret for symmetric encryption exists on local encrypted
+ file.
"""
# TODO: verify if file is a GPG-encrypted file and if we have the
# corresponding private key for decryption.
@@ -66,16 +68,20 @@ class Soledad(object):
"""
try:
with open(self.SECRET_PATH) as f:
- self._secret = str(self._gpg.decrypt(f.read()))
+ self._secret = str(self._gpg.decrypt(f.read()))
except IOError as e:
- raise IOError('Failed to open secret file %s.' % self.SECRET_PATH)
+ raise IOError('Failed to open secret file %s.' % self.SECRET_PATH)
def _gen_secret(self):
"""
- Generate a secret for symmetric encryption and store in a local encrypted file.
+ Generate a secret for symmetric encryption and store in a local
+ encrypted file.
"""
- self._secret = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(self.SECRET_LENGTH))
- ciphertext = self._gpg.encrypt(self._secret, self._fingerprint, self._fingerprint)
+ self._secret = ''.join(random.choice(string.ascii_uppercase +
+ string.digits) for x in
+ range(self.SECRET_LENGTH))
+ ciphertext = self._gpg.encrypt(self._secret, self._fingerprint,
+ self._fingerprint)
f = open(self.SECRET_PATH, 'w')
f.write(str(ciphertext))
f.close()
@@ -100,11 +106,11 @@ class Soledad(object):
Generate an OpenPGP keypair for this user.
"""
params = self._gpg.gen_key_input(
- key_type='RSA',
- key_length=4096,
- name_real=self._user_email,
- name_email=self._user_email,
- name_comment='Generated by LEAP Soledad.')
+ key_type='RSA',
+ key_length=4096,
+ name_real=self._user_email,
+ name_email=self._user_email,
+ name_comment='Generated by LEAP Soledad.')
self._gpg.gen_key(params)
def _load_openpgp_keypair(self):
@@ -129,7 +135,8 @@ class Soledad(object):
Encrypt data.
"""
return str(self._gpg.encrypt(data, self._fingerprint, sign=sign,
- passphrase=passphrase, symmetric=symmetric))
+ passphrase=passphrase,
+ symmetric=symmetric))
def encrypt_symmetric(self, doc_id, data, sign=None):
"""
@@ -208,4 +215,3 @@ class Soledad(object):
return self._db.sync(url, creds=None, autocreate=True, soledad=self)
__all__ = ['util']
-
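Aside, not part of the patch: the secret handling reformatted above is essentially a GPG round-trip through the wrapped gnupg instance. A rough sketch using the python-gnupg package directly (a stand-in for the GPGWrapper used here), where the GnuPG home and output path are placeholders and a passphrase-less keypair is assumed to exist:

    # Sketch of the _gen_secret / _load_secret flow with python-gnupg.
    import random
    import string
    import gnupg

    gpg = gnupg.GPG(gnupghome='/tmp/demo-gnupg')      # assumed GnuPG home
    fingerprint = gpg.list_keys()[0]['fingerprint']   # any locally available key

    # generate a random secret and store it encrypted to our own key
    secret = ''.join(random.choice(string.ascii_uppercase + string.digits)
                     for x in range(50))              # arbitrary length here
    with open('/tmp/secret.gpg', 'w') as f:
        f.write(str(gpg.encrypt(secret, fingerprint)))

    # load it back
    with open('/tmp/secret.gpg') as f:
        assert str(gpg.decrypt(f.read())) == secret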
diff --git a/backends/couch.py b/backends/couch.py
index 78026af8..8ba42d78 100644
--- a/backends/couch.py
+++ b/backends/couch.py
@@ -1,7 +1,5 @@
-import sys
import uuid
from base64 import b64encode, b64decode
-from u1db import errors
from u1db.sync import LocalSyncTarget
from couchdb.client import Server, Document as CouchDocument
from couchdb.http import ResourceNotFound
@@ -17,7 +15,8 @@ except ImportError:
class CouchDatabase(ObjectStore):
"""A U1DB implementation that uses Couch as its persistence layer."""
- def __init__(self, url, database, replica_uid=None, full_commit=True, session=None):
+ def __init__(self, url, database, replica_uid=None, full_commit=True,
+ session=None):
"""Create a new Couch data container."""
self._url = url
self._full_commit = full_commit
@@ -54,8 +53,9 @@ class CouchDatabase(ObjectStore):
doc_id=doc_id,
rev=cdoc['u1db_rev'],
has_conflicts=has_conflicts)
- if cdoc['u1db_json'] is not None:
- doc.content = json.loads(cdoc['u1db_json'])
+ contents = self._database.get_attachment(cdoc, 'u1db_json')
+ if contents:
+ doc.content = json.loads(contents.getvalue())
else:
doc.make_tombstone()
return doc
@@ -83,13 +83,14 @@ class CouchDatabase(ObjectStore):
cdoc['_rev'] = old_cdoc['_rev']
# store u1db's rev
cdoc['u1db_rev'] = doc.rev
+ # save doc in db
+ self._database.save(cdoc)
# store u1db's content as json string
if not doc.is_tombstone():
- cdoc['u1db_json'] = doc.get_json()
+ self._database.put_attachment(cdoc, doc.get_json(),
+ filename='u1db_json')
else:
- cdoc['u1db_json'] = None
- # save doc in db
- self._database.save(cdoc)
+ self._database.delete_attachment(cdoc, 'u1db_json')
def get_sync_target(self):
return CouchSyncTarget(self)
@@ -103,7 +104,6 @@ class CouchDatabase(ObjectStore):
#self._server = None
self._database = None
return True
-
def sync(self, url, creds=None, autocreate=True):
from u1db.sync import Synchronizer
@@ -114,15 +114,16 @@ class CouchDatabase(ObjectStore):
if self._replica_uid is None:
self._replica_uid = uuid.uuid4().hex
doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID)
- doc.content = { 'sync_log' : [],
- 'transaction_log' : [],
- 'conflict_log' : b64encode(json.dumps([])),
- 'replica_uid' : self._replica_uid }
+ doc.content = {'sync_log': [],
+ 'transaction_log': [],
+ 'conflict_log': b64encode(json.dumps([])),
+ 'replica_uid': self._replica_uid}
self._put_doc(doc)
def _get_u1db_data(self):
cdoc = self._database.get(self.U1DB_DATA_DOC_ID)
- content = json.loads(cdoc['u1db_json'])
+ jsonstr = self._database.get_attachment(cdoc, 'u1db_json').getvalue()
+ content = json.loads(jsonstr)
self._sync_log.log = content['sync_log']
self._transaction_log.log = content['transaction_log']
self._conflict_log.log = json.loads(b64decode(content['conflict_log']))
@@ -131,14 +132,15 @@ class CouchDatabase(ObjectStore):
def _set_u1db_data(self):
doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID)
- doc.content = { 'sync_log' : self._sync_log.log,
- 'transaction_log' : self._transaction_log.log,
- # Here, the b64 encode ensures that document content
- # does not cause strange behaviour in couchdb because
- # of encoding.
- 'conflict_log' : b64encode(json.dumps(self._conflict_log.log)),
- 'replica_uid' : self._replica_uid,
- '_rev' : self._couch_rev}
+ doc.content = {
+ 'sync_log': self._sync_log.log,
+ 'transaction_log': self._transaction_log.log,
+ # Here, the b64 encode ensures that document content
+ # does not cause strange behaviour in couchdb because
+ # of encoding.
+ 'conflict_log': b64encode(json.dumps(self._conflict_log.log)),
+ 'replica_uid': self._replica_uid,
+ '_rev': self._couch_rev}
self._put_doc(doc)
#-------------------------------------------------------------------------
@@ -166,4 +168,3 @@ class CouchSyncTarget(LocalSyncTarget):
self._db._set_replica_gen_and_trans_id(
source_replica_uid, source_replica_generation,
source_replica_transaction_id)
-
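Aside, not part of the patch: the change above moves the document JSON out of the Couch document body and into an attachment named u1db_json. A minimal round-trip with the couchdb-python client, assuming a local CouchDB at localhost:5984 and a throwaway database name:

    # Sketch only: store and read back a document's JSON as an attachment,
    # mirroring what _put_doc and _get_doc do above.
    import json
    from couchdb.client import Server

    server = Server('http://localhost:5984')
    db = server.create('attachment-demo')        # throwaway database
    cdoc = {'_id': 'doc-1', 'u1db_rev': 'test:1'}
    db.save(cdoc)                                # save first, then attach
    db.put_attachment(cdoc, json.dumps({'key': 'value'}), filename='u1db_json')
    contents = db.get_attachment(cdoc, 'u1db_json')
    print(json.loads(contents.getvalue()))       # {u'key': u'value'}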
diff --git a/backends/leap_backend.py b/backends/leap_backend.py
index 3e859f7c..7e98dd45 100644
--- a/backends/leap_backend.py
+++ b/backends/leap_backend.py
@@ -41,8 +41,9 @@ class LeapDocument(Document):
"""
if not self._soledad:
raise NoSoledadInstance()
- ciphertext = self._soledad.encrypt_symmetric(self.doc_id, self.get_json())
- return json.dumps({'_encrypted_json' : ciphertext})
+ ciphertext = self._soledad.encrypt_symmetric(self.doc_id,
+ self.get_json())
+ return json.dumps({'_encrypted_json': ciphertext})
def set_encrypted_json(self, encrypted_json):
"""
@@ -89,7 +90,8 @@ class LeapDatabase(HTTPDatabase):
doc_id = self._allocate_doc_id()
res, headers = self._request_json('PUT', ['doc', doc_id], {},
content, 'application/json')
- new_doc = self._factory(doc_id, res['rev'], content, soledad=self._soledad)
+ new_doc = self._factory(doc_id, res['rev'], content,
+ soledad=self._soledad)
return new_doc
diff --git a/backends/objectstore.py b/backends/objectstore.py
index b6523336..d72a2ecc 100644
--- a/backends/objectstore.py
+++ b/backends/objectstore.py
@@ -1,6 +1,7 @@
from u1db.backends import CommonBackend
from u1db import errors, Document, vectorclock
+
class ObjectStore(CommonBackend):
"""
A backend for storing u1db data in an object store.
@@ -139,12 +140,13 @@ class ObjectStore(CommonBackend):
def _set_replica_gen_and_trans_id(self, other_replica_uid,
other_generation, other_transaction_id):
return self._do_set_replica_gen_and_trans_id(
- other_replica_uid,
- other_generation,
- other_transaction_id)
+ other_replica_uid,
+ other_generation,
+ other_transaction_id)
def _do_set_replica_gen_and_trans_id(self, other_replica_uid,
- other_generation, other_transaction_id):
+ other_generation,
+ other_transaction_id):
self._sync_log.set_replica_gen_and_trans_id(other_replica_uid,
other_generation,
other_transaction_id)
@@ -201,7 +203,6 @@ class ObjectStore(CommonBackend):
"""
Verify if u1db data exists in store.
"""
- doc = self._get_doc(self.U1DB_DATA_DOC_ID)
if not self._get_doc(self.U1DB_DATA_DOC_ID):
return False
return True
@@ -234,7 +235,6 @@ class ObjectStore(CommonBackend):
replica_uid = property(
_get_replica_uid, _set_replica_uid, doc="Replica UID of the database")
-
#-------------------------------------------------------------------------
# The methods below were cloned from u1db sqlite backend. They should at
# least exist and raise a NotImplementedError exception in CommonBackend
@@ -387,12 +387,12 @@ class TransactionLog(SimpleLog):
return cur_gen, newest_trans_id, changes
-
def get_transaction_log(self):
"""
Return only a list of (doc_id, transaction_id)
"""
- return map(lambda x: (x[1], x[2]), sorted(self._get_log(reverse=False)))
+ return map(lambda x: (x[1], x[2]),
+ sorted(self._get_log(reverse=False)))
class SyncLog(SimpleLog):
@@ -416,7 +416,7 @@ class SyncLog(SimpleLog):
return (info[1], info[2])
def set_replica_gen_and_trans_id(self, other_replica_uid,
- other_generation, other_transaction_id):
+ other_generation, other_transaction_id):
"""
Set the last-known generation and transaction id for the other
database replica.
@@ -425,6 +425,7 @@ class SyncLog(SimpleLog):
self.append((other_replica_uid, other_generation,
other_transaction_id))
+
class ConflictLog(SimpleLog):
"""
A list of (doc_id, my_doc_rev, my_content) tuples.
@@ -433,7 +434,7 @@ class ConflictLog(SimpleLog):
def __init__(self, factory):
super(ConflictLog, self).__init__()
self._factory = factory
-
+
def delete_conflicts(self, conflicts):
for conflict in conflicts:
self._set_log(self.filter(lambda x:
@@ -448,4 +449,3 @@ class ConflictLog(SimpleLog):
def has_conflicts(self, doc_id):
return bool(self.filter(lambda x: x[0] == doc_id))
-
diff --git a/backends/openstack.py b/backends/openstack.py
index c027231c..a9615736 100644
--- a/backends/openstack.py
+++ b/backends/openstack.py
@@ -1,6 +1,6 @@
-from u1db import errors
+# TODO: this backend is not tested yet.
from u1db.remote.http_target import HTTPSyncTarget
-from swiftclient import client
+import swiftclient
from soledad.backends.objectstore import ObjectStore
@@ -25,12 +25,13 @@ class OpenStackDatabase(ObjectStore):
def _get_doc(self, doc_id, check_for_conflicts=False):
"""Get just the document content, without fancy handling.
-
+
Conflicts do not happen on server side, so there's no need to check
for them.
"""
try:
- response, contents = self._connection.get_object(self._container, doc_id)
+ response, contents = self._connection.get_object(self._container,
+ doc_id)
# TODO: change revision to be a dictionary element?
rev = response['x-object-meta-rev']
return self._factory(doc_id, rev, contents)
@@ -53,7 +54,7 @@ class OpenStackDatabase(ObjectStore):
def _put_doc(self, doc, new_rev):
new_rev = self._allocate_doc_rev(doc.rev)
# TODO: change revision to be a dictionary element?
- headers = { 'X-Object-Meta-Rev' : new_rev }
+ headers = {'X-Object-Meta-Rev': new_rev}
self._connection.put_object(self._container, doc_id, doc.get_json(),
headers=headers)
@@ -77,6 +78,7 @@ class OpenStackDatabase(ObjectStore):
self._url, self._auth_token = self._connection.get_auth()
return self._url, self.auth_token
+
class OpenStackSyncTarget(HTTPSyncTarget):
def get_sync_info(self, source_replica_uid):
@@ -94,5 +96,3 @@ class OpenStackSyncTarget(HTTPSyncTarget):
self._db._set_replica_gen_and_trans_id(
source_replica_uid, source_replica_generation,
source_replica_transaction_id)
-
-
diff --git a/backends/sqlcipher.py b/backends/sqlcipher.py
index 3d03449e..08b4df43 100644
--- a/backends/sqlcipher.py
+++ b/backends/sqlcipher.py
@@ -59,11 +59,9 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase):
_index_storage_value = 'expand referenced encrypted'
-
@classmethod
def set_pragma_key(cls, db_handle, key):
- db_handle.cursor().execute("PRAGMA key = '%s'" % key)
-
+ db_handle.cursor().execute("PRAGMA key = '%s'" % key)
def __init__(self, sqlite_file, password, document_factory=None):
"""Create a new sqlcipher file."""
@@ -74,20 +72,18 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase):
self._ensure_schema()
self._factory = document_factory or Document
-
def _check_if_db_is_encrypted(self, sqlite_file):
if not os.path.exists(sqlite_file):
return
else:
try:
- # try to open an encrypted database with the regular u1db backend
- # should raise a DatabaseError exception.
+                # trying to open an encrypted database with the regular u1db
+ # backend should raise a DatabaseError exception.
SQLitePartialExpandDatabase(sqlite_file)
raise DatabaseIsNotEncrypted()
except DatabaseError:
pass
-
@classmethod
def _open_database(cls, sqlite_file, password, document_factory=None):
if not os.path.isfile(sqlite_file):
@@ -113,7 +109,6 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase):
return SQLCipherDatabase._sqlite_registry[v](
sqlite_file, password, document_factory=document_factory)
-
@classmethod
def open_database(cls, sqlite_file, password, create, backend_cls=None,
document_factory=None):
@@ -129,7 +124,6 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase):
return backend_cls(sqlite_file, password,
document_factory=document_factory)
-
def sync(self, url, creds=None, autocreate=True, soledad=None):
"""
Synchronize encrypted documents with remote replica exposed at url.
@@ -137,9 +131,7 @@ class SQLCipherDatabase(SQLitePartialExpandDatabase):
from u1db.sync import Synchronizer
from leap.soledad.backends.leap_backend import LeapSyncTarget
return Synchronizer(self, LeapSyncTarget(url, creds=creds),
- soledad=self._soledad).sync(
- autocreate=autocreate)
+ soledad=self._soledad).sync(autocreate=autocreate)
SQLiteDatabase.register_implementation(SQLCipherDatabase)
-
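Aside, not part of the patch: the usual entry point for this backend is open_database, as exercised by the tests further down. A minimal sketch with a made-up path and password:

    # Sketch only: create an encrypted database, write a document, read it back.
    from leap.soledad.backends.sqlcipher import SQLCipherDatabase

    db = SQLCipherDatabase.open_database('/tmp/encrypted.u1db', 'secret',
                                         create=True)
    doc = db.create_doc_from_json('{"key": "value"}')
    print(db.get_doc(doc.doc_id).get_json())     # {"key": "value"}
    db.close()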
diff --git a/tests/test_couch.py b/tests/test_couch.py
index 6b5875b8..b7fab193 100644
--- a/tests/test_couch.py
+++ b/tests/test_couch.py
@@ -14,6 +14,7 @@ try:
except ImportError:
import json # noqa
+
#-----------------------------------------------------------------------------
# The following tests come from `u1db.tests.test_common_backend`.
#-----------------------------------------------------------------------------
@@ -37,8 +38,10 @@ def make_couch_database_for_test(test, replica_uid):
return couch.CouchDatabase('http://localhost:5984', replica_uid,
replica_uid=replica_uid or 'test')
+
def copy_couch_database_for_test(test, db):
- new_db = couch.CouchDatabase('http://localhost:5984', db._replica_uid+'_copy',
+ new_db = couch.CouchDatabase('http://localhost:5984',
+ db._replica_uid+'_copy',
replica_uid=db._replica_uid or 'test')
gen, docs = db.get_all_docs(include_deleted=True)
for doc in docs:
@@ -51,10 +54,10 @@ def copy_couch_database_for_test(test, db):
COUCH_SCENARIOS = [
- ('couch', {'make_database_for_test': make_couch_database_for_test,
- 'copy_database_for_test': copy_couch_database_for_test,
- 'make_document_for_test': tests.make_document_for_test,}),
- ]
+ ('couch', {'make_database_for_test': make_couch_database_for_test,
+ 'copy_database_for_test': copy_couch_database_for_test,
+ 'make_document_for_test': tests.make_document_for_test, }),
+]
class CouchTests(test_backends.AllDatabaseTests):
@@ -75,7 +78,8 @@ class CouchDatabaseTests(test_backends.LocalDatabaseTests):
super(CouchDatabaseTests, self).tearDown()
-class CouchValidateGenNTransIdTests(test_backends.LocalDatabaseValidateGenNTransIdTests):
+class CouchValidateGenNTransIdTests(
+ test_backends.LocalDatabaseValidateGenNTransIdTests):
scenarios = COUCH_SCENARIOS
@@ -84,7 +88,8 @@ class CouchValidateGenNTransIdTests(test_backends.LocalDatabaseValidateGenNTrans
super(CouchValidateGenNTransIdTests, self).tearDown()
-class CouchValidateSourceGenTests(test_backends.LocalDatabaseValidateSourceGenTests):
+class CouchValidateSourceGenTests(
+ test_backends.LocalDatabaseValidateSourceGenTests):
scenarios = COUCH_SCENARIOS
@@ -93,7 +98,8 @@ class CouchValidateSourceGenTests(test_backends.LocalDatabaseValidateSourceGenTe
super(CouchValidateSourceGenTests, self).tearDown()
-class CouchWithConflictsTests(test_backends.LocalDatabaseWithConflictsTests):
+class CouchWithConflictsTests(
+ test_backends.LocalDatabaseWithConflictsTests):
scenarios = COUCH_SCENARIOS
@@ -115,7 +121,6 @@ class CouchWithConflictsTests(test_backends.LocalDatabaseWithConflictsTests):
# super(CouchIndexTests, self).tearDown()
-
#-----------------------------------------------------------------------------
# The following tests come from `u1db.tests.test_sync`.
#-----------------------------------------------------------------------------
@@ -166,12 +171,13 @@ for name, scenario in COUCH_SCENARIOS:
sync_scenarios.append((name, scenario))
scenario = dict(scenario)
+
class CouchDatabaseSyncTests(test_sync.DatabaseSyncTests):
scenarios = sync_scenarios
def setUp(self):
- self.db = None
+ self.db = None
self.db1 = None
self.db2 = None
self.db3 = None
@@ -209,5 +215,4 @@ class CouchDatabaseSyncTests(test_sync.DatabaseSyncTests):
pass
-
load_tests = tests.load_with_scenarios
diff --git a/tests/test_encrypted.py b/tests/test_encrypted.py
index 8cb6dc51..af5f0fa4 100644
--- a/tests/test_encrypted.py
+++ b/tests/test_encrypted.py
@@ -8,11 +8,11 @@ from leap.soledad.backends.leap_backend import LeapDocument
class EncryptedSyncTestCase(unittest.TestCase):
- PREFIX = "/var/tmp"
+ PREFIX = "/var/tmp"
GNUPG_HOME = "%s/gnupg" % PREFIX
- DB1_FILE = "%s/db1.u1db" % PREFIX
- DB2_FILE = "%s/db2.u1db" % PREFIX
- EMAIL = 'leap@leap.se'
+ DB1_FILE = "%s/db1.u1db" % PREFIX
+ DB2_FILE = "%s/db2.u1db" % PREFIX
+ EMAIL = 'leap@leap.se'
def setUp(self):
self.db1 = u1db.open(self.DB1_FILE, create=True,
@@ -29,10 +29,10 @@ class EncryptedSyncTestCase(unittest.TestCase):
def test_get_set_encrypted(self):
doc1 = LeapDocument(soledad=self.soledad)
- doc1.content = { 'key' : 'val' }
+ doc1.content = {'key': 'val'}
doc2 = LeapDocument(doc_id=doc1.doc_id,
- encrypted_json=doc1.get_encrypted_json(),
- soledad=self.soledad)
+ encrypted_json=doc1.get_encrypted_json(),
+ soledad=self.soledad)
res1 = doc1.get_json()
res2 = doc2.get_json()
self.assertEqual(res1, res2, 'incorrect document encryption')
diff --git a/tests/test_leap_backend.py b/tests/test_leap_backend.py
index f19eb360..c19ca666 100644
--- a/tests/test_leap_backend.py
+++ b/tests/test_leap_backend.py
@@ -33,11 +33,11 @@ from leap.soledad.tests.test_encrypted import (
class SoledadTest(unittest.TestCase):
- PREFIX = "/var/tmp"
+ PREFIX = "/var/tmp"
GNUPG_HOME = "%s/gnupg" % PREFIX
- DB1_FILE = "%s/db1.u1db" % PREFIX
- DB2_FILE = "%s/db2.u1db" % PREFIX
- EMAIL = 'leap@leap.se'
+ DB1_FILE = "%s/db1.u1db" % PREFIX
+ DB2_FILE = "%s/db2.u1db" % PREFIX
+ EMAIL = 'leap@leap.se'
def setUp(self):
super(SoledadTest, self).setUp()
@@ -102,7 +102,8 @@ def make_document_for_test(test, doc_id, rev, content, has_conflicts=False):
doc_id, rev, content, has_conflicts=has_conflicts)
-def make_leap_document_for_test(test, doc_id, rev, content, has_conflicts=False):
+def make_leap_document_for_test(test, doc_id, rev, content,
+ has_conflicts=False):
return leap_backend.LeapDocument(
doc_id, rev, content, has_conflicts=has_conflicts,
soledad=test._soledad)
@@ -117,11 +118,11 @@ def make_leap_encrypted_document_for_test(test, doc_id, rev, encrypted_content,
LEAP_SCENARIOS = [
- ('http', {'make_database_for_test': make_leap_database_for_test,
- 'copy_database_for_test': copy_leap_database_for_test,
- 'make_document_for_test': make_leap_document_for_test,
- 'make_app_with_state': make_http_app}),
- ]
+ ('http', {'make_database_for_test': make_leap_database_for_test,
+ 'copy_database_for_test': copy_leap_database_for_test,
+ 'make_document_for_test': make_leap_document_for_test,
+ 'make_app_with_state': make_http_app}),
+]
class LeapTests(test_backends.AllDatabaseTests, SoledadTest):
@@ -133,24 +134,26 @@ class LeapTests(test_backends.AllDatabaseTests, SoledadTest):
# The following tests come from `u1db.tests.test_http_database`.
#-----------------------------------------------------------------------------
-class TestLeapDatabaseSimpleOperations(test_http_database.TestHTTPDatabaseSimpleOperations):
+class TestLeapDatabaseSimpleOperations(
+ test_http_database.TestHTTPDatabaseSimpleOperations):
def setUp(self):
- super(test_http_database.TestHTTPDatabaseSimpleOperations, self).setUp()
+ super(test_http_database.TestHTTPDatabaseSimpleOperations,
+ self).setUp()
self.db = leap_backend.LeapDatabase('dbase')
self.db._conn = object() # crash if used
self.got = None
self.response_val = None
def _request(method, url_parts, params=None, body=None,
- content_type=None):
+ content_type=None):
self.got = method, url_parts, params, body, content_type
if isinstance(self.response_val, Exception):
raise self.response_val
return self.response_val
def _request_json(method, url_parts, params=None, body=None,
- content_type=None):
+ content_type=None):
self.got = method, url_parts, params, body, content_type
if isinstance(self.response_val, Exception):
raise self.response_val
@@ -165,11 +168,13 @@ class TestLeapDatabaseSimpleOperations(test_http_database.TestHTTPDatabaseSimple
self.assertEqual(st._url, self.db._url)
-class TestLeapDatabaseCtrWithCreds(test_http_database.TestHTTPDatabaseCtrWithCreds):
+class TestLeapDatabaseCtrWithCreds(
+ test_http_database.TestHTTPDatabaseCtrWithCreds):
pass
-class TestLeapDatabaseIntegration(test_http_database.TestHTTPDatabaseIntegration):
+class TestLeapDatabaseIntegration(
+ test_http_database.TestHTTPDatabaseIntegration):
def test_non_existing_db(self):
db = leap_backend.LeapDatabase(self.getURL('not-there'))
@@ -190,7 +195,7 @@ class TestLeapDatabaseIntegration(test_http_database.TestHTTPDatabaseIntegration
def test_open_database_existing(self):
self.request_state._create_database('db0')
db = leap_backend.LeapDatabase.open_database(self.getURL('db0'),
- create=False)
+ create=False)
self.assertIs(None, db.get_doc('doc1'))
def test_open_database_non_existing(self):
@@ -201,7 +206,7 @@ class TestLeapDatabaseIntegration(test_http_database.TestHTTPDatabaseIntegration
def test_open_database_create(self):
db = leap_backend.LeapDatabase.open_database(self.getURL('new'),
- create=True)
+ create=True)
self.assertIs(None, db.get_doc('doc1'))
def test_delete_database_existing(self):
@@ -213,7 +218,7 @@ class TestLeapDatabaseIntegration(test_http_database.TestHTTPDatabaseIntegration
def test_doc_ids_needing_quoting(self):
db0 = self.request_state._create_database('db0')
db = leap_backend.LeapDatabase.open_database(self.getURL('db0'),
- create=False)
+ create=False)
doc = leap_backend.LeapDocument('%fff', None, '{}')
db.put_doc(doc)
self.assertGetDoc(db0, '%fff', doc.rev, '{}', False)
@@ -248,7 +253,8 @@ class TestLeapPyDocument(test_document.TestPyDocument, SoledadTest):
# The following tests come from `u1db.tests.test_remote_sync_target`.
#-----------------------------------------------------------------------------
-class TestLeapSyncTargetBasics(test_remote_sync_target.TestHTTPSyncTargetBasics):
+class TestLeapSyncTargetBasics(
+ test_remote_sync_target.TestHTTPSyncTargetBasics):
def test_parse_url(self):
remote_target = leap_backend.LeapSyncTarget('http://127.0.0.1:12345/')
@@ -257,6 +263,7 @@ class TestLeapSyncTargetBasics(test_remote_sync_target.TestHTTPSyncTargetBasics)
self.assertEqual(12345, remote_target._url.port)
self.assertEqual('/', remote_target._url.path)
+
class TestLeapParsingSyncStream(test_remote_sync_target.TestParsingSyncStream):
def test_wrong_start(self):
@@ -344,7 +351,7 @@ class TestRemoteSyncTargets(tests.TestCaseWithServer):
('oauth_http', {'make_app_with_state': make_oauth_http_app,
'make_document_for_test': make_leap_document_for_test,
'sync_target': oauth_leap_sync_target}),
- ]
+ ]
#-----------------------------------------------------------------------------
@@ -358,14 +365,15 @@ def oauth_https_sync_target(test, host, path):
tests.token1.key, tests.token1.secret)
return st
-class TestLeapSyncTargetHttpsSupport(test_https.TestHttpSyncTargetHttpsSupport, SoledadTest):
+
+class TestLeapSyncTargetHttpsSupport(test_https.TestHttpSyncTargetHttpsSupport,
+ SoledadTest):
scenarios = [
('oauth_https', {'server_def': test_https.https_server_def,
'make_app_with_state': make_oauth_http_app,
'make_document_for_test': make_leap_document_for_test,
- 'sync_target': oauth_https_sync_target
- }),
- ]
+ 'sync_target': oauth_https_sync_target,
+ }), ]
load_tests = tests.load_with_scenarios
diff --git a/tests/test_logs.py b/tests/test_logs.py
index 0be0d1f9..3dfeff75 100644
--- a/tests/test_logs.py
+++ b/tests/test_logs.py
@@ -1,14 +1,18 @@
import unittest2 as unittest
-from leap.soledad.backends.objectstore import TransactionLog, SyncLog, ConflictLog
+from leap.soledad.backends.objectstore import (
+ TransactionLog,
+ SyncLog,
+ ConflictLog
+)
class LogTestCase(unittest.TestCase):
def test_transaction_log(self):
data = [
- (2, "doc_3", "tran_3"),
- (3, "doc_2", "tran_2"),
- (1, "doc_1", "tran_1")
+ (2, "doc_3", "tran_3"),
+ (3, "doc_2", "tran_2"),
+ (1, "doc_1", "tran_1")
]
log = TransactionLog()
log.log = data
@@ -24,49 +28,55 @@ class LogTestCase(unittest.TestCase):
def test_sync_log(self):
data = [
- ("replica_3", 3, "tran_3"),
- ("replica_2", 2, "tran_2"),
- ("replica_1", 1, "tran_1")
+ ("replica_3", 3, "tran_3"),
+ ("replica_2", 2, "tran_2"),
+ ("replica_1", 1, "tran_1")
]
log = SyncLog()
log.log = data
# test getting
self.assertEqual(log.get_replica_gen_and_trans_id('replica_3'),
- (3, 'tran_3'), 'error getting replica gen and trans id')
+ (3, 'tran_3'),
+ 'error getting replica gen and trans id')
self.assertEqual(log.get_replica_gen_and_trans_id('replica_2'),
- (2, 'tran_2'), 'error getting replica gen and trans id')
+ (2, 'tran_2'),
+ 'error getting replica gen and trans id')
self.assertEqual(log.get_replica_gen_and_trans_id('replica_1'),
- (1, 'tran_1'), 'error getting replica gen and trans id')
+ (1, 'tran_1'),
+ 'error getting replica gen and trans id')
# test setting
log.set_replica_gen_and_trans_id('replica_1', 2, 'tran_12')
self.assertEqual(len(log._data), 3, 'error in log size after setting')
self.assertEqual(log.get_replica_gen_and_trans_id('replica_1'),
- (2, 'tran_12'), 'error setting replica gen and trans id')
+ (2, 'tran_12'),
+ 'error setting replica gen and trans id')
self.assertEqual(log.get_replica_gen_and_trans_id('replica_2'),
- (2, 'tran_2'), 'error setting replica gen and trans id')
+ (2, 'tran_2'),
+ 'error setting replica gen and trans id')
self.assertEqual(log.get_replica_gen_and_trans_id('replica_3'),
- (3, 'tran_3'), 'error setting replica gen and trans id')
+ (3, 'tran_3'),
+ 'error setting replica gen and trans id')
def test_whats_changed(self):
data = [
(1, "doc_1", "tran_1"),
(2, "doc_2", "tran_2"),
(3, "doc_3", "tran_3")
- ]
+ ]
log = TransactionLog()
log.log = data
self.assertEqual(
- log.whats_changed(3),
- (3, "tran_3", []),
- 'error getting whats changed.')
+ log.whats_changed(3),
+ (3, "tran_3", []),
+ 'error getting whats changed.')
self.assertEqual(
- log.whats_changed(2),
- (3, "tran_3", [("doc_3",3,"tran_3")]),
- 'error getting whats changed.')
+ log.whats_changed(2),
+ (3, "tran_3", [("doc_3", 3, "tran_3")]),
+ 'error getting whats changed.')
self.assertEqual(
- log.whats_changed(1),
- (3, "tran_3", [("doc_2",2,"tran_2"),("doc_3",3,"tran_3")]),
- 'error getting whats changed.')
+ log.whats_changed(1),
+ (3, "tran_3", [("doc_2", 2, "tran_2"), ("doc_3", 3, "tran_3")]),
+ 'error getting whats changed.')
def test_conflict_log(self):
# TODO: include tests for `get_conflicts` and `has_conflicts`.
@@ -75,13 +85,12 @@ class LogTestCase(unittest.TestCase):
('3', 'my:1', 'irrelevant')]
log = ConflictLog(None)
log.log = data
- log.delete_conflicts([('1','my:1'),('2','my:1')])
+ log.delete_conflicts([('1', 'my:1'), ('2', 'my:1')])
self.assertEqual(
- log.log,
- [('3', 'my:1', 'irrelevant')],
- 'error deleting conflicts.')
+ log.log,
+ [('3', 'my:1', 'irrelevant')],
+ 'error deleting conflicts.')
if __name__ == '__main__':
unittest.main()
-
diff --git a/tests/test_sqlcipher.py b/tests/test_sqlcipher.py
index 9e3b4052..d2fe0b11 100644
--- a/tests/test_sqlcipher.py
+++ b/tests/test_sqlcipher.py
@@ -11,7 +11,7 @@ import threading
from u1db import (
errors,
query_parser,
- )
+)
from u1db.backends.sqlite_backend import SQLitePartialExpandDatabase
# soledad stuff.
@@ -29,6 +29,7 @@ from leap.soledad.tests.u1db_tests import test_open
PASSWORD = '123456'
+
#-----------------------------------------------------------------------------
# The following tests come from `u1db.tests.test_common_backend`.
#-----------------------------------------------------------------------------
@@ -77,8 +78,8 @@ def copy_sqlcipher_database_for_test(test, db):
SQLCIPHER_SCENARIOS = [
('sqlcipher', {'make_database_for_test': make_sqlcipher_database_for_test,
'copy_database_for_test': copy_sqlcipher_database_for_test,
- 'make_document_for_test': tests.make_document_for_test,}),
- ]
+ 'make_document_for_test': tests.make_document_for_test, }),
+]
class SQLCipherTests(test_backends.AllDatabaseTests):
@@ -89,15 +90,18 @@ class SQLCipherDatabaseTests(test_backends.LocalDatabaseTests):
scenarios = SQLCIPHER_SCENARIOS
-class SQLCipherValidateGenNTransIdTests(test_backends.LocalDatabaseValidateGenNTransIdTests):
+class SQLCipherValidateGenNTransIdTests(
+ test_backends.LocalDatabaseValidateGenNTransIdTests):
scenarios = SQLCIPHER_SCENARIOS
-class SQLCipherValidateSourceGenTests(test_backends.LocalDatabaseValidateSourceGenTests):
+class SQLCipherValidateSourceGenTests(
+ test_backends.LocalDatabaseValidateSourceGenTests):
scenarios = SQLCIPHER_SCENARIOS
-class SQLCipherWithConflictsTests(test_backends.LocalDatabaseWithConflictsTests):
+class SQLCipherWithConflictsTests(
+ test_backends.LocalDatabaseWithConflictsTests):
scenarios = SQLCIPHER_SCENARIOS
@@ -157,14 +161,16 @@ class TestSQLCipherDatabase(test_sqlite_backend.TestSQLiteDatabase):
self.assertTrue(db2._is_initialized(db1._get_sqlite_handle().cursor()))
-class TestSQLCipherPartialExpandDatabase(test_sqlite_backend.TestSQLitePartialExpandDatabase):
+class TestSQLCipherPartialExpandDatabase(
+ test_sqlite_backend.TestSQLitePartialExpandDatabase):
# The following tests had to be cloned from u1db because they all
# instantiate the backend directly, so we need to change that in order to
# our backend be instantiated in place.
def setUp(self):
- super(test_sqlite_backend.TestSQLitePartialExpandDatabase, self).setUp()
+ super(test_sqlite_backend.TestSQLitePartialExpandDatabase,
+ self).setUp()
self.db = SQLCipherDatabase(':memory:', PASSWORD)
self.db._set_replica_uid('test')
@@ -216,7 +222,8 @@ class TestSQLCipherPartialExpandDatabase(test_sqlite_backend.TestSQLitePartialEx
path = temp_dir + '/test.sqlite'
SQLCipherDatabase(path, PASSWORD)
db2 = SQLCipherDatabase._open_database(
- path, PASSWORD, document_factory=test_backends.TestAlternativeDocument)
+ path, PASSWORD,
+ document_factory=test_backends.TestAlternativeDocument)
self.assertEqual(test_backends.TestAlternativeDocument, db2._factory)
def test_open_database_existing(self):
@@ -231,7 +238,8 @@ class TestSQLCipherPartialExpandDatabase(test_sqlite_backend.TestSQLitePartialEx
path = temp_dir + '/existing.sqlite'
SQLCipherDatabase(path, PASSWORD)
db2 = SQLCipherDatabase.open_database(
- path, PASSWORD, create=False, document_factory=test_backends.TestAlternativeDocument)
+ path, PASSWORD, create=False,
+ document_factory=test_backends.TestAlternativeDocument)
self.assertEqual(test_backends.TestAlternativeDocument, db2._factory)
def test_create_database_initializes_schema(self):
@@ -244,7 +252,8 @@ class TestSQLCipherPartialExpandDatabase(test_sqlite_backend.TestSQLitePartialEx
c.execute("SELECT * FROM u1db_config")
config = dict([(r[0], r[1]) for r in c.fetchall()])
self.assertEqual({'sql_schema': '0', 'replica_uid': 'test',
- 'index_storage': 'expand referenced encrypted'}, config)
+ 'index_storage': 'expand referenced encrypted'},
+ config)
#-----------------------------------------------------------------------------
@@ -289,6 +298,7 @@ class SQLCipherOpen(test_open.TestU1DBOpen):
self.addCleanup(db2.close)
self.assertIsInstance(db2, SQLCipherDatabase)
+
#-----------------------------------------------------------------------------
# Tests for actual encryption of the database
#-----------------------------------------------------------------------------
@@ -313,8 +323,8 @@ class SQLCipherEncryptionTest(unittest.TestCase):
doc = db.create_doc_from_json(tests.simple_doc)
db.close()
try:
- # trying to open an encrypted database with the regular u1db backend
- # should raise a DatabaseError exception.
+ # trying to open an encrypted database with the regular u1db
+ # backend should raise a DatabaseError exception.
SQLitePartialExpandDatabase(self.DB_FILE)
raise DatabaseIsNotEncrypted()
except DatabaseError:
@@ -323,16 +333,18 @@ class SQLCipherEncryptionTest(unittest.TestCase):
# encrypted.
db = SQLCipherDatabase(self.DB_FILE, PASSWORD)
doc = db.get_doc(doc.doc_id)
- self.assertEqual(tests.simple_doc, doc.get_json(), 'decrypted content mismatch')
+ self.assertEqual(tests.simple_doc, doc.get_json(),
+ 'decrypted content mismatch')
def test_try_to_open_raw_db_with_sqlcipher_backend(self):
db = SQLitePartialExpandDatabase(self.DB_FILE)
db.create_doc_from_json(tests.simple_doc)
db.close()
try:
- # trying to open the a non-encrypted database with sqlcipher backend
- # should raise a DatabaseIsNotEncrypted exception.
+            # trying to open a non-encrypted database with sqlcipher
+ # backend should raise a DatabaseIsNotEncrypted exception.
SQLCipherDatabase(self.DB_FILE, PASSWORD)
- raise DatabaseError("SQLCipher backend should not be able to open non-encrypted dbs.")
+ raise DatabaseError("SQLCipher backend should not be able to open "
+ "non-encrypted dbs.")
except DatabaseIsNotEncrypted:
pass
diff --git a/tests/u1db_tests/__init__.py b/tests/u1db_tests/__init__.py
index 167077f7..27aa4d79 100644
--- a/tests/u1db_tests/__init__.py
+++ b/tests/u1db_tests/__init__.py
@@ -39,26 +39,14 @@ import testtools
from u1db import (
errors,
Document,
- )
+)
from u1db.backends import (
inmemory,
sqlite_backend,
- )
+)
from u1db.remote import (
server_state,
- )
-
-try:
- from leap.soledad.tests.u1db_tests import c_backend_wrapper
- c_backend_error = None
-except ImportError, e:
- c_backend_wrapper = None # noqa
- c_backend_error = e
-
-# Setting this means that failing assertions will not include this module in
-# their traceback. However testtools doesn't seem to set it, and we don't want
-# this level to be omitted, but the lower levels to be shown.
-# __unittest = 1
+)
class TestCase(testtools.TestCase):
@@ -102,13 +90,16 @@ class TestCase(testtools.TestCase):
database, however the rest can be returned in any order.
"""
if conflicts:
- conflicts = [(rev, (json.loads(cont) if isinstance(cont, basestring)
+ conflicts = [(rev,
+ (json.loads(cont) if isinstance(cont, basestring)
else cont)) for (rev, cont) in conflicts]
conflicts = conflicts[:1] + sorted(conflicts[1:])
actual = db.get_doc_conflicts(doc_id)
if actual:
- actual = [(doc.rev, (json.loads(doc.get_json())
- if doc.get_json() is not None else None)) for doc in actual]
+ actual = [
+ (doc.rev, (json.loads(doc.get_json())
+ if doc.get_json() is not None else None))
+ for doc in actual]
actual = actual[:1] + sorted(actual[1:])
self.assertEqual(conflicts, actual)
@@ -179,49 +170,16 @@ def make_document_for_test(test, doc_id, rev, content, has_conflicts=False):
return Document(doc_id, rev, content, has_conflicts=has_conflicts)
-def make_c_database_for_test(test, replica_uid):
- if c_backend_wrapper is None:
- test.skipTest('c_backend_wrapper is not available')
- db = c_backend_wrapper.CDatabase(':memory:')
- db._set_replica_uid(replica_uid)
- return db
-
-
-def copy_c_database_for_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR
- # HOUSE.
- if c_backend_wrapper is None:
- test.skipTest('c_backend_wrapper is not available')
- new_db = db._copy(db)
- return new_db
-
-
-def make_c_document_for_test(test, doc_id, rev, content, has_conflicts=False):
- if c_backend_wrapper is None:
- test.skipTest('c_backend_wrapper is not available')
- return c_backend_wrapper.make_document(
- doc_id, rev, content, has_conflicts=has_conflicts)
-
-
LOCAL_DATABASES_SCENARIOS = [
- ('mem', {'make_database_for_test': make_memory_database_for_test,
- 'copy_database_for_test': copy_memory_database_for_test,
- 'make_document_for_test': make_document_for_test}),
- ('sql', {'make_database_for_test':
- make_sqlite_partial_expanded_for_test,
- 'copy_database_for_test':
- copy_sqlite_partial_expanded_for_test,
- 'make_document_for_test': make_document_for_test}),
- ]
-
-
-C_DATABASE_SCENARIOS = [
- ('c', {'make_database_for_test': make_c_database_for_test,
- 'copy_database_for_test': copy_c_database_for_test,
- 'make_document_for_test': make_c_document_for_test})]
+ ('mem', {'make_database_for_test': make_memory_database_for_test,
+ 'copy_database_for_test': copy_memory_database_for_test,
+ 'make_document_for_test': make_document_for_test}),
+ ('sql', {'make_database_for_test':
+ make_sqlite_partial_expanded_for_test,
+ 'copy_database_for_test':
+ copy_sqlite_partial_expanded_for_test,
+ 'make_document_for_test': make_document_for_test}),
+]
class DatabaseBaseTests(TestCase):
@@ -293,7 +251,7 @@ class ServerStateForTests(server_state.ServerState):
def ensure_database(self, path):
try:
- db = self.open_database(path)
+ db = self.open_database(path)
except errors.DatabaseDoesNotExist:
db = self._create_database(path)
return db, db._replica_uid
@@ -429,12 +387,12 @@ class TestingOAuthDataStore(oauth.OAuthDataStore):
consumers = {
consumer1.key: consumer1,
consumer2.key: consumer2,
- }
+ }
tokens = {
token1.key: token1,
token2.key: token2
- }
+ }
def lookup_consumer(self, key):
return self.consumers.get(key)
diff --git a/tests/u1db_tests/test_backends.py b/tests/u1db_tests/test_backends.py
index c93589ea..81150994 100644
--- a/tests/u1db_tests/test_backends.py
+++ b/tests/u1db_tests/test_backends.py
@@ -24,7 +24,7 @@ from u1db import (
DocumentBase,
errors,
vectorclock,
- )
+)
from leap.soledad.tests import u1db_tests as tests
@@ -38,12 +38,7 @@ from leap.soledad.tests.u1db_tests.test_remote_sync_target import (
from u1db.remote import (
http_database,
- )
-
-try:
- from u1db.tests import c_backend_wrapper
-except ImportError:
- c_backend_wrapper = None # noqa
+)
def make_http_database_for_test(test, replica_uid, path='test'):
@@ -97,7 +92,7 @@ class AllDatabaseTests(tests.DatabaseBaseTests, tests.TestCaseWithServer):
copy_oauth_http_database_for_test,
'make_document_for_test': tests.make_document_for_test,
'make_app_with_state': make_oauth_http_app})
- ] #+ tests.C_DATABASE_SCENARIOS
+ ]
def test_close(self):
self.db.close()
@@ -320,7 +315,7 @@ class AllDatabaseTests(tests.DatabaseBaseTests, tests.TestCaseWithServer):
cur_vc = vectorclock.VectorClockRev(old_rev)
deleted_vc = vectorclock.VectorClockRev(doc.rev)
self.assertTrue(deleted_vc.is_newer(cur_vc),
- "%s does not supersede %s" % (doc.rev, old_rev))
+ "%s does not supersede %s" % (doc.rev, old_rev))
def test_delete_then_put(self):
doc = self.db.create_doc_from_json(simple_doc)
@@ -334,7 +329,7 @@ class AllDatabaseTests(tests.DatabaseBaseTests, tests.TestCaseWithServer):
class DocumentSizeTests(tests.DatabaseBaseTests):
- scenarios = tests.LOCAL_DATABASES_SCENARIOS #+ tests.C_DATABASE_SCENARIOS
+ scenarios = tests.LOCAL_DATABASES_SCENARIOS
def test_put_doc_refuses_oversized_documents(self):
self.db.set_document_size_limit(1)
@@ -358,7 +353,7 @@ class DocumentSizeTests(tests.DatabaseBaseTests):
class LocalDatabaseTests(tests.DatabaseBaseTests):
- scenarios = tests.LOCAL_DATABASES_SCENARIOS #+ tests.C_DATABASE_SCENARIOS
+ scenarios = tests.LOCAL_DATABASES_SCENARIOS
def test_create_doc_different_ids_diff_db(self):
doc1 = self.db.create_doc_from_json(simple_doc)
@@ -517,17 +512,23 @@ class LocalDatabaseTests(tests.DatabaseBaseTests):
doc2 = self.make_document(doc1.doc_id, doc1.rev + '|other:1',
nested_doc)
self.assertEqual('inserted',
- self.db._put_doc_if_newer(doc2, save_conflict=False,
- replica_uid='other', replica_gen=2,
- replica_trans_id='T-id2')[0])
+ self.db._put_doc_if_newer(
+ doc2,
+ save_conflict=False,
+ replica_uid='other',
+ replica_gen=2,
+ replica_trans_id='T-id2')[0])
self.assertEqual((2, 'T-id2'), self.db._get_replica_gen_and_trans_id(
'other'))
# Compare to the old rev, should be superseded
doc2 = self.make_document(doc1.doc_id, doc1.rev, nested_doc)
self.assertEqual('superseded',
- self.db._put_doc_if_newer(doc2, save_conflict=False,
- replica_uid='other', replica_gen=3,
- replica_trans_id='T-id3')[0])
+ self.db._put_doc_if_newer(
+ doc2,
+ save_conflict=False,
+ replica_uid='other',
+ replica_gen=3,
+ replica_trans_id='T-id3')[0])
self.assertEqual(
(3, 'T-id3'), self.db._get_replica_gen_and_trans_id('other'))
# A conflict that isn't saved still records the sync gen, because we
@@ -535,9 +536,12 @@ class LocalDatabaseTests(tests.DatabaseBaseTests):
doc2 = self.make_document(doc1.doc_id, doc1.rev + '|fourth:1',
'{}')
self.assertEqual('conflicted',
- self.db._put_doc_if_newer(doc2, save_conflict=False,
- replica_uid='other', replica_gen=4,
- replica_trans_id='T-id4')[0])
+ self.db._put_doc_if_newer(
+ doc2,
+ save_conflict=False,
+ replica_uid='other',
+ replica_gen=4,
+ replica_trans_id='T-id4')[0])
self.assertEqual(
(4, 'T-id4'), self.db._get_replica_gen_and_trans_id('other'))
@@ -603,7 +607,7 @@ class LocalDatabaseTests(tests.DatabaseBaseTests):
class LocalDatabaseValidateGenNTransIdTests(tests.DatabaseBaseTests):
- scenarios = tests.LOCAL_DATABASES_SCENARIOS #+ tests.C_DATABASE_SCENARIOS
+ scenarios = tests.LOCAL_DATABASES_SCENARIOS
def test_validate_gen_and_trans_id(self):
self.db.create_doc_from_json(simple_doc)
@@ -627,7 +631,7 @@ class LocalDatabaseValidateGenNTransIdTests(tests.DatabaseBaseTests):
class LocalDatabaseValidateSourceGenTests(tests.DatabaseBaseTests):
- scenarios = tests.LOCAL_DATABASES_SCENARIOS #+ tests.C_DATABASE_SCENARIOS
+ scenarios = tests.LOCAL_DATABASES_SCENARIOS
def test_validate_source_gen_and_trans_id_same(self):
self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
@@ -647,7 +651,7 @@ class LocalDatabaseValidateSourceGenTests(tests.DatabaseBaseTests):
class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
# test supporting/functionality around storing conflicts
- scenarios = tests.LOCAL_DATABASES_SCENARIOS #+ tests.C_DATABASE_SCENARIOS
+ scenarios = tests.LOCAL_DATABASES_SCENARIOS
def test_get_docs_conflicted(self):
doc1 = self.db.create_doc_from_json(simple_doc)
@@ -668,7 +672,7 @@ class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
nested_doc)
self.assertEqual([no_conflict_doc, doc2],
list(self.db.get_docs([doc1.doc_id, doc2.doc_id],
- check_for_conflicts=False)))
+ check_for_conflicts=False)))
def test_get_doc_conflicts(self):
doc = self.db.create_doc_from_json(simple_doc)
@@ -702,7 +706,8 @@ class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
replica_trans_id='foo')
self.assertGetDocConflicts(self.db, doc.doc_id,
- [('alternate:1', nested_doc), (doc.rev, simple_doc)])
+ [('alternate:1', nested_doc),
+ (doc.rev, simple_doc)])
orig_rev = doc.rev
self.db.resolve_doc(doc, [alt_doc.rev, doc.rev])
self.assertNotEqual(orig_rev, doc.rev)
@@ -746,15 +751,15 @@ class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
doc3, save_conflict=True, replica_uid='r', replica_gen=2,
replica_trans_id='bar')
self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc3.rev, content3),
- (doc1.rev, simple_doc),
- (doc2.rev, nested_doc)])
+ [(doc3.rev, content3),
+ (doc1.rev, simple_doc),
+ (doc2.rev, nested_doc)])
self.db.resolve_doc(doc1, [doc2.rev, doc1.rev])
self.assertTrue(doc1.has_conflicts)
self.assertGetDoc(self.db, doc1.doc_id, doc3.rev, content3, True)
self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc3.rev, content3),
- (doc1.rev, simple_doc)])
+ [(doc3.rev, content3),
+ (doc1.rev, simple_doc)])
def test_resolve_doc_partial_winning(self):
doc1 = self.db.create_doc_from_json(simple_doc)
@@ -832,9 +837,9 @@ class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
doc22, save_conflict=True, replica_uid='r', replica_gen=3,
replica_trans_id='zed')
self.assertGetDocConflicts(self.db, doc1.doc_id,
- [('alternate:2', doc22.get_json()),
- ('altalt:1', doc3.get_json()),
- (doc1.rev, simple_doc)])
+ [('alternate:2', doc22.get_json()),
+ ('altalt:1', doc3.get_json()),
+ (doc1.rev, simple_doc)])
def test_put_doc_if_newer_save_conflict_was_deleted(self):
doc1 = self.db.create_doc_from_json(simple_doc)
@@ -847,7 +852,8 @@ class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
self.assertGetDoc(
self.db, doc1.doc_id, 'alternate:1', nested_doc, True)
self.assertGetDocConflicts(self.db, doc1.doc_id,
- [('alternate:1', nested_doc), (doc1.rev, None)])
+ [('alternate:1', nested_doc),
+ (doc1.rev, None)])
def test_put_doc_if_newer_propagates_full_resolution(self):
doc1 = self.db.create_doc_from_json(simple_doc)
@@ -860,7 +866,7 @@ class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
resolved_vcr.maximize(vcr_2)
resolved_vcr.increment('alternate')
doc_resolved = self.make_document(doc1.doc_id, resolved_vcr.as_str(),
- '{"good": 1}')
+ '{"good": 1}')
state, _ = self.db._put_doc_if_newer(
doc_resolved, save_conflict=True, replica_uid='r', replica_gen=2,
replica_trans_id='foo2')
@@ -881,8 +887,9 @@ class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
doc3, save_conflict=True, replica_uid='r', replica_gen=2,
replica_trans_id='foo2')
self.assertGetDocConflicts(self.db, doc1.doc_id,
- [('alternate:1', nested_doc), ('test:1', simple_doc),
- ('altalt:1', '{}')])
+ [('alternate:1', nested_doc),
+ ('test:1', simple_doc),
+ ('altalt:1', '{}')])
resolved_vcr = vectorclock.VectorClockRev(doc1.rev)
vcr_3 = vectorclock.VectorClockRev(doc3.rev)
resolved_vcr.maximize(vcr_3)
@@ -897,7 +904,8 @@ class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
doc4 = self.db.get_doc(doc1.doc_id)
self.assertTrue(doc4.has_conflicts)
self.assertGetDocConflicts(self.db, doc1.doc_id,
- [('alternate:2|test:1', '{"good": 1}'), ('altalt:1', '{}')])
+ [('alternate:2|test:1', '{"good": 1}'),
+ ('altalt:1', '{}')])
def test_put_doc_if_newer_replica_uid(self):
doc1 = self.db.create_doc_from_json(simple_doc)
@@ -911,9 +919,12 @@ class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
doc2 = self.make_document(doc1.doc_id, doc1.rev + '|third:3',
'{}')
self.assertEqual('conflicted',
- self.db._put_doc_if_newer(doc2, save_conflict=True,
- replica_uid='other', replica_gen=3,
- replica_trans_id='T-id3')[0])
+ self.db._put_doc_if_newer(
+ doc2,
+ save_conflict=True,
+ replica_uid='other',
+ replica_gen=3,
+ replica_trans_id='T-id3')[0])
self.assertEqual(
(3, 'T-id3'), self.db._get_replica_gen_and_trans_id('other'))
@@ -962,7 +973,7 @@ class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
rev_a3 = vectorclock.VectorClockRev('test:3')
rev_a1b1 = vectorclock.VectorClockRev('test:1|other:1')
self.assertTrue(rev.is_newer(rev_a3))
- self.assertTrue('test:4' in doc.rev) # locally increased
+ self.assertTrue('test:4' in doc.rev) # locally increased
self.assertTrue(rev.is_newer(rev_a1b1))
def test_put_doc_if_newer_autoresolve_4(self):
@@ -988,7 +999,7 @@ class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
rev_a3 = vectorclock.VectorClockRev('test:3')
rev_a1b1 = vectorclock.VectorClockRev('test:1|other:1')
self.assertTrue(rev.is_newer(rev_a3))
- self.assertTrue('test:4' in doc.rev) # locally increased
+ self.assertTrue('test:4' in doc.rev) # locally increased
self.assertTrue(rev.is_newer(rev_a1b1))
def test_put_refuses_to_update_conflicted(self):
@@ -1015,7 +1026,7 @@ class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
class DatabaseIndexTests(tests.DatabaseBaseTests):
- scenarios = tests.LOCAL_DATABASES_SCENARIOS #+ tests.C_DATABASE_SCENARIOS
+ scenarios = tests.LOCAL_DATABASES_SCENARIOS
def assertParseError(self, definition):
self.db.create_doc_from_json(nested_doc)
diff --git a/tests/u1db_tests/test_document.py b/tests/u1db_tests/test_document.py
index 2a0c0294..e706e1a9 100644
--- a/tests/u1db_tests/test_document.py
+++ b/tests/u1db_tests/test_document.py
@@ -23,7 +23,7 @@ from leap.soledad.tests import u1db_tests as tests
class TestDocument(tests.TestCase):
scenarios = ([(
- 'py', {'make_document_for_test': tests.make_document_for_test})]) #+
+ 'py', {'make_document_for_test': tests.make_document_for_test})]) # +
#tests.C_DATABASE_SCENARIOS)
def test_create_doc(self):
@@ -37,7 +37,7 @@ class TestDocument(tests.TestCase):
doc = self.make_document('doc-id', 'uid:1', tests.simple_doc)
self.assertEqual(
'%s(doc-id, uid:1, \'{"key": "value"}\')'
- % (doc.__class__.__name__,),
+ % (doc.__class__.__name__,),
repr(doc))
def test__repr__conflicted(self):
@@ -45,7 +45,7 @@ class TestDocument(tests.TestCase):
has_conflicts=True)
self.assertEqual(
'%s(doc-id, uid:1, conflicted, \'{"key": "value"}\')'
- % (doc.__class__.__name__,),
+ % (doc.__class__.__name__,),
repr(doc))
def test__lt__(self):
diff --git a/tests/u1db_tests/test_http_app.py b/tests/u1db_tests/test_http_app.py
index 73838613..e0729aa2 100644
--- a/tests/u1db_tests/test_http_app.py
+++ b/tests/u1db_tests/test_http_app.py
@@ -28,14 +28,14 @@ from u1db import (
__version__ as _u1db_version,
errors,
sync,
- )
+)
from leap.soledad.tests import u1db_tests as tests
from u1db.remote import (
http_app,
http_errors,
- )
+)
class TestFencedReader(tests.TestCase):
@@ -286,7 +286,7 @@ class TestHTTPInvocationByMethodWithBody(tests.TestCase):
'{"entry": "x"},\r\n' # stream entry
'{"entry": "y"}\r\n' # stream entry
']'
- )
+ )
environ = {'QUERY_STRING': 'a=1', 'REQUEST_METHOD': 'PUT',
'wsgi.input': StringIO.StringIO(body),
'CONTENT_LENGTH': str(len(body)),
@@ -500,8 +500,8 @@ class TestHTTPResponder(tests.TestCase):
self.assertEqual({'content-type': 'application/x-u1db-multi-json',
'cache-control': 'no-cache'}, self.headers)
self.assertEqual(['[',
- '\r\n', '{"entry": 1}',
- ',\r\n', '{"entry": 2}',
+ '\r\n', '{"entry": 1}',
+ ',\r\n', '{"entry": 2}',
'\r\n]\r\n'], self.response_body)
self.assertEqual([], responder.content)
@@ -516,7 +516,7 @@ class TestHTTPResponder(tests.TestCase):
self.assertEqual({'content-type': 'application/x-u1db-multi-json',
'cache-control': 'no-cache'}, self.headers)
self.assertEqual(['[',
- '\r\n', '{"entry": 1}'], self.response_body)
+ '\r\n', '{"entry": 1}'], self.response_body)
self.assertEqual([',\r\n', '{"error": "unavailable"}\r\n'],
responder.content)
@@ -780,12 +780,13 @@ class TestHTTPApp(tests.TestCase):
source_replica_uid='other-id',
source_replica_generation=1,
source_transaction_id='T-transid'),
- json.loads(resp.body))
+ json.loads(resp.body))
def test_record_sync_info(self):
resp = self.app.put('/db0/sync-from/other-id',
- params='{"generation": 2, "transaction_id": "T-transid"}',
- headers={'content-type': 'application/json'})
+ params='{"generation": 2, "transaction_id": '
+ '"T-transid"}',
+ headers={'content-type': 'application/json'})
self.assertEqual(200, resp.status)
self.assertEqual('application/json', resp.header('content-type'))
self.assertEqual({'ok': True}, json.loads(resp.body))
@@ -799,7 +800,7 @@ class TestHTTPApp(tests.TestCase):
'{"value": "here"}', 'gen': 10, 'trans_id': 'T-sid'},
11: {'id': 'doc-here2', 'rev': 'replica:1', 'content':
'{"value": "here2"}', 'gen': 11, 'trans_id': 'T-sed'}
- }
+ }
gens = []
_do_set_replica_gen_and_trans_id = \
@@ -824,9 +825,9 @@ class TestHTTPApp(tests.TestCase):
"%s\r\n" % json.dumps(entries[11]) +
"]\r\n")
resp = self.app.post('/db0/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'})
+ params=body,
+ headers={'content-type':
+ 'application/x-u1db-sync-stream'})
self.assertEqual(200, resp.status)
self.assertEqual('application/x-u1db-sync-stream',
resp.header('content-type'))
@@ -846,7 +847,7 @@ class TestHTTPApp(tests.TestCase):
'{"value": "here"}', 'gen': 10, 'trans_id': 'T-sid'},
11: {'id': 'doc-here2', 'rev': 'replica:1', 'content':
'{"value": "here2"}', 'gen': 11, 'trans_id': 'T-sed'}
- }
+ }
args = dict(last_known_generation=0, ensure=True)
body = ("[\r\n" +
@@ -855,9 +856,9 @@ class TestHTTPApp(tests.TestCase):
"%s\r\n" % json.dumps(entries[11]) +
"]\r\n")
resp = self.app.post('/dbnew/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'})
+ params=body,
+ headers={'content-type':
+ 'application/x-u1db-sync-stream'})
self.assertEqual(200, resp.status)
self.assertEqual('application/x-u1db-sync-stream',
resp.header('content-type'))
@@ -878,16 +879,16 @@ class TestHTTPApp(tests.TestCase):
entries = {
10: {'id': 'doc-here', 'rev': 'replica:1', 'content':
'{"value": "%s"}' % ('H' * 11000), 'gen': 10},
- }
+ }
args = dict(last_known_generation=0)
body = ("[\r\n" +
"%s,\r\n" % json.dumps(args) +
"%s\r\n" % json.dumps(entries[10]) +
"]\r\n")
resp = self.app.post('/db0/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'},
+ params=body,
+ headers={'content-type':
+ 'application/x-u1db-sync-stream'},
expect_errors=True)
self.assertEqual(400, resp.status)
@@ -897,9 +898,9 @@ class TestHTTPApp(tests.TestCase):
args = dict(last_known_generation=0)
body = "[\r\n%s\r\n]" % json.dumps(args)
resp = self.app.post('/db0/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'})
+ params=body,
+ headers={'content-type':
+ 'application/x-u1db-sync-stream'})
self.assertEqual(200, resp.status)
self.assertEqual('application/x-u1db-sync-stream',
resp.header('content-type'))
@@ -934,9 +935,9 @@ class TestHTTPApp(tests.TestCase):
self.patch(sync.SyncExchange, 'return_docs',
boom)
resp = self.app.post('/db0/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'})
+ params=body,
+ headers={'content-type':
+ 'application/x-u1db-sync-stream'})
self.assertEqual(200, resp.status)
self.assertEqual('application/x-u1db-sync-stream',
resp.header('content-type'))
diff --git a/tests/u1db_tests/test_http_client.py b/tests/u1db_tests/test_http_client.py
index b1bb106c..42e98461 100644
--- a/tests/u1db_tests/test_http_client.py
+++ b/tests/u1db_tests/test_http_client.py
@@ -24,13 +24,13 @@ except ImportError:
from u1db import (
errors,
- )
+)
from leap.soledad.tests import u1db_tests as tests
from u1db.remote import (
http_client,
- )
+)
class TestEncoder(tests.TestCase):
@@ -126,7 +126,7 @@ class TestHTTPClientBase(tests.TestCaseWithServer):
start_response("401 Unauthorized",
[('Content-Type', 'application/json')])
return [json.dumps({"error": "unauthorized",
- "message": e.message})]
+ "message": e.message})]
start_response("200 OK", [('Content-Type', 'application/json')])
return [json.dumps([environ['PATH_INFO'], token.key, params])]
@@ -146,7 +146,7 @@ class TestHTTPClientBase(tests.TestCaseWithServer):
def test_parse_url(self):
cli = http_client.HTTPClientBase(
- '%s://127.0.0.1:12345/' % self.url_scheme)
+ '%s://127.0.0.1:12345/' % self.url_scheme)
self.assertEqual(self.url_scheme, cli._url.scheme)
self.assertEqual('127.0.0.1', cli._url.hostname)
self.assertEqual(12345, cli._url.port)
@@ -187,7 +187,7 @@ class TestHTTPClientBase(tests.TestCaseWithServer):
'REQUEST_METHOD': 'GET'}, json.loads(res))
res, headers = cli._request('POST', ['echo'], {'b': 2}, 'Body',
- 'application/x-test')
+ 'application/x-test')
self.assertEqual({'CONTENT_TYPE': 'application/x-test',
'PATH_INFO': '/dbase/echo',
'QUERY_STRING': 'b=2',
@@ -342,7 +342,7 @@ class TestHTTPClientBase(tests.TestCaseWithServer):
'consumer_secret': tests.consumer1.secret,
'token_key': tests.token1.key,
'token_secret': tests.token1.secret,
- }})
+ }})
params = {'x': u'\xf0', 'y': "foo"}
res, headers = cli._request('GET', ['doc', 'oauth'], params)
self.assertEqual(
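
The OAuth credentials reflowed in the last hunk follow a fixed shape; sketched here with placeholder strings instead of the shared fixtures (tests.consumer1 / tests.token1):

    # Placeholder values for illustration only.
    creds = {'oauth': {
        'consumer_key': 'a-consumer-key',
        'consumer_secret': 'a-consumer-secret',
        'token_key': 'a-token-key',
        'token_secret': 'a-token-secret',
    }}
    # The tests hand this mapping to the HTTP client and database constructors
    # as `creds`, which is what the later db1._creds assertion checks.
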
diff --git a/tests/u1db_tests/test_http_database.py b/tests/u1db_tests/test_http_database.py
index dc20b6ec..40a839a5 100644
--- a/tests/u1db_tests/test_http_database.py
+++ b/tests/u1db_tests/test_http_database.py
@@ -25,14 +25,14 @@ except ImportError:
from u1db import (
errors,
Document,
- )
+)
from leap.soledad.tests import u1db_tests as tests
from u1db.remote import (
http_database,
http_target,
- )
+)
from leap.soledad.tests.u1db_tests.test_remote_sync_target import (
make_http_app,
)
@@ -48,14 +48,14 @@ class TestHTTPDatabaseSimpleOperations(tests.TestCase):
self.response_val = None
def _request(method, url_parts, params=None, body=None,
- content_type=None):
+ content_type=None):
self.got = method, url_parts, params, body, content_type
if isinstance(self.response_val, Exception):
raise self.response_val
return self.response_val
def _request_json(method, url_parts, params=None, body=None,
- content_type=None):
+ content_type=None):
self.got = method, url_parts, params, body, content_type
if isinstance(self.response_val, Exception):
raise self.response_val
@@ -67,13 +67,15 @@ class TestHTTPDatabaseSimpleOperations(tests.TestCase):
def test__sanity_same_signature(self):
my_request_sig = inspect.getargspec(self.db._request)
my_request_sig = (['self'] + my_request_sig[0],) + my_request_sig[1:]
- self.assertEqual(my_request_sig,
- inspect.getargspec(http_database.HTTPDatabase._request))
+ self.assertEqual(
+ my_request_sig,
+ inspect.getargspec(http_database.HTTPDatabase._request))
my_request_json_sig = inspect.getargspec(self.db._request_json)
my_request_json_sig = ((['self'] + my_request_json_sig[0],) +
my_request_json_sig[1:])
- self.assertEqual(my_request_json_sig,
- inspect.getargspec(http_database.HTTPDatabase._request_json))
+ self.assertEqual(
+ my_request_json_sig,
+ inspect.getargspec(http_database.HTTPDatabase._request_json))
def test__ensure(self):
self.response_val = {'ok': True}, {}
@@ -197,7 +199,7 @@ class TestHTTPDatabaseCtrWithCreds(tests.TestCase):
'consumer_secret': tests.consumer1.secret,
'token_key': tests.token1.key,
'token_secret': tests.token1.secret
- }})
+ }})
self.assertIn('oauth', db1._creds)
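
The signature-parity assertions reflowed above lean on inspect.getargspec; a minimal standalone sketch of the same check, with stand-in functions rather than the real HTTPDatabase methods:

    import inspect

    class Base(object):
        # Stand-in for http_database.HTTPDatabase._request.
        def _request(self, method, url_parts, params=None, body=None,
                     content_type=None):
            pass

    # Stand-in for the _request closure defined inside the test fixture.
    def _request(method, url_parts, params=None, body=None, content_type=None):
        pass

    my_request_sig = inspect.getargspec(_request)
    # Prepend 'self' so the plain function lines up with the unbound method.
    my_request_sig = (['self'] + my_request_sig[0],) + my_request_sig[1:]
    assert my_request_sig == inspect.getargspec(Base._request)
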
diff --git a/tests/u1db_tests/test_https.py b/tests/u1db_tests/test_https.py
index 0f4541d4..3f8797d8 100644
--- a/tests/u1db_tests/test_https.py
+++ b/tests/u1db_tests/test_https.py
@@ -11,11 +11,11 @@ from leap.soledad.tests import u1db_tests as tests
from u1db.remote import (
http_client,
http_target,
- )
+)
from leap.soledad.tests.u1db_tests.test_remote_sync_target import (
make_oauth_http_app,
- )
+)
def https_server_def():
@@ -56,10 +56,11 @@ class TestHttpSyncTargetHttpsSupport(tests.TestCaseWithServer):
scenarios = [
('oauth_https', {'server_def': https_server_def,
'make_app_with_state': make_oauth_http_app,
- 'make_document_for_test': tests.make_document_for_test,
+ 'make_document_for_test':
+ tests.make_document_for_test,
'sync_target': oauth_https_sync_target
}),
- ]
+ ]
def setUp(self):
try:
diff --git a/tests/u1db_tests/test_open.py b/tests/u1db_tests/test_open.py
index 88312402..0ff307e8 100644
--- a/tests/u1db_tests/test_open.py
+++ b/tests/u1db_tests/test_open.py
@@ -21,7 +21,7 @@ import os
from u1db import (
errors,
open as u1db_open,
- )
+)
from leap.soledad.tests import u1db_tests as tests
from u1db.backends import sqlite_backend
from leap.soledad.tests.u1db_tests.test_backends import TestAlternativeDocument
diff --git a/tests/u1db_tests/test_remote_sync_target.py b/tests/u1db_tests/test_remote_sync_target.py
index 6f69073d..66d404d2 100644
--- a/tests/u1db_tests/test_remote_sync_target.py
+++ b/tests/u1db_tests/test_remote_sync_target.py
@@ -20,7 +20,7 @@ import cStringIO
from u1db import (
errors,
- )
+)
from leap.soledad.tests import u1db_tests as tests
@@ -28,7 +28,7 @@ from u1db.remote import (
http_app,
http_target,
oauth_middleware,
- )
+)
class TestHTTPSyncTargetBasics(tests.TestCase):
@@ -139,7 +139,7 @@ class TestRemoteSyncTargets(tests.TestCaseWithServer):
('oauth_http', {'make_app_with_state': make_oauth_http_app,
'make_document_for_test': tests.make_document_for_test,
'sync_target': oauth_http_sync_target}),
- ]
+ ]
def getSyncTarget(self, path=None):
if self.server is None:
@@ -197,8 +197,9 @@ class TestRemoteSyncTargets(tests.TestCaseWithServer):
if doc.doc_id in trigger_ids:
raise Exception
return _put_doc_if_newer(doc, save_conflict=save_conflict,
- replica_uid=replica_uid, replica_gen=replica_gen,
- replica_trans_id=replica_trans_id)
+ replica_uid=replica_uid,
+ replica_gen=replica_gen,
+ replica_trans_id=replica_trans_id)
self.patch(db, '_put_doc_if_newer', bomb_put_doc_if_newer)
remote_target = self.getSyncTarget('test')
other_changes = []
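
The hunk above only re-wraps the keyword arguments of the failure-injecting wrapper; the pattern itself, sketched standalone around hypothetical stand-ins (FakeDoc and FakeDB are illustrations, not u1db classes):

    class FakeDoc(object):
        def __init__(self, doc_id):
            self.doc_id = doc_id

    class FakeDB(object):
        # Stand-in for the database whose _put_doc_if_newer gets patched.
        def _put_doc_if_newer(self, doc, save_conflict=False, replica_uid=None,
                              replica_gen=None, replica_trans_id=None):
            return 'inserted', 1

    db = FakeDB()
    trigger_ids = ['doc-boom']
    _put_doc_if_newer = db._put_doc_if_newer

    def bomb_put_doc_if_newer(doc, save_conflict,
                              replica_uid=None, replica_gen=None,
                              replica_trans_id=None):
        # Blow up only for selected documents; otherwise delegate unchanged.
        if doc.doc_id in trigger_ids:
            raise Exception
        return _put_doc_if_newer(doc, save_conflict=save_conflict,
                                 replica_uid=replica_uid,
                                 replica_gen=replica_gen,
                                 replica_trans_id=replica_trans_id)

    # The test installs the wrapper with self.patch(db, '_put_doc_if_newer', ...).
    db._put_doc_if_newer = bomb_put_doc_if_newer
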
diff --git a/tests/u1db_tests/test_sqlite_backend.py b/tests/u1db_tests/test_sqlite_backend.py
index 081d3ae7..2003da03 100644
--- a/tests/u1db_tests/test_sqlite_backend.py
+++ b/tests/u1db_tests/test_sqlite_backend.py
@@ -25,7 +25,7 @@ from sqlite3 import dbapi2
from u1db import (
errors,
query_parser,
- )
+)
from leap.soledad.tests import u1db_tests as tests
@@ -103,7 +103,7 @@ class TestSQLitePartialExpandDatabase(tests.TestCase):
raw_db = self.db._get_sqlite_handle()
self.db._close_sqlite_handle()
self.assertRaises(dbapi2.ProgrammingError,
- raw_db.cursor)
+ raw_db.cursor)
def test_create_database_initializes_schema(self):
raw_db = self.db._get_sqlite_handle()
@@ -210,8 +210,7 @@ class TestSQLitePartialExpandDatabase(tests.TestCase):
[(doc1.doc_id, "key1", "val1"),
(doc1.doc_id, "key2", "val2"),
(doc2.doc_id, "key1", "valx"),
- (doc2.doc_id, "key2", "valy"),
- ]), sorted(c.fetchall()))
+ (doc2.doc_id, "key2", "valy"), ]), sorted(c.fetchall()))
def test_put_updates_fields(self):
self.db.create_index('test', 'key1', 'key2')
@@ -223,8 +222,7 @@ class TestSQLitePartialExpandDatabase(tests.TestCase):
c.execute("SELECT doc_id, field_name, value FROM document_fields"
" ORDER BY doc_id, field_name, value")
self.assertEqual([(doc1.doc_id, "key1", "val1"),
- (doc1.doc_id, "key2", "valy"),
- ], c.fetchall())
+ (doc1.doc_id, "key2", "valy"), ], c.fetchall())
def test_put_updates_nested_fields(self):
self.db.create_index('test', 'key', 'sub.doc')
@@ -233,19 +231,19 @@ class TestSQLitePartialExpandDatabase(tests.TestCase):
c.execute("SELECT doc_id, field_name, value FROM document_fields"
" ORDER BY doc_id, field_name, value")
self.assertEqual([(doc1.doc_id, "key", "value"),
- (doc1.doc_id, "sub.doc", "underneath"),
- ], c.fetchall())
+ (doc1.doc_id, "sub.doc", "underneath"), ],
+ c.fetchall())
def test__ensure_schema_rollback(self):
temp_dir = self.createTempDir(prefix='u1db-test-')
path = temp_dir + '/rollback.db'
class SQLitePartialExpandDbTesting(
- sqlite_backend.SQLitePartialExpandDatabase):
+ sqlite_backend.SQLitePartialExpandDatabase):
def _set_replica_uid_in_transaction(self, uid):
super(SQLitePartialExpandDbTesting,
- self)._set_replica_uid_in_transaction(uid)
+ self)._set_replica_uid_in_transaction(uid)
if fail:
raise Exception()
@@ -275,13 +273,13 @@ class TestSQLitePartialExpandDatabase(tests.TestCase):
temp_dir = self.createTempDir(prefix='u1db-test-')
path = temp_dir + '/non-existent.sqlite'
self.assertRaises(errors.DatabaseDoesNotExist,
- sqlite_backend.SQLiteDatabase._open_database, path)
+ sqlite_backend.SQLiteDatabase._open_database, path)
def test__open_database_during_init(self):
temp_dir = self.createTempDir(prefix='u1db-test-')
path = temp_dir + '/initialised.db'
db = sqlite_backend.SQLitePartialExpandDatabase.__new__(
- sqlite_backend.SQLitePartialExpandDatabase)
+ sqlite_backend.SQLitePartialExpandDatabase)
db._db_handle = dbapi2.connect(path) # db is there but not yet init-ed
self.addCleanup(db.close)
observed = []
@@ -299,9 +297,10 @@ class TestSQLitePartialExpandDatabase(tests.TestCase):
db2 = SQLiteDatabaseTesting._open_database(path)
self.addCleanup(db2.close)
self.assertIsInstance(db2, sqlite_backend.SQLitePartialExpandDatabase)
- self.assertEqual([None,
- sqlite_backend.SQLitePartialExpandDatabase._index_storage_value],
- observed)
+ self.assertEqual(
+ [None,
+ sqlite_backend.SQLitePartialExpandDatabase._index_storage_value],
+ observed)
def test__open_database_invalid(self):
class SQLiteDatabaseTesting(sqlite_backend.SQLiteDatabase):
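
One of the reflowed assertions above checks that a closed sqlite handle refuses further work; the underlying behaviour, outside the test harness:

    from sqlite3 import dbapi2

    conn = dbapi2.connect(':memory:')
    conn.close()
    try:
        conn.cursor()   # operating on a closed handle raises ProgrammingError
    except dbapi2.ProgrammingError:
        pass
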
diff --git a/tests/u1db_tests/test_sync.py b/tests/u1db_tests/test_sync.py
index 551826b6..96aa2736 100644
--- a/tests/u1db_tests/test_sync.py
+++ b/tests/u1db_tests/test_sync.py
@@ -24,21 +24,21 @@ from u1db import (
sync,
vectorclock,
SyncTarget,
- )
+)
from leap.soledad.tests import u1db_tests as tests
from u1db.backends import (
inmemory,
- )
+)
from u1db.remote import (
http_target,
- )
+)
from leap.soledad.tests.u1db_tests.test_remote_sync_target import (
make_http_app,
make_oauth_http_app,
- )
+)
simple_doc = tests.simple_doc
nested_doc = tests.nested_doc
@@ -71,7 +71,7 @@ target_scenarios = [
('oauth_http', {'create_db_and_target':
_make_local_db_and_oauth_http_target,
'make_app_with_state': make_oauth_http_app}),
- ]
+]
class DatabaseSyncTargetTests(tests.DatabaseBaseTests,
@@ -369,8 +369,9 @@ class DatabaseSyncTargetTests(tests.DatabaseBaseTests,
def test__set_trace_hook_shallow(self):
if (self.st._set_trace_hook_shallow == self.st._set_trace_hook
- or self.st._set_trace_hook_shallow.im_func ==
- SyncTarget._set_trace_hook_shallow.im_func):
+ or
+ self.st._set_trace_hook_shallow.im_func ==
+ SyncTarget._set_trace_hook_shallow.im_func):
# shallow same as full
expected = ['before whats_changed',
'after whats_changed',
@@ -456,7 +457,7 @@ sync_scenarios.append(('pyhttp', {
'make_document_for_test': tests.make_document_for_test,
'make_app_with_state': make_http_app,
'do_sync': sync_via_synchronizer_and_http
- }))
+}))
class DatabaseSyncTests(tests.DatabaseBaseTests,
@@ -476,7 +477,7 @@ class DatabaseSyncTests(tests.DatabaseBaseTests,
def create_database_for_role(self, replica_uid, sync_role):
# hook point for reuse
- return super(DatabaseSyncTests, self).create_database(replica_uid)
+ return super(DatabaseSyncTests, self).create_database(replica_uid)
def copy_database(self, db, sync_role=None):
# DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES
@@ -522,8 +523,10 @@ class DatabaseSyncTests(tests.DatabaseBaseTests,
self.assertEqual(
(0, ''), self.db2._get_replica_gen_and_trans_id('test1'))
self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [], 'last_known_gen': 0},
- 'return': {'docs': [], 'last_gen': 0}})
+ {'receive':
+ {'docs': [], 'last_known_gen': 0},
+ 'return':
+ {'docs': [], 'last_gen': 0}})
def test_sync_autoresolves(self):
self.db1 = self.create_database('test1', 'source')
@@ -732,10 +735,12 @@ class DatabaseSyncTests(tests.DatabaseBaseTests,
self.assertEqual(1, self.db1._get_replica_gen_and_trans_id('test2')[0])
self.assertEqual(1, self.db2._get_replica_gen_and_trans_id('test1')[0])
self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [(doc.doc_id, doc.rev)],
- 'source_uid': 'test1',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return': {'docs': [], 'last_gen': 1}})
+ {'receive':
+ {'docs': [(doc.doc_id, doc.rev)],
+ 'source_uid': 'test1',
+ 'source_gen': 1,
+ 'last_known_gen': 0},
+ 'return': {'docs': [], 'last_gen': 1}})
def test_sync_pulls_changes(self):
self.db1 = self.create_database('test1', 'source')
@@ -747,9 +752,11 @@ class DatabaseSyncTests(tests.DatabaseBaseTests,
self.assertEqual(1, self.db1._get_replica_gen_and_trans_id('test2')[0])
self.assertEqual(1, self.db2._get_replica_gen_and_trans_id('test1')[0])
self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [], 'last_known_gen': 0},
- 'return': {'docs': [(doc.doc_id, doc.rev)],
- 'last_gen': 1}})
+ {'receive':
+ {'docs': [], 'last_known_gen': 0},
+ 'return':
+ {'docs': [(doc.doc_id, doc.rev)],
+ 'last_gen': 1}})
self.assertEqual([doc], self.db1.get_from_index('test-idx', 'value'))
def test_sync_pulling_doesnt_update_other_if_changed(self):
@@ -770,9 +777,11 @@ class DatabaseSyncTests(tests.DatabaseBaseTests,
self.assertEqual(0, self.sync(self.db1, self.db2,
trace_hook=before_get_docs))
self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [], 'last_known_gen': 0},
- 'return': {'docs': [(doc.doc_id, doc.rev)],
- 'last_gen': 1}})
+ {'receive':
+ {'docs': [], 'last_known_gen': 0},
+ 'return':
+ {'docs': [(doc.doc_id, doc.rev)],
+ 'last_gen': 1}})
self.assertEqual(1, self.db1._get_replica_gen_and_trans_id('test2')[0])
# c2 should not have gotten a '_record_sync_info' call, because the
# local database had been updated more than just by the messages
@@ -804,10 +813,11 @@ class DatabaseSyncTests(tests.DatabaseBaseTests,
self.assertEqual(0, self.sync(self.db2, self.db3))
self.assertEqual(1, self.sync(self.db1, self.db2))
self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [(doc.doc_id, doc.rev)],
- 'source_uid': 'test1',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return': {'docs': [], 'last_gen': 1}})
+ {'receive':
+ {'docs': [(doc.doc_id, doc.rev)],
+ 'source_uid': 'test1',
+ 'source_gen': 1, 'last_known_gen': 0},
+ 'return': {'docs': [], 'last_gen': 1}})
def test_sync_ignores_superseded(self):
self.db1 = self.create_database('test1', 'both')
@@ -823,11 +833,13 @@ class DatabaseSyncTests(tests.DatabaseBaseTests,
doc_rev2 = doc.rev
self.sync(self.db2, self.db1)
self.assertLastExchangeLog(self.db1,
- {'receive': {'docs': [(doc.doc_id, doc_rev1)],
- 'source_uid': 'test2',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return': {'docs': [(doc.doc_id, doc_rev2)],
- 'last_gen': 2}})
+ {'receive':
+ {'docs': [(doc.doc_id, doc_rev1)],
+ 'source_uid': 'test2',
+ 'source_gen': 1, 'last_known_gen': 0},
+ 'return':
+ {'docs': [(doc.doc_id, doc_rev2)],
+ 'last_gen': 2}})
self.assertGetDoc(self.db1, doc.doc_id, doc_rev2, new_content, False)
def test_sync_sees_remote_conflicted(self):
@@ -843,11 +855,13 @@ class DatabaseSyncTests(tests.DatabaseBaseTests,
self.assertTransactionLog([doc1.doc_id], self.db1)
self.sync(self.db1, self.db2)
self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [(doc_id, doc1_rev)],
- 'source_uid': 'test1',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return': {'docs': [(doc_id, doc2_rev)],
- 'last_gen': 1}})
+ {'receive':
+ {'docs': [(doc_id, doc1_rev)],
+ 'source_uid': 'test1',
+ 'source_gen': 1, 'last_known_gen': 0},
+ 'return':
+ {'docs': [(doc_id, doc2_rev)],
+ 'last_gen': 1}})
self.assertTransactionLog([doc_id, doc_id], self.db1)
self.assertGetDoc(self.db1, doc_id, doc2_rev, new_doc, True)
self.assertGetDoc(self.db2, doc_id, doc2_rev, new_doc, False)
@@ -872,11 +886,12 @@ class DatabaseSyncTests(tests.DatabaseBaseTests,
self.assertTransactionLog([doc_id, doc_id], self.db1)
self.sync(self.db1, self.db2)
self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [(doc_id, doc1.rev)],
- 'source_uid': 'test1',
- 'source_gen': 2, 'last_known_gen': 1},
- 'return': {'docs': [(doc_id, doc2.rev)],
- 'last_gen': 2}})
+ {'receive':
+ {'docs': [(doc_id, doc1.rev)],
+ 'source_uid': 'test1',
+ 'source_gen': 2, 'last_known_gen': 1},
+ 'return': {'docs': [(doc_id, doc2.rev)],
+ 'last_gen': 2}})
self.assertTransactionLog([doc_id, doc_id, doc_id], self.db1)
self.assertGetDocIncludeDeleted(self.db1, doc_id, doc2.rev, None, True)
self.assertGetDocIncludeDeleted(
@@ -929,10 +944,11 @@ class DatabaseSyncTests(tests.DatabaseBaseTests,
deleted_rev = doc1.rev
self.sync(self.db1, self.db2)
self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [(doc_id, deleted_rev)],
- 'source_uid': 'test1',
- 'source_gen': 2, 'last_known_gen': 1},
- 'return': {'docs': [], 'last_gen': 2}})
+ {'receive':
+ {'docs': [(doc_id, deleted_rev)],
+ 'source_uid': 'test1',
+ 'source_gen': 2, 'last_known_gen': 1},
+ 'return': {'docs': [], 'last_gen': 2}})
self.assertGetDocIncludeDeleted(
self.db1, doc_id, deleted_rev, None, False)
self.assertGetDocIncludeDeleted(
@@ -941,10 +957,13 @@ class DatabaseSyncTests(tests.DatabaseBaseTests,
self.assertEqual([], self.db2.get_from_index('test-idx', 'value'))
self.sync(self.db2, self.db3)
self.assertLastExchangeLog(self.db3,
- {'receive': {'docs': [(doc_id, deleted_rev)],
- 'source_uid': 'test2',
- 'source_gen': 2, 'last_known_gen': 0},
- 'return': {'docs': [], 'last_gen': 2}})
+ {'receive':
+ {'docs': [(doc_id, deleted_rev)],
+ 'source_uid': 'test2',
+ 'source_gen': 2,
+ 'last_known_gen': 0},
+ 'return':
+ {'docs': [], 'last_gen': 2}})
self.assertGetDocIncludeDeleted(
self.db3, doc_id, deleted_rev, None, False)
@@ -1104,13 +1123,13 @@ class TestDbSync(tests.TestCaseWithServer):
('py-http', {
'make_app_with_state': make_http_app,
'make_database_for_test': tests.make_memory_database_for_test,
- }),
+ }),
('py-oauth-http', {
'make_app_with_state': make_oauth_http_app,
'make_database_for_test': tests.make_memory_database_for_test,
'oauth': True
- }),
- ]
+ }),
+ ]
oauth = False
@@ -1121,8 +1140,8 @@ class TestDbSync(tests.TestCaseWithServer):
'consumer_key': tests.consumer1.key,
'consumer_secret': tests.consumer1.secret,
'token_key': tests.token1.key,
- 'token_secret': tests.token1.secret
- }})
+ 'token_secret': tests.token1.secret,
+ }})
else:
path = target_name
extra = {}
@@ -1191,7 +1210,8 @@ class TestRemoteSyncIntegration(tests.TestCaseWithServer):
def set_sync_generation_witness1(other_uid, other_gen, trans_id):
progress1.append((other_uid, other_gen,
- [d for d, t in self.db1._get_transaction_log()[2:]]))
+ [d for d, t in
+ self.db1._get_transaction_log()[2:]]))
_do_set_replica_gen_and_trans_id(other_uid, other_gen, trans_id)
self.patch(self.db1, '_do_set_replica_gen_and_trans_id',
set_sync_generation_witness1)
@@ -1200,7 +1220,8 @@ class TestRemoteSyncIntegration(tests.TestCaseWithServer):
def set_sync_generation_witness2(other_uid, other_gen, trans_id):
progress2.append((other_uid, other_gen,
- [d for d, t in self.db2._get_transaction_log()[2:]]))
+ [d for d, t in
+ self.db2._get_transaction_log()[2:]]))
_do_set_replica_gen_and_trans_id2(other_uid, other_gen, trans_id)
self.patch(self.db2, '_do_set_replica_gen_and_trans_id',
set_sync_generation_witness2)
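
All of the assertLastExchangeLog reflows above compare against the same two-part structure; spelled out once with placeholder document ids and revisions:

    expected = {
        'receive': {                          # what the target received
            'docs': [('doc-id', 'replica:1')],
            'source_uid': 'test1',
            'source_gen': 1,
            'last_known_gen': 0,
        },
        'return': {                           # what the target sent back
            'docs': [],
            'last_gen': 1,
        },
    }
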
diff --git a/util.py b/util.py
index 040c70ab..319d28ab 100644
--- a/util.py
+++ b/util.py
@@ -2,17 +2,19 @@ import os
import gnupg
import re
+
class GPGWrapper(gnupg.GPG):
"""
This is a temporary class for handling GPG requests, and should be
replaced by a more general class used throughout the project.
"""
- GNUPG_HOME = os.environ['HOME'] + "/.config/leap/gnupg"
- GNUPG_BINARY = "/usr/bin/gpg" # this has to be changed based on OS
+ GNUPG_HOME = os.environ['HOME'] + "/.config/leap/gnupg"
+ GNUPG_BINARY = "/usr/bin/gpg" # this has to be changed based on OS
def __init__(self, gpghome=GNUPG_HOME, gpgbinary=GNUPG_BINARY):
- super(GPGWrapper, self).__init__(gnupghome=gpghome, gpgbinary=gpgbinary)
+ super(GPGWrapper, self).__init__(gnupghome=gpghome,
+ gpgbinary=gpgbinary)
def find_key(self, email):
"""