Diffstat (limited to 'testing/tests')
-rw-r--r--  testing/tests/client/test_app.py             |   8
-rw-r--r--  testing/tests/client/test_doc.py             |   4
-rw-r--r--  testing/tests/client/test_https.py           |   4
-rw-r--r--  testing/tests/conftest.py                    |  18
-rw-r--r--  testing/tests/couch/common.py                |  16
-rw-r--r--  testing/tests/couch/conftest.py              |  31
-rw-r--r--  testing/tests/couch/couchdb.ini.template     |  22
-rw-r--r--  testing/tests/couch/test_atomicity.py        |   7
-rw-r--r--  testing/tests/couch/test_backend.py          |   8
-rw-r--r--  testing/tests/couch/test_command.py          |  10
-rw-r--r--  testing/tests/couch/test_ddocs.py            | 157
-rw-r--r--  testing/tests/couch/test_state.py            |  25
-rw-r--r--  testing/tests/perf/assets/cert_default.conf  |  15
-rw-r--r--  testing/tests/perf/conftest.py               | 249
-rw-r--r--  testing/tests/perf/pytest.ini                |   2
-rw-r--r--  testing/tests/perf/test_crypto.py            |  81
-rw-r--r--  testing/tests/perf/test_encdecpool.py        |  78
-rw-r--r--  testing/tests/perf/test_misc.py              |   6
-rw-r--r--  testing/tests/perf/test_sqlcipher.py         |  38
-rw-r--r--  testing/tests/perf/test_sync.py              |  68
-rw-r--r--  testing/tests/server/test_server.py          |  14
-rw-r--r--  testing/tests/sqlcipher/test_backend.py      |  58
-rw-r--r--  testing/tests/sync/test_encdecpool.py        |  19
-rw-r--r--  testing/tests/sync/test_sync.py              |   5
-rw-r--r--  testing/tests/sync/test_sync_mutex.py        |   7
25 files changed, 670 insertions, 280 deletions
diff --git a/testing/tests/client/test_app.py b/testing/tests/client/test_app.py
index fef2f371..6867473e 100644
--- a/testing/tests/client/test_app.py
+++ b/testing/tests/client/test_app.py
@@ -17,6 +17,8 @@
"""
Test ObjectStore and Couch backend bits.
"""
+import pytest
+
from testscenarios import TestWithScenarios
from test_soledad.util import BaseSoledadTest
@@ -31,9 +33,15 @@ from test_soledad.u1db_tests import test_backends
# The following tests come from `u1db.tests.test_backends`.
# -----------------------------------------------------------------------------
+@pytest.mark.usefixtures('method_tmpdir')
class SoledadTests(
TestWithScenarios, test_backends.AllDatabaseTests, BaseSoledadTest):
+ def setUp(self):
+ TestWithScenarios.setUp(self)
+ test_backends.AllDatabaseTests.setUp(self)
+ BaseSoledadTest.setUp(self)
+
scenarios = [
('token_http', {
'make_database_for_test': make_token_http_database_for_test,
diff --git a/testing/tests/client/test_doc.py b/testing/tests/client/test_doc.py
index e158d768..36479e90 100644
--- a/testing/tests/client/test_doc.py
+++ b/testing/tests/client/test_doc.py
@@ -17,6 +17,8 @@
"""
Test Leap backend bits: soledad docs
"""
+import pytest
+
from testscenarios import TestWithScenarios
from test_soledad.u1db_tests import test_document
@@ -28,6 +30,7 @@ from test_soledad.util import make_soledad_document_for_test
# The following tests come from `u1db.tests.test_document`.
# -----------------------------------------------------------------------------
+@pytest.mark.usefixtures('method_tmpdir')
class TestSoledadDocument(
TestWithScenarios,
test_document.TestDocument, BaseSoledadTest):
@@ -37,6 +40,7 @@ class TestSoledadDocument(
'make_document_for_test': make_soledad_document_for_test})])
+@pytest.mark.usefixtures('method_tmpdir')
class TestSoledadPyDocument(
TestWithScenarios,
test_document.TestPyDocument, BaseSoledadTest):
diff --git a/testing/tests/client/test_https.py b/testing/tests/client/test_https.py
index caac16da..1b6caed6 100644
--- a/testing/tests/client/test_https.py
+++ b/testing/tests/client/test_https.py
@@ -17,7 +17,7 @@
"""
Test Leap backend bits: https
"""
-from unittest import skip
+import pytest
from testscenarios import TestWithScenarios
@@ -62,7 +62,7 @@ def token_leap_https_sync_target(test, host, path, cert_file=None):
return st
-@skip("Skiping tests imported from U1DB.")
+@pytest.mark.skip
class TestSoledadHTTPSyncTargetHttpsSupport(
TestWithScenarios,
# test_https.TestHttpSyncTargetHttpsSupport,
diff --git a/testing/tests/conftest.py b/testing/tests/conftest.py
new file mode 100644
index 00000000..9e4319ac
--- /dev/null
+++ b/testing/tests/conftest.py
@@ -0,0 +1,18 @@
+import pytest
+
+
+def pytest_addoption(parser):
+ parser.addoption(
+ "--couch-url", type="string", default="http://127.0.0.1:5984",
+ help="the url for the couch server to be used during tests")
+
+
+@pytest.fixture
+def couch_url(request):
+ url = request.config.getoption('--couch-url')
+ request.cls.couch_url = url
+
+
+@pytest.fixture
+def method_tmpdir(request, tmpdir):
+ request.instance.tempdir = tmpdir.strpath
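
Note on the two fixtures added above: `method_tmpdir` writes the temporary directory onto `request.instance`, and `couch_url` writes the server URL onto `request.cls`, which is how they reach unittest-style test classes that cannot receive fixtures as arguments. A minimal usage sketch, not part of this changeset (class and test names are hypothetical):

    import os
    import unittest

    import pytest


    @pytest.mark.usefixtures('method_tmpdir', 'couch_url')
    class ExampleFixtureUsage(unittest.TestCase):
        # hypothetical test class showing how the fixtures are consumed

        def test_tempdir_and_couch_url_are_injected(self):
            # method_tmpdir assigned a fresh per-test directory to self.tempdir
            self.assertTrue(os.path.isdir(self.tempdir))
            # couch_url stored the value of --couch-url on the test class
            self.assertTrue(self.couch_url.startswith('http'))
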
diff --git a/testing/tests/couch/common.py b/testing/tests/couch/common.py
index b08e1fa3..84790059 100644
--- a/testing/tests/couch/common.py
+++ b/testing/tests/couch/common.py
@@ -13,20 +13,17 @@ nested_doc = tests.nested_doc
def make_couch_database_for_test(test, replica_uid):
- port = str(test.couch_port)
dbname = ('test-%s' % uuid4().hex)
db = couch.CouchDatabase.open_database(
- urljoin('http://localhost:' + port, dbname),
+ urljoin(test.couch_url, dbname),
create=True,
- replica_uid=replica_uid or 'test',
- ensure_ddocs=True)
+ replica_uid=replica_uid or 'test')
test.addCleanup(test.delete_db, dbname)
return db
def copy_couch_database_for_test(test, db):
- port = str(test.couch_port)
- couch_url = 'http://localhost:' + port
+ couch_url = test.couch_url
new_dbname = db._dbname + '_copy'
new_db = couch.CouchDatabase.open_database(
urljoin(couch_url, new_dbname),
@@ -41,15 +38,10 @@ def copy_couch_database_for_test(test, db):
# bypass u1db_config document
if doc_id == 'u1db_config':
pass
- # copy design docs
- elif doc_id.startswith('_design'):
- del doc['_rev']
- new_couch_db.save(doc)
# copy u1db docs
elif 'u1db_rev' in doc:
new_doc = {
'_id': doc['_id'],
- 'u1db_transactions': doc['u1db_transactions'],
'u1db_rev': doc['u1db_rev']
}
attachments = []
@@ -65,6 +57,8 @@ def copy_couch_database_for_test(test, db):
if (att is not None):
new_couch_db.put_attachment(new_doc, att,
filename=att_name)
+ elif doc_id.startswith('gen-'):
+ new_couch_db.save(doc)
# cleanup connections to prevent file descriptor leaking
return new_db
diff --git a/testing/tests/couch/conftest.py b/testing/tests/couch/conftest.py
new file mode 100644
index 00000000..1074f091
--- /dev/null
+++ b/testing/tests/couch/conftest.py
@@ -0,0 +1,31 @@
+import couchdb
+import pytest
+import random
+import string
+
+
+@pytest.fixture
+def random_name():
+ return 'user-' + ''.join(
+ random.choice(
+ string.ascii_lowercase) for _ in range(10))
+
+
+class RandomDatabase(object):
+
+ def __init__(self, couch_url, name):
+ self.couch_url = couch_url
+ self.name = name
+ self.server = couchdb.client.Server(couch_url)
+ self.database = self.server.create(name)
+
+ def teardown(self):
+ self.server.delete(self.name)
+
+
+@pytest.fixture
+def db(random_name, request):
+ couch_url = request.config.getoption('--couch-url')
+ db = RandomDatabase(couch_url, random_name)
+ request.addfinalizer(db.teardown)
+ return db
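
The `db` fixture above hands each test a throwaway CouchDB database, named from `random_name`, and deletes it through the registered finalizer. A minimal sketch of a function-style test consuming it, in the spirit of `test_state.py` below (the assertion is illustrative only):

    def test_db_fixture_provides_fresh_database(db):
        # the fixture created a database named after random_name on the
        # server pointed to by --couch-url ...
        assert db.name in db.server
        # ... and registered db.teardown() to delete it after the test
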
diff --git a/testing/tests/couch/couchdb.ini.template b/testing/tests/couch/couchdb.ini.template
deleted file mode 100644
index 174d9d86..00000000
--- a/testing/tests/couch/couchdb.ini.template
+++ /dev/null
@@ -1,22 +0,0 @@
-; etc/couchdb/default.ini.tpl. Generated from default.ini.tpl.in by configure.
-
-; Upgrading CouchDB will overwrite this file.
-
-[couchdb]
-database_dir = %(tempdir)s/lib
-view_index_dir = %(tempdir)s/lib
-max_document_size = 4294967296 ; 4 GB
-os_process_timeout = 120000 ; 120 seconds. for view and external servers.
-max_dbs_open = 100
-delayed_commits = true ; set this to false to ensure an fsync before 201 Created is returned
-uri_file = %(tempdir)s/lib/couch.uri
-file_compression = snappy
-
-[log]
-file = %(tempdir)s/log/couch.log
-level = info
-include_sasl = true
-
-[httpd]
-port = 0
-bind_address = 127.0.0.1
diff --git a/testing/tests/couch/test_atomicity.py b/testing/tests/couch/test_atomicity.py
index aec9c6cf..a3ae0314 100644
--- a/testing/tests/couch/test_atomicity.py
+++ b/testing/tests/couch/test_atomicity.py
@@ -18,7 +18,7 @@
Test atomicity of couch operations.
"""
import os
-import tempfile
+import pytest
import threading
from urlparse import urljoin
@@ -41,6 +41,7 @@ from test_soledad.u1db_tests import TestCaseWithServer
REPEAT_TIMES = 20
+@pytest.mark.usefixtures('method_tmpdir')
class CouchAtomicityTestCase(CouchDBTestCase, TestCaseWithServer):
@staticmethod
@@ -90,9 +91,7 @@ class CouchAtomicityTestCase(CouchDBTestCase, TestCaseWithServer):
self.db = CouchDatabase.open_database(
urljoin(self.couch_url, 'user-' + self.user),
create=True,
- replica_uid='replica',
- ensure_ddocs=True)
- self.tempdir = tempfile.mkdtemp(prefix="leap_tests-")
+ replica_uid='replica')
self.startTwistedServer()
def tearDown(self):
diff --git a/testing/tests/couch/test_backend.py b/testing/tests/couch/test_backend.py
index f178e8a5..4fad11cf 100644
--- a/testing/tests/couch/test_backend.py
+++ b/testing/tests/couch/test_backend.py
@@ -39,12 +39,8 @@ class TestCouchBackendImpl(CouchDBTestCase):
def test__allocate_doc_id(self):
db = couch.CouchDatabase.open_database(
- urljoin(
- 'http://localhost:' + str(self.couch_port),
- ('test-%s' % uuid4().hex)
- ),
- create=True,
- ensure_ddocs=True)
+ urljoin(self.couch_url, 'test-%s' % uuid4().hex),
+ create=True)
doc_id1 = db._allocate_doc_id()
self.assertTrue(doc_id1.startswith('D-'))
self.assertEqual(34, len(doc_id1))
diff --git a/testing/tests/couch/test_command.py b/testing/tests/couch/test_command.py
index f61e118d..68097fb1 100644
--- a/testing/tests/couch/test_command.py
+++ b/testing/tests/couch/test_command.py
@@ -1,6 +1,6 @@
from twisted.trial import unittest
-from leap.soledad.common import couch
+from leap.soledad.common.couch import state as couch_state
from leap.soledad.common.l2db import errors as u1db_errors
from mock import Mock
@@ -9,7 +9,8 @@ from mock import Mock
class CommandBasedDBCreationTest(unittest.TestCase):
def test_ensure_db_using_custom_command(self):
- state = couch.state.CouchServerState("url", create_cmd="echo")
+ state = couch_state.CouchServerState(
+ "url", create_cmd="/bin/echo", check_schema_versions=False)
mock_db = Mock()
mock_db.replica_uid = 'replica_uid'
state.open_database = Mock(return_value=mock_db)
@@ -18,11 +19,12 @@ class CommandBasedDBCreationTest(unittest.TestCase):
self.assertEquals(mock_db.replica_uid, replica_uid)
def test_raises_unauthorized_on_failure(self):
- state = couch.state.CouchServerState("url", create_cmd="inexistent")
+ state = couch_state.CouchServerState(
+ "url", create_cmd="inexistent", check_schema_versions=False)
self.assertRaises(u1db_errors.Unauthorized,
state.ensure_database, "user-1337")
def test_raises_unauthorized_by_default(self):
- state = couch.state.CouchServerState("url")
+ state = couch_state.CouchServerState("url", check_schema_versions=False)
self.assertRaises(u1db_errors.Unauthorized,
state.ensure_database, "user-1337")
diff --git a/testing/tests/couch/test_ddocs.py b/testing/tests/couch/test_ddocs.py
index 9ff32633..3937f2de 100644
--- a/testing/tests/couch/test_ddocs.py
+++ b/testing/tests/couch/test_ddocs.py
@@ -1,6 +1,5 @@
from uuid import uuid4
-from leap.soledad.common.couch import errors
from leap.soledad.common import couch
from test_soledad.util import CouchDBTestCase
@@ -10,174 +9,27 @@ class CouchDesignDocsTests(CouchDBTestCase):
def setUp(self):
CouchDBTestCase.setUp(self)
+ self.create_db()
- def create_db(self, ensure=True, dbname=None):
+ def create_db(self, dbname=None):
if not dbname:
dbname = ('test-%s' % uuid4().hex)
if dbname not in self.couch_server:
self.couch_server.create(dbname)
self.db = couch.CouchDatabase(
- ('http://127.0.0.1:%d' % self.couch_port),
- dbname,
- ensure_ddocs=ensure)
+ (self.couch_url),
+ dbname)
def tearDown(self):
self.db.delete_database()
self.db.close()
CouchDBTestCase.tearDown(self)
- def test_missing_design_doc_raises(self):
- """
- Test that all methods that access design documents will raise if the
- design docs are not present.
- """
- self.create_db(ensure=False)
- # get_generation_info()
- self.assertRaises(
- errors.MissingDesignDocError,
- self.db.get_generation_info)
- # get_trans_id_for_gen()
- self.assertRaises(
- errors.MissingDesignDocError,
- self.db.get_trans_id_for_gen, 1)
- # get_transaction_log()
- self.assertRaises(
- errors.MissingDesignDocError,
- self.db.get_transaction_log)
- # whats_changed()
- self.assertRaises(
- errors.MissingDesignDocError,
- self.db.whats_changed)
-
- def test_missing_design_doc_functions_raises(self):
- """
- Test that all methods that access design documents list functions
- will raise if the functions are not present.
- """
- self.create_db(ensure=True)
- # erase views from _design/transactions
- transactions = self.db._database['_design/transactions']
- transactions['lists'] = {}
- self.db._database.save(transactions)
- # get_generation_info()
- self.assertRaises(
- errors.MissingDesignDocListFunctionError,
- self.db.get_generation_info)
- # get_trans_id_for_gen()
- self.assertRaises(
- errors.MissingDesignDocListFunctionError,
- self.db.get_trans_id_for_gen, 1)
- # whats_changed()
- self.assertRaises(
- errors.MissingDesignDocListFunctionError,
- self.db.whats_changed)
-
- def test_absent_design_doc_functions_raises(self):
- """
- Test that all methods that access design documents list functions
- will raise if the functions are not present.
- """
- self.create_db(ensure=True)
- # erase views from _design/transactions
- transactions = self.db._database['_design/transactions']
- del transactions['lists']
- self.db._database.save(transactions)
- # get_generation_info()
- self.assertRaises(
- errors.MissingDesignDocListFunctionError,
- self.db.get_generation_info)
- # _get_trans_id_for_gen()
- self.assertRaises(
- errors.MissingDesignDocListFunctionError,
- self.db.get_trans_id_for_gen, 1)
- # whats_changed()
- self.assertRaises(
- errors.MissingDesignDocListFunctionError,
- self.db.whats_changed)
-
- def test_missing_design_doc_named_views_raises(self):
- """
- Test that all methods that access design documents' named views will
- raise if the views are not present.
- """
- self.create_db(ensure=True)
- # erase views from _design/docs
- docs = self.db._database['_design/docs']
- del docs['views']
- self.db._database.save(docs)
- # erase views from _design/syncs
- syncs = self.db._database['_design/syncs']
- del syncs['views']
- self.db._database.save(syncs)
- # erase views from _design/transactions
- transactions = self.db._database['_design/transactions']
- del transactions['views']
- self.db._database.save(transactions)
- # get_generation_info()
- self.assertRaises(
- errors.MissingDesignDocNamedViewError,
- self.db.get_generation_info)
- # _get_trans_id_for_gen()
- self.assertRaises(
- errors.MissingDesignDocNamedViewError,
- self.db.get_trans_id_for_gen, 1)
- # _get_transaction_log()
- self.assertRaises(
- errors.MissingDesignDocNamedViewError,
- self.db.get_transaction_log)
- # whats_changed()
- self.assertRaises(
- errors.MissingDesignDocNamedViewError,
- self.db.whats_changed)
-
- def test_deleted_design_doc_raises(self):
- """
- Test that all methods that access design documents will raise if the
- design docs are not present.
- """
- self.create_db(ensure=True)
- # delete _design/docs
- del self.db._database['_design/docs']
- # delete _design/syncs
- del self.db._database['_design/syncs']
- # delete _design/transactions
- del self.db._database['_design/transactions']
- # get_generation_info()
- self.assertRaises(
- errors.MissingDesignDocDeletedError,
- self.db.get_generation_info)
- # get_trans_id_for_gen()
- self.assertRaises(
- errors.MissingDesignDocDeletedError,
- self.db.get_trans_id_for_gen, 1)
- # get_transaction_log()
- self.assertRaises(
- errors.MissingDesignDocDeletedError,
- self.db.get_transaction_log)
- # whats_changed()
- self.assertRaises(
- errors.MissingDesignDocDeletedError,
- self.db.whats_changed)
-
- def test_ensure_ddoc_independently(self):
- """
- Test that a missing ddocs other than _design/docs will be ensured
- even if _design/docs is there.
- """
- self.create_db(ensure=True)
- del self.db._database['_design/transactions']
- self.assertRaises(
- errors.MissingDesignDocDeletedError,
- self.db.get_transaction_log)
- self.create_db(ensure=True, dbname=self.db._dbname)
- self.db.get_transaction_log()
-
def test_ensure_security_doc(self):
"""
Ensure_security creates a _security ddoc to ensure that only soledad
will have the lowest privileged access to a user db.
"""
- self.create_db(ensure=False)
self.assertFalse(self.db._database.resource.get_json('_security')[2])
self.db.ensure_security_ddoc()
security_ddoc = self.db._database.resource.get_json('_security')[2]
@@ -190,7 +42,6 @@ class CouchDesignDocsTests(CouchDBTestCase):
"""
Given a configuration, follow it to create the security document
"""
- self.create_db(ensure=False)
configuration = {'members': ['user1', 'user2'],
'members_roles': ['role1', 'role2'],
'admins': ['admin'],
diff --git a/testing/tests/couch/test_state.py b/testing/tests/couch/test_state.py
new file mode 100644
index 00000000..e293b5b8
--- /dev/null
+++ b/testing/tests/couch/test_state.py
@@ -0,0 +1,25 @@
+import pytest
+
+from leap.soledad.common.couch import CONFIG_DOC_ID
+from leap.soledad.common.couch import SCHEMA_VERSION
+from leap.soledad.common.couch import SCHEMA_VERSION_KEY
+from leap.soledad.common.couch.state import CouchServerState
+
+from leap.soledad.common.errors import WrongCouchSchemaVersionError
+from leap.soledad.common.errors import MissingCouchConfigDocumentError
+
+
+def test_wrong_couch_version_raises(db):
+ wrong_schema_version = SCHEMA_VERSION + 1
+ db.database.create(
+ {'_id': CONFIG_DOC_ID, SCHEMA_VERSION_KEY: wrong_schema_version})
+ with pytest.raises(WrongCouchSchemaVersionError):
+ CouchServerState(db.couch_url, create_cmd='/bin/echo',
+ check_schema_versions=True)
+
+
+def test_missing_config_doc_raises(db):
+ db.database.create({})
+ with pytest.raises(MissingCouchConfigDocumentError):
+ CouchServerState(db.couch_url, create_cmd='/bin/echo',
+ check_schema_versions=True)

diff --git a/testing/tests/perf/assets/cert_default.conf b/testing/tests/perf/assets/cert_default.conf
new file mode 100644
index 00000000..8043cea3
--- /dev/null
+++ b/testing/tests/perf/assets/cert_default.conf
@@ -0,0 +1,15 @@
+[ req ]
+default_bits = 1024
+default_keyfile = keyfile.pem
+distinguished_name = req_distinguished_name
+prompt = no
+output_password = mypass
+
+[ req_distinguished_name ]
+C = GB
+ST = Test State or Province
+L = Test Locality
+O = Organization Name
+OU = Organizational Unit Name
+CN = localhost
+emailAddress = test@email.address
diff --git a/testing/tests/perf/conftest.py b/testing/tests/perf/conftest.py
new file mode 100644
index 00000000..5ac1f3c0
--- /dev/null
+++ b/testing/tests/perf/conftest.py
@@ -0,0 +1,249 @@
+import json
+import os
+import pytest
+import requests
+import random
+import base64
+import signal
+import time
+
+from hashlib import sha512
+from uuid import uuid4
+from subprocess import call
+from urlparse import urljoin
+from twisted.internet import threads, reactor
+
+from leap.soledad.client import Soledad
+from leap.soledad.common.couch import CouchDatabase
+
+
+# we have to manually set up the events server in order to be able to signal
+# events. This is usually done by the enclosing application using soledad
+# client (i.e. bitmask client).
+from leap.common.events import server
+server.ensure_server()
+
+
+def pytest_addoption(parser):
+ parser.addoption(
+ "--couch-url", type="string", default="http://127.0.0.1:5984",
+ help="the url for the couch server to be used during tests")
+ parser.addoption(
+ "--num-docs", type="int", default=100,
+ help="the number of documents to use in performance tests")
+
+
+#
+# default options for all tests
+#
+
+DEFAULT_PASSPHRASE = '123'
+
+DEFAULT_URL = 'http://127.0.0.1:2424'
+DEFAULT_PRIVKEY = 'soledad_privkey.pem'
+DEFAULT_CERTKEY = 'soledad_certkey.pem'
+DEFAULT_TOKEN = 'an-auth-token'
+
+
+@pytest.fixture()
+def payload():
+ def generate(size):
+ random.seed(1337) # same seed to avoid different bench results
+ payload_bytes = bytearray(random.getrandbits(8) for _ in xrange(size))
+ # encode as base64 to avoid ascii encode/decode errors
+ return base64.b64encode(payload_bytes)[:size] # remove b64 overhead
+ return generate
+
+
+#
+# soledad_dbs fixture: provides all databases needed by soledad server in a per
+# module scope (same databases for all tests in this module).
+#
+
+def _token_dbname():
+ dbname = 'tokens_' + \
+ str(int(time.time() / (30 * 24 * 3600)))
+ return dbname
+
+
+class SoledadDatabases(object):
+
+ def __init__(self, url):
+ self._token_db_url = urljoin(url, _token_dbname())
+ self._shared_db_url = urljoin(url, 'shared')
+
+ def setup(self, uuid):
+ self._create_dbs()
+ self._add_token(uuid)
+
+ def _create_dbs(self):
+ requests.put(self._token_db_url)
+ requests.put(self._shared_db_url)
+
+ def _add_token(self, uuid):
+ token = sha512(DEFAULT_TOKEN).hexdigest()
+ content = {'type': 'Token', 'user_id': uuid}
+ requests.put(
+ self._token_db_url + '/' + token, data=json.dumps(content))
+
+ def teardown(self):
+ requests.delete(self._token_db_url)
+ requests.delete(self._shared_db_url)
+
+
+@pytest.fixture()
+def soledad_dbs(request):
+ couch_url = request.config.option.couch_url
+
+ def create(uuid):
+ db = SoledadDatabases(couch_url)
+ request.addfinalizer(db.teardown)
+ return db.setup(uuid)
+ return create
+
+
+#
+# remote_db fixture: provides an empty database for a given user in a per
+# function scope.
+#
+
+class UserDatabase(object):
+
+ def __init__(self, url, uuid):
+ self._remote_db_url = urljoin(url, 'user-%s' % uuid)
+
+ def setup(self):
+ return CouchDatabase.open_database(
+ url=self._remote_db_url, create=True, replica_uid=None)
+
+ def teardown(self):
+ requests.delete(self._remote_db_url)
+
+
+@pytest.fixture()
+def remote_db(request):
+ couch_url = request.config.option.couch_url
+
+ def create(uuid):
+ db = UserDatabase(couch_url, uuid)
+ request.addfinalizer(db.teardown)
+ return db.setup()
+ return create
+
+
+def get_pid(pidfile):
+ if not os.path.isfile(pidfile):
+ return 0
+ try:
+ with open(pidfile) as f:
+ return int(f.read())
+ except IOError:
+ return 0
+
+
+#
+# soledad_server fixture: provides a running soledad server in a per module
+# context (same soledad server for all tests in this module).
+#
+
+class SoledadServer(object):
+
+ def __init__(self, tmpdir_factory, couch_url):
+ tmpdir = tmpdir_factory.mktemp('soledad-server')
+ self._pidfile = os.path.join(tmpdir.strpath, 'soledad-server.pid')
+ self._logfile = os.path.join(tmpdir.strpath, 'soledad-server.log')
+ self._couch_url = couch_url
+
+ def start(self):
+ self._create_conf_file()
+ # start the server
+ call([
+ 'twistd',
+ '--logfile=%s' % self._logfile,
+ '--pidfile=%s' % self._pidfile,
+ 'web',
+ '--wsgi=leap.soledad.server.application',
+ '--port=2424'
+ ])
+
+ def _create_conf_file(self):
+ if not os.access('/etc', os.W_OK):
+ return
+ if not os.path.isdir('/etc/soledad'):
+ os.mkdir('/etc/soledad')
+ with open('/etc/soledad/soledad-server.conf', 'w') as f:
+ content = '[soledad-server]\ncouch_url = %s' % self._couch_url
+ f.write(content)
+
+ def stop(self):
+ pid = get_pid(self._pidfile)
+ os.kill(pid, signal.SIGKILL)
+
+
+@pytest.fixture(scope='module')
+def soledad_server(tmpdir_factory, request):
+ couch_url = request.config.option.couch_url
+ server = SoledadServer(tmpdir_factory, couch_url)
+ server.start()
+ request.addfinalizer(server.stop)
+ return server
+
+
+@pytest.fixture()
+def txbenchmark(benchmark):
+ def blockOnThread(*args, **kwargs):
+ return threads.deferToThread(
+ benchmark, threads.blockingCallFromThread,
+ reactor, *args, **kwargs)
+ return blockOnThread
+
+
+@pytest.fixture()
+def txbenchmark_with_setup(benchmark):
+ def blockOnThreadWithSetup(setup, f):
+ def blocking_runner(*args, **kwargs):
+ return threads.blockingCallFromThread(reactor, f, *args, **kwargs)
+
+ def blocking_setup():
+ args = threads.blockingCallFromThread(reactor, setup)
+ try:
+ return tuple(arg for arg in args), {}
+ except TypeError:
+ return ((args,), {}) if args else None
+
+ def bench():
+ return benchmark.pedantic(blocking_runner, setup=blocking_setup,
+ rounds=4, warmup_rounds=1)
+ return threads.deferToThread(bench)
+ return blockOnThreadWithSetup
+
+
+#
+# soledad_client fixture: provides a clean soledad client for a test function.
+#
+
+@pytest.fixture()
+def soledad_client(tmpdir, soledad_server, remote_db, soledad_dbs, request):
+ passphrase = DEFAULT_PASSPHRASE
+ server_url = DEFAULT_URL
+ token = DEFAULT_TOKEN
+ default_uuid = uuid4().hex
+ remote_db(default_uuid)
+ soledad_dbs(default_uuid)
+
+ # get a soledad instance
+ def create():
+ secrets_path = os.path.join(tmpdir.strpath, '%s.secret' % uuid4().hex)
+ local_db_path = os.path.join(tmpdir.strpath, '%s.db' % uuid4().hex)
+ soledad_client = Soledad(
+ default_uuid,
+ unicode(passphrase),
+ secrets_path=secrets_path,
+ local_db_path=local_db_path,
+ server_url=server_url,
+ cert_file=None,
+ auth_token=token,
+ defer_encryption=True)
+ request.addfinalizer(soledad_client.close)
+ return soledad_client
+ return create
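
For orientation, the benchmark fixtures above combine as in the perf tests that follow: `soledad_client` builds a fresh client per call, and `txbenchmark_with_setup` runs a Twisted-returning callable under pytest-benchmark with a per-round setup executed through the reactor thread. A rough usage sketch, assuming the same pytest-twisted configuration as the rest of this changeset (names are illustrative):

    import pytest


    @pytest.inlineCallbacks
    @pytest.mark.benchmark(group="example_sync")
    def test_example_sync(soledad_client, txbenchmark_with_setup, payload):
        # create one document outside the measured region and push it
        client = soledad_client()
        yield client.create_doc({'content': payload(10 * 1000)})
        yield client.sync()

        def setup():
            # runs before each measured round, in the reactor thread
            return soledad_client()

        def run(clean_client):
            # the measured call: a full sync against the local server
            return clean_client.sync()

        yield txbenchmark_with_setup(setup, run)
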
diff --git a/testing/tests/perf/pytest.ini b/testing/tests/perf/pytest.ini
new file mode 100644
index 00000000..7a0508ce
--- /dev/null
+++ b/testing/tests/perf/pytest.ini
@@ -0,0 +1,2 @@
+[pytest]
+twisted = yes
diff --git a/testing/tests/perf/test_crypto.py b/testing/tests/perf/test_crypto.py
new file mode 100644
index 00000000..be00560b
--- /dev/null
+++ b/testing/tests/perf/test_crypto.py
@@ -0,0 +1,81 @@
+import pytest
+import json
+from uuid import uuid4
+from leap.soledad.common.document import SoledadDocument
+from leap.soledad.client.crypto import encrypt_sym
+from leap.soledad.client.crypto import decrypt_sym
+
+
+def create_doc_encryption(size):
+ @pytest.mark.benchmark(group="test_crypto_encrypt_doc")
+ def test_doc_encryption(soledad_client, benchmark, payload):
+ crypto = soledad_client()._crypto
+
+ DOC_CONTENT = {'payload': payload(size)}
+ doc = SoledadDocument(
+ doc_id=uuid4().hex, rev='rev',
+ json=json.dumps(DOC_CONTENT))
+
+ benchmark(crypto.encrypt_doc, doc)
+ return test_doc_encryption
+
+
+def create_doc_decryption(size):
+ @pytest.mark.benchmark(group="test_crypto_decrypt_doc")
+ def test_doc_decryption(soledad_client, benchmark, payload):
+ crypto = soledad_client()._crypto
+
+ DOC_CONTENT = {'payload': payload(size)}
+ doc = SoledadDocument(
+ doc_id=uuid4().hex, rev='rev',
+ json=json.dumps(DOC_CONTENT))
+ encrypted_doc = crypto.encrypt_doc(doc)
+ doc.set_json(encrypted_doc)
+
+ benchmark(crypto.decrypt_doc, doc)
+ return test_doc_decryption
+
+
+test_encrypt_doc_10k = create_doc_encryption(10*1000)
+test_encrypt_doc_100k = create_doc_encryption(100*1000)
+test_encrypt_doc_500k = create_doc_encryption(500*1000)
+test_encrypt_doc_1M = create_doc_encryption(1000*1000)
+test_encrypt_doc_10M = create_doc_encryption(10*1000*1000)
+test_encrypt_doc_50M = create_doc_encryption(50*1000*1000)
+test_decrypt_doc_10k = create_doc_decryption(10*1000)
+test_decrypt_doc_100k = create_doc_decryption(100*1000)
+test_decrypt_doc_500k = create_doc_decryption(500*1000)
+test_decrypt_doc_1M = create_doc_decryption(1000*1000)
+test_decrypt_doc_10M = create_doc_decryption(10*1000*1000)
+test_decrypt_doc_50M = create_doc_decryption(50*1000*1000)
+
+
+def create_raw_encryption(size):
+ @pytest.mark.benchmark(group="test_crypto_raw_encrypt")
+ def test_raw_encrypt(benchmark, payload):
+ key = payload(32)
+ benchmark(encrypt_sym, payload(size), key)
+ return test_raw_encrypt
+
+
+def create_raw_decryption(size):
+ @pytest.mark.benchmark(group="test_crypto_raw_decrypt")
+ def test_raw_decrypt(benchmark, payload):
+ key = payload(32)
+ iv, ciphertext = encrypt_sym(payload(size), key)
+ benchmark(decrypt_sym, ciphertext, key, iv)
+ return test_raw_decrypt
+
+
+test_encrypt_raw_10k = create_raw_encryption(10*1000)
+test_encrypt_raw_100k = create_raw_encryption(100*1000)
+test_encrypt_raw_500k = create_raw_encryption(500*1000)
+test_encrypt_raw_1M = create_raw_encryption(1000*1000)
+test_encrypt_raw_10M = create_raw_encryption(10*1000*1000)
+test_encrypt_raw_50M = create_raw_encryption(50*1000*1000)
+test_decrypt_raw_10k = create_raw_decryption(10*1000)
+test_decrypt_raw_100k = create_raw_decryption(100*1000)
+test_decrypt_raw_500k = create_raw_decryption(500*1000)
+test_decrypt_raw_1M = create_raw_decryption(1000*1000)
+test_decrypt_raw_10M = create_raw_decryption(10*1000*1000)
+test_decrypt_raw_50M = create_raw_decryption(50*1000*1000)
diff --git a/testing/tests/perf/test_encdecpool.py b/testing/tests/perf/test_encdecpool.py
new file mode 100644
index 00000000..77091a41
--- /dev/null
+++ b/testing/tests/perf/test_encdecpool.py
@@ -0,0 +1,78 @@
+import pytest
+import json
+from uuid import uuid4
+from twisted.internet.defer import gatherResults
+from leap.soledad.client.encdecpool import SyncEncrypterPool
+from leap.soledad.client.encdecpool import SyncDecrypterPool
+from leap.soledad.common.document import SoledadDocument
+# FIXME: test load is low due to issue #7370; higher values will run out of memory
+
+
+def create_encrypt(amount, size):
+ @pytest.mark.benchmark(group="test_pool_encrypt")
+ @pytest.inlineCallbacks
+ def test(soledad_client, txbenchmark_with_setup, request, payload):
+ DOC_CONTENT = {'payload': payload(size)}
+
+ def setup():
+ client = soledad_client()
+ pool = SyncEncrypterPool(client._crypto, client._sync_db)
+ pool.start()
+ request.addfinalizer(pool.stop)
+ docs = [
+ SoledadDocument(doc_id=uuid4().hex, rev='rev',
+ json=json.dumps(DOC_CONTENT))
+ for _ in xrange(amount)
+ ]
+ return pool, docs
+
+ @pytest.inlineCallbacks
+ def put_and_wait(pool, docs):
+ yield gatherResults([pool.encrypt_doc(doc) for doc in docs])
+
+ yield txbenchmark_with_setup(setup, put_and_wait)
+ return test
+
+test_encdecpool_encrypt_100_10k = create_encrypt(100, 10*1000)
+test_encdecpool_encrypt_100_100k = create_encrypt(100, 100*1000)
+test_encdecpool_encrypt_100_500k = create_encrypt(100, 500*1000)
+
+
+def create_decrypt(amount, size):
+ @pytest.mark.benchmark(group="test_pool_decrypt")
+ @pytest.inlineCallbacks
+ def test(soledad_client, txbenchmark_with_setup, request, payload):
+ DOC_CONTENT = {'payload': payload(size)}
+ client = soledad_client()
+
+ def setup():
+ pool = SyncDecrypterPool(
+ client._crypto,
+ client._sync_db,
+ source_replica_uid=client._dbpool.replica_uid,
+ insert_doc_cb=lambda x, y, z: False) # ignored
+ pool.start(amount)
+ request.addfinalizer(pool.stop)
+ crypto = client._crypto
+ docs = []
+ for _ in xrange(amount):
+ doc = SoledadDocument(
+ doc_id=uuid4().hex, rev='rev',
+ json=json.dumps(DOC_CONTENT))
+ encrypted_content = json.loads(crypto.encrypt_doc(doc))
+ docs.append((doc.doc_id, encrypted_content))
+ return pool, docs
+
+ def put_and_wait(pool, docs):
+ deferreds = [] # fires on completion
+ for idx, (doc_id, content) in enumerate(docs, 1):
+ deferreds.append(pool.insert_encrypted_received_doc(
+ doc_id, 'rev', content, idx, "trans_id", idx))
+ return gatherResults(deferreds)
+
+ yield txbenchmark_with_setup(setup, put_and_wait)
+ return test
+
+test_encdecpool_decrypt_100_10k = create_decrypt(100, 10*1000)
+test_encdecpool_decrypt_100_100k = create_decrypt(100, 100*1000)
+test_encdecpool_decrypt_100_500k = create_decrypt(100, 500*1000)
diff --git a/testing/tests/perf/test_misc.py b/testing/tests/perf/test_misc.py
new file mode 100644
index 00000000..ead48adf
--- /dev/null
+++ b/testing/tests/perf/test_misc.py
@@ -0,0 +1,6 @@
+import pytest
+
+
+@pytest.mark.benchmark(group="test_instance")
+def test_initialization(soledad_client, benchmark):
+ benchmark(soledad_client)
diff --git a/testing/tests/perf/test_sqlcipher.py b/testing/tests/perf/test_sqlcipher.py
new file mode 100644
index 00000000..e7a54228
--- /dev/null
+++ b/testing/tests/perf/test_sqlcipher.py
@@ -0,0 +1,38 @@
+'''
+Tests SoledadClient/SQLCipher interaction
+'''
+import pytest
+
+from twisted.internet.defer import gatherResults
+
+
+def load_up(client, amount, payload, defer=True):
+ results = [client.create_doc({'content': payload}) for _ in xrange(amount)]
+ if defer:
+ return gatherResults(results)
+
+
+def build_test_sqlcipher_async_create(amount, size):
+ @pytest.inlineCallbacks
+ @pytest.mark.benchmark(group="test_sqlcipher_async_create")
+ def test(soledad_client, txbenchmark, payload):
+ client = soledad_client()
+ yield txbenchmark(load_up, client, amount, payload(size))
+ return test
+
+
+def build_test_sqlcipher_create(amount, size):
+ @pytest.mark.benchmark(group="test_sqlcipher_create")
+ def test(soledad_client, benchmark, payload):
+ client = soledad_client()._dbsyncer
+ benchmark(load_up, client, amount, payload(size), defer=False)
+ return test
+
+
+test_async_create_20_500k = build_test_sqlcipher_async_create(20, 500*1000)
+test_async_create_100_100k = build_test_sqlcipher_async_create(100, 100*1000)
+test_async_create_1000_10k = build_test_sqlcipher_async_create(1000, 10*1000)
+# synchronous
+test_create_20_500k = build_test_sqlcipher_create(20, 500*1000)
+test_create_100_100k = build_test_sqlcipher_create(100, 100*1000)
+test_create_1000_10k = build_test_sqlcipher_create(1000, 10*1000)
diff --git a/testing/tests/perf/test_sync.py b/testing/tests/perf/test_sync.py
new file mode 100644
index 00000000..0b48a0b9
--- /dev/null
+++ b/testing/tests/perf/test_sync.py
@@ -0,0 +1,68 @@
+import pytest
+
+from twisted.internet.defer import gatherResults
+
+
+def load_up(client, amount, payload):
+ deferreds = []
+ # create a bunch of local documents
+ for i in xrange(amount):
+ d = client.create_doc({'content': payload})
+ deferreds.append(d)
+ d = gatherResults(deferreds)
+ d.addCallback(lambda _: None)
+ return d
+
+
+def create_upload(uploads, size):
+ @pytest.inlineCallbacks
+ @pytest.mark.benchmark(group="test_upload")
+ def test(soledad_client, txbenchmark_with_setup, payload):
+ client = soledad_client()
+
+ def setup():
+ return load_up(client, uploads, payload(size))
+
+ yield txbenchmark_with_setup(setup, client.sync)
+ return test
+
+
+test_upload_20_500k = create_upload(20, 500*1000)
+test_upload_100_100k = create_upload(100, 100*1000)
+test_upload_1000_10k = create_upload(1000, 10*1000)
+
+
+def create_download(downloads, size):
+ @pytest.inlineCallbacks
+ @pytest.mark.benchmark(group="test_download")
+ def test(soledad_client, txbenchmark_with_setup, payload):
+ client = soledad_client()
+
+ yield load_up(client, downloads, payload(size))
+ yield client.sync()
+ # We could create them directly on couch, but sending them
+ # ensures we are dealing with properly encrypted docs
+
+ def setup():
+ return soledad_client()
+
+ def sync(clean_client):
+ return clean_client.sync()
+ yield txbenchmark_with_setup(setup, sync)
+ return test
+
+
+test_download_20_500k = create_download(20, 500*1000)
+test_download_100_100k = create_download(100, 100*1000)
+test_download_1000_10k = create_download(1000, 10*1000)
+
+
+@pytest.inlineCallbacks
+@pytest.mark.benchmark(group="test_nothing_to_sync")
+def test_nothing_to_sync(soledad_client, txbenchmark_with_setup):
+ def setup():
+ return soledad_client()
+
+ def sync(clean_client):
+ return clean_client.sync()
+ yield txbenchmark_with_setup(setup, sync)
diff --git a/testing/tests/server/test_server.py b/testing/tests/server/test_server.py
index b99d1939..6bbcf002 100644
--- a/testing/tests/server/test_server.py
+++ b/testing/tests/server/test_server.py
@@ -20,7 +20,7 @@ Tests for server-related functionality.
import binascii
import mock
import os
-import tempfile
+import pytest
from hashlib import sha512
from pkg_resources import resource_filename
@@ -43,8 +43,8 @@ from test_soledad.util import (
from leap.soledad.common import crypto
from leap.soledad.client import Soledad
-from leap.soledad.server import load_configuration
-from leap.soledad.server import CONFIG_DEFAULTS
+from leap.soledad.server.config import load_configuration
+from leap.soledad.server.config import CONFIG_DEFAULTS
from leap.soledad.server.auth import URLToAuthorization
from leap.soledad.server.auth import SoledadTokenAuthMiddleware
@@ -287,6 +287,7 @@ class ServerAuthorizationTestCase(BaseSoledadTest):
self._make_environ('/%s/sync-from/x' % dbname, 'POST')))
+@pytest.mark.usefixtures("method_tmpdir")
class EncryptedSyncTestCase(
CouchDBTestCase, TestCaseWithServer):
@@ -349,11 +350,7 @@ class EncryptedSyncTestCase(
return self.make_app_with_state(self.request_state)
def setUp(self):
- # the order of the following initializations is crucial because of
- # dependencies.
- # XXX explain better
CouchDBTestCase.setUp(self)
- self.tempdir = tempfile.mkdtemp(prefix="leap_tests-")
TestCaseWithServer.setUp(self)
def tearDown(self):
@@ -391,8 +388,7 @@ class EncryptedSyncTestCase(
# ensure remote db exists before syncing
db = CouchDatabase.open_database(
urljoin(self.couch_url, 'user-' + user),
- create=True,
- ensure_ddocs=True)
+ create=True)
def _db1AssertEmptyDocList(results):
_, doclist = results
diff --git a/testing/tests/sqlcipher/test_backend.py b/testing/tests/sqlcipher/test_backend.py
index 11472d46..caacba0d 100644
--- a/testing/tests/sqlcipher/test_backend.py
+++ b/testing/tests/sqlcipher/test_backend.py
@@ -18,10 +18,9 @@
Test sqlcipher backend internals.
"""
import os
+import pytest
import time
import threading
-import tempfile
-import shutil
from pysqlcipher import dbapi2
from testscenarios import TestWithScenarios
@@ -33,7 +32,6 @@ from leap.soledad.common.l2db.backends.sqlite_backend \
import SQLitePartialExpandDatabase
# soledad stuff.
-from leap.soledad.common import soledad_assert
from leap.soledad.common.document import SoledadDocument
from leap.soledad.client.sqlcipher import SQLCipherDatabase
from leap.soledad.client.sqlcipher import SQLCipherOptions
@@ -109,6 +107,7 @@ class SQLCipherIndexTests(
# The following tests come from `u1db.tests.test_sqlite_backend`.
# -----------------------------------------------------------------------------
+@pytest.mark.usefixtures('method_tmpdir')
class TestSQLCipherDatabase(tests.TestCase):
"""
Tests from u1db.tests.test_sqlite_backend.TestSQLiteDatabase.
@@ -117,8 +116,7 @@ class TestSQLCipherDatabase(tests.TestCase):
def test_atomic_initialize(self):
# This test was modified to ensure that db2.close() is called within
# the thread that created the database.
- tmpdir = self.createTempDir()
- dbname = os.path.join(tmpdir, 'atomic.db')
+ dbname = os.path.join(self.tempdir, 'atomic.db')
t2 = None # will be a thread
@@ -164,6 +162,7 @@ class TestSQLCipherDatabase(tests.TestCase):
db1.close()
+@pytest.mark.usefixtures('method_tmpdir')
class TestSQLCipherPartialExpandDatabase(tests.TestCase):
"""
Tests from u1db.tests.test_sqlite_backend.TestSQLitePartialExpandDatabase.
@@ -226,8 +225,7 @@ class TestSQLCipherPartialExpandDatabase(tests.TestCase):
pass
def test__open_database_non_existent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/non-existent.sqlite'
+ path = self.tempdir + '/non-existent.sqlite'
self.assertRaises(errors.DatabaseDoesNotExist,
sqlcipher_open,
path, PASSWORD, create=False)
@@ -243,8 +241,7 @@ class TestSQLCipherPartialExpandDatabase(tests.TestCase):
# This test was modified to ensure that an empty database file will
# raise a DatabaseIsNotEncrypted exception instead of a
# dbapi2.OperationalError exception.
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path1 = temp_dir + '/invalid1.db'
+ path1 = self.tempdir + '/invalid1.db'
with open(path1, 'wb') as f:
f.write("")
self.assertRaises(DatabaseIsNotEncrypted,
@@ -270,8 +267,7 @@ class TestSQLCipherPartialExpandDatabase(tests.TestCase):
def test_open_database_create(self):
# SQLCipherDatabase has no open_database() method, so we just test for
# the actual database constructor effects.
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/new.sqlite'
+ path = self.tempdir + '/new.sqlite'
db1 = sqlcipher_open(path, PASSWORD, create=True)
db2 = sqlcipher_open(path, PASSWORD, create=False)
self.assertIsInstance(db2, SQLCipherDatabase)
@@ -395,8 +391,7 @@ class TestSQLCipherPartialExpandDatabase(tests.TestCase):
c.fetchall())
def test__ensure_schema_rollback(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/rollback.db'
+ path = self.tempdir + '/rollback.db'
class SQLitePartialExpandDbTesting(SQLCipherDatabase):
@@ -414,15 +409,13 @@ class TestSQLCipherPartialExpandDatabase(tests.TestCase):
db._initialize(db._db_handle.cursor())
def test_open_database_non_existent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/non-existent.sqlite'
+ path = self.tempdir + '/non-existent.sqlite'
self.assertRaises(errors.DatabaseDoesNotExist,
sqlcipher_open, path, "123",
create=False)
def test_delete_database_existent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/new.sqlite'
+ path = self.tempdir + '/new.sqlite'
db = sqlcipher_open(path, "123", create=True)
db.close()
SQLCipherDatabase.delete_database(path)
@@ -431,8 +424,7 @@ class TestSQLCipherPartialExpandDatabase(tests.TestCase):
create=False)
def test_delete_database_nonexistent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/non-existent.sqlite'
+ path = self.tempdir + '/non-existent.sqlite'
self.assertRaises(errors.DatabaseDoesNotExist,
SQLCipherDatabase.delete_database, path)
@@ -630,37 +622,13 @@ class SQLCipherEncryptionTests(BaseSoledadTest):
os.unlink(dbfile)
def setUp(self):
- # the following come from BaseLeapTest.setUpClass, because
- # twisted.trial doesn't support such class methods for setting up
- # test classes.
- self.old_path = os.environ['PATH']
- self.old_home = os.environ['HOME']
- self.tempdir = tempfile.mkdtemp(prefix="leap_tests-")
- self.home = self.tempdir
- bin_tdir = os.path.join(
- self.tempdir,
- 'bin')
- os.environ["PATH"] = bin_tdir
- os.environ["HOME"] = self.tempdir
- # this is our own stuff
+ BaseSoledadTest.setUp(self)
self.DB_FILE = os.path.join(self.tempdir, 'test.db')
self._delete_dbfiles()
def tearDown(self):
self._delete_dbfiles()
- # the following come from BaseLeapTest.tearDownClass, because
- # twisted.trial doesn't support such class methods for tearing down
- # test classes.
- os.environ["PATH"] = self.old_path
- os.environ["HOME"] = self.old_home
- # safety check! please do not wipe my home...
- # XXX needs to adapt to non-linuces
- soledad_assert(
- self.tempdir.startswith('/tmp/leap_tests-') or
- self.tempdir.startswith('/var/folder'),
- "beware! tried to remove a dir which does not "
- "live in temporal folder!")
- shutil.rmtree(self.tempdir)
+ BaseSoledadTest.tearDown(self)
def test_try_to_open_encrypted_db_with_sqlite_backend(self):
"""
diff --git a/testing/tests/sync/test_encdecpool.py b/testing/tests/sync/test_encdecpool.py
index 82e99a47..4a32885e 100644
--- a/testing/tests/sync/test_encdecpool.py
+++ b/testing/tests/sync/test_encdecpool.py
@@ -29,7 +29,6 @@ from leap.soledad.client.encdecpool import SyncDecrypterPool
from leap.soledad.common.document import SoledadDocument
from test_soledad.util import BaseSoledadTest
from twisted.internet import defer
-from twisted.test.proto_helpers import MemoryReactorClock
DOC_ID = "mydoc"
DOC_REV = "rev"
@@ -65,17 +64,11 @@ class TestSyncEncrypterPool(BaseSoledadTest):
"""
doc = SoledadDocument(
doc_id=DOC_ID, rev=DOC_REV, json=json.dumps(DOC_CONTENT))
- self._pool.encrypt_doc(doc)
- # exhaustivelly attempt to get the encrypted document
- encrypted = None
- attempts = 0
- while encrypted is None and attempts < 10:
- encrypted = yield self._pool.get_encrypted_doc(DOC_ID, DOC_REV)
- attempts += 1
+ yield self._pool.encrypt_doc(doc)
+ encrypted = yield self._pool.get_encrypted_doc(DOC_ID, DOC_REV)
self.assertIsNotNone(encrypted)
- self.assertTrue(attempts < 10)
class TestSyncDecrypterPool(BaseSoledadTest):
@@ -219,9 +212,6 @@ class TestSyncDecrypterPool(BaseSoledadTest):
This test ensures that processing of documents only occur if there is
a sequence in place.
"""
- reactor_clock = MemoryReactorClock()
- self._pool._loop.clock = reactor_clock
-
crypto = self._soledad._crypto
docs = []
@@ -234,18 +224,19 @@ class TestSyncDecrypterPool(BaseSoledadTest):
docs.append((doc, encrypted_content))
# insert the encrypted document in the pool
- self._pool.start(10) # pool is expecting to process 10 docs
+ yield self._pool.start(10) # pool is expecting to process 10 docs
+ self._pool._loop.stop() # we are processing manually
# first three arrives, forming a sequence
for i, (doc, encrypted_content) in enumerate(docs[:3]):
gen = idx = i + 1
yield self._pool.insert_encrypted_received_doc(
doc.doc_id, doc.rev, encrypted_content, gen, "trans_id", idx)
+
# last one arrives alone, so it can't be processed
doc, encrypted_content = docs[-1]
yield self._pool.insert_encrypted_received_doc(
doc.doc_id, doc.rev, encrypted_content, 10, "trans_id", 10)
- reactor_clock.advance(self._pool.DECRYPT_LOOP_PERIOD)
yield self._pool._decrypt_and_recurse()
self.assertEqual(3, self._pool._processed_docs)
diff --git a/testing/tests/sync/test_sync.py b/testing/tests/sync/test_sync.py
index 095884ce..5290003e 100644
--- a/testing/tests/sync/test_sync.py
+++ b/testing/tests/sync/test_sync.py
@@ -15,7 +15,6 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
-import tempfile
import threading
import time
@@ -60,7 +59,6 @@ class InterruptableSyncTestCase(
def setUp(self):
TestCaseWithServer.setUp(self)
CouchDBTestCase.setUp(self)
- self.tempdir = tempfile.mkdtemp(prefix="leap_tests-")
def tearDown(self):
CouchDBTestCase.tearDown(self)
@@ -101,8 +99,7 @@ class InterruptableSyncTestCase(
# ensure remote db exists before syncing
db = couch.CouchDatabase.open_database(
urljoin(self.couch_url, 'user-user-uuid'),
- create=True,
- ensure_ddocs=True)
+ create=True)
# create interruptor thread
t = _SyncInterruptor(sol, db)
diff --git a/testing/tests/sync/test_sync_mutex.py b/testing/tests/sync/test_sync_mutex.py
index 787cfee8..2626ab2a 100644
--- a/testing/tests/sync/test_sync_mutex.py
+++ b/testing/tests/sync/test_sync_mutex.py
@@ -24,8 +24,6 @@ be two concurrent synchronization processes at the same time.
import time
import uuid
-import tempfile
-import shutil
from urlparse import urljoin
@@ -91,13 +89,11 @@ class TestSyncMutex(
def setUp(self):
TestCaseWithServer.setUp(self)
CouchDBTestCase.setUp(self)
- self.tempdir = tempfile.mkdtemp(prefix="leap_tests-")
self.user = ('user-%s' % uuid.uuid4().hex)
def tearDown(self):
CouchDBTestCase.tearDown(self)
TestCaseWithServer.tearDown(self)
- shutil.rmtree(self.tempdir)
def test_two_concurrent_syncs_do_not_overlap_no_docs(self):
self.startServer()
@@ -105,8 +101,7 @@ class TestSyncMutex(
# ensure remote db exists before syncing
db = CouchDatabase.open_database(
urljoin(self.couch_url, 'user-' + self.user),
- create=True,
- ensure_ddocs=True)
+ create=True)
sol = self._soledad_instance(
user=self.user, server_url=self.getURL())