author    Victor Shyba <victor.shyba@gmail.com>  2016-08-20 00:42:34 -0300
committer Victor Shyba <victor.shyba@gmail.com>  2016-08-22 12:36:16 -0300
commit    b75165567539dcd59873395049ce2210776aa166
tree      36e3c368fe667780139c0dffc1e8b111eb68e050
parent    1dc2c18f02e62644da00cb0e3326f357953f5c84
[test] adds encdecpool tests
Most of them are commented out for now, as memory usage goes out of control with the larger payloads.
-rw-r--r--  testing/tests/perf/test_encdecpool.py  84
1 file changed, 84 insertions(+), 0 deletions(-)
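
Note: the new module relies on a small test-factory pattern: a closure builds a benchmark test for a given document count and payload size, and the module-level assignments (test_encrypt_1000_10k, and so on) are what pytest actually collects, which is why commenting out an assignment disables that case. As a minimal standalone illustration of the same pattern (the names and payloads below are examples only, not part of this patch, and assume the pytest-benchmark plugin is installed):

# Illustration of the test-factory pattern used in the patch below;
# names and sizes here are examples, not part of the commit.
import pytest


def create_upper(size):
    @pytest.mark.benchmark(group="test_upper")
    def test(benchmark):
        payload = 'x' * size
        benchmark(payload.upper)  # time a trivial operation on the payload
    return test


# pytest collects these module-level callables because their names start
# with "test_"; each one benchmarks a different payload size.
test_upper_10k = create_upper(10 * 1000)
test_upper_100k = create_upper(100 * 1000)
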
diff --git a/testing/tests/perf/test_encdecpool.py b/testing/tests/perf/test_encdecpool.py
new file mode 100644
index 00000000..dbbbea89
--- /dev/null
+++ b/testing/tests/perf/test_encdecpool.py
@@ -0,0 +1,84 @@
+import pytest
+import json
+from uuid import uuid4
+from twisted.internet.defer import gatherResults
+from leap.soledad.client.encdecpool import SyncEncrypterPool
+from leap.soledad.client.encdecpool import SyncDecrypterPool
+from leap.soledad.common.document import SoledadDocument
+
+
+def create_encrypt(amount, size):
+    @pytest.mark.benchmark(group="test_pool_encrypt")
+    @pytest.inlineCallbacks
+    def test(soledad_client, txbenchmark_with_setup, request):
+        DOC_CONTENT = {'payload': 'x'*size}
+
+        def setup():
+            client = soledad_client()
+            pool = SyncEncrypterPool(client._crypto, client._sync_db)
+            pool.start()
+            request.addfinalizer(pool.stop)
+            return (pool,), {}
+
+        @pytest.inlineCallbacks
+        def put_and_wait(pool):
+            doc_ids = []
+            deferreds = []
+            for _ in xrange(amount):
+                doc = SoledadDocument(
+                    doc_id=uuid4().hex, rev='rev',
+                    json=json.dumps(DOC_CONTENT))
+                deferreds.append(pool.encrypt_doc(doc))
+                doc_ids.append(doc.doc_id)
+            yield gatherResults(deferreds)
+
+        yield txbenchmark_with_setup(setup, put_and_wait)
+    return test
+
+test_encrypt_1000_10k = create_encrypt(1000, 10*1000)
+# test_encrypt_1000_500k = create_encrypt(1000, 500*1000)
+# test_encrypt_1000_1M = create_encrypt(1000, 1000*1000)
+# test_encrypt_1000_10M = create_encrypt(1000, 10*1000*1000)
+
+
+def create_decrypt(amount, size):
+    @pytest.mark.benchmark(group="test_pool_decrypt")
+    @pytest.inlineCallbacks
+    def test(soledad_client, txbenchmark_with_setup, request):
+        DOC_CONTENT = {'payload': 'x'*size}
+        client = soledad_client()
+
+        def setup():
+            pool = SyncDecrypterPool(
+                client._crypto,
+                client._sync_db,
+                source_replica_uid=client._dbpool.replica_uid,
+                insert_doc_cb=lambda x, y, z: False)  # no-op: decrypted docs are discarded
+            pool.start(amount)
+            request.addfinalizer(pool.stop)
+            crypto = client._crypto
+            docs = []
+            for _ in xrange(amount):
+                doc = SoledadDocument(
+                    doc_id=uuid4().hex, rev='rev',
+                    json=json.dumps(DOC_CONTENT))
+                encrypted_content = json.loads(crypto.encrypt_doc(doc))
+                docs.append((doc.doc_id, encrypted_content))
+            return (pool, docs), {}
+
+        def put_and_wait(pool, docs):
+            deferreds = []  # one deferred per doc; each fires once insertion completes
+            for idx, (doc_id, content) in enumerate(docs, 1):
+                deferreds.append(pool.insert_encrypted_received_doc(
+                    doc_id, 'rev', content, idx, "trans_id", idx))
+            return gatherResults(deferreds)
+
+        yield txbenchmark_with_setup(setup, put_and_wait)
+    return test
+
+test_decrypt_1000_10k = create_decrypt(1000, 10*1000)
+test_decrypt_1000_100k = create_decrypt(1000, 100*1000)
+# memory issues ahead
+# test_decrypt_1000_500k = create_decrypt(1000, 500*1000)
+# test_decrypt_1000_1M = create_decrypt(1000, 1000*1000)
+# test_decrypt_1000_10M = create_decrypt(1000, 10*1000*1000)
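
Note on fixtures: both tests depend on the soledad_client and txbenchmark_with_setup fixtures, which come from the perf suite's conftest.py and are not part of this patch. As a rough, hypothetical sketch only (the real fixture may be implemented differently; the rounds/warmup values below are arbitrary), txbenchmark_with_setup could bridge pytest-benchmark's pedantic mode with Twisted deferreds along these lines:

# Hypothetical sketch -- NOT the fixture shipped in the suite's conftest.py.
# It shows one way to benchmark a deferred-returning callable plus a separate
# setup step: run pytest-benchmark's pedantic mode in a worker thread and
# block that thread on the reactor for setup and for each timed run.
import pytest
from twisted.internet import reactor, threads


@pytest.fixture
def txbenchmark_with_setup(benchmark):
    def run(setup, f):
        def blocking_setup():
            # run setup() in the reactor thread; it returns (args, kwargs)
            # which pedantic mode passes to the benchmarked callable
            return threads.blockingCallFromThread(reactor, setup)

        def blocking_run(*args, **kwargs):
            # block the benchmark thread until the deferred returned by f fires
            return threads.blockingCallFromThread(reactor, f, *args, **kwargs)

        # run pedantic mode off the reactor thread so the blocking calls above
        # can wait on it; rounds/warmup_rounds are arbitrary choices here
        return threads.deferToThread(
            benchmark.pedantic, blocking_run,
            setup=blocking_setup, rounds=4, warmup_rounds=1)

    return run

Under that assumption, the "yield txbenchmark_with_setup(setup, put_and_wait)" lines in the tests above simply wait on the deferred returned by deferToThread, so each benchmark round performs the setup and the timed encrypt/decrypt batch against the running reactor.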